@inproceedings{lee-etal-2021-effective,
title = "Effective Use of Graph Convolution Network and Contextual Sub-Tree for Commodity News Event Extraction",
author = "Lee, Meisin and
Soon, Lay-Ki and
Siew, Eu-Gene",
editor = "Hahn, Udo and
Hoste, Veronique and
Stent, Amanda",
booktitle = "Proceedings of the Third Workshop on Economics and Natural Language Processing",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.econlp-1.10/",
doi = "10.18653/v1/2021.econlp-1.10",
pages = "69--81",
abstract = "Event extraction in commodity news is a less researched area as compared to generic event extraction. However, accurate event extraction from commodity news is useful in abroad range of applications such as under-standing event chains and learning event-event relations, which can then be used for commodity price prediction. The events found in commodity news exhibit characteristics different from generic events, hence posing a unique challenge in event extraction using existing methods. This paper proposes an effective use of Graph Convolutional Networks(GCN) with a pruned dependency parse tree, termed contextual sub-tree, for better event ex-traction in commodity news. The event ex-traction model is trained using feature embed-dings from ComBERT, a BERT-based masked language model that was produced through domain-adaptive pre-training on a commodity news corpus. Experimental results show the efficiency of the proposed solution, which out-performs existing methods with F1 scores as high as 0.90. Furthermore, our pre-trained language model outperforms GloVe by 23{\%}, and BERT and RoBERTa by 7{\%} in terms of argument roles classification. For the goal of re-producibility, the code and trained models are made publicly available."
}