@inproceedings{liang-etal-2022-raat,
title = "{RAAT}: Relation-Augmented Attention Transformer for Relation Modeling in Document-Level Event Extraction",
author = "Liang, Yuan and
Jiang, Zhuoxuan and
Yin, Di and
Ren, Bo",
editor = "Carpuat, Marine and
de Marneffe, Marie-Catherine and
Meza Ruiz, Ivan Vladimir",
booktitle = "Proceedings of the 2022 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.naacl-main.367/",
doi = "10.18653/v1/2022.naacl-main.367",
pages = "4985--4997",
abstract = "In document-level event extraction (DEE) task, event arguments always scatter across sentences (across-sentence issue) and multipleevents may lie in one document (multi-event issue). In this paper, we argue that the relation information of event arguments is of greatsignificance for addressing the above two issues, and propose a new DEE framework which can model the relation dependencies, calledRelation-augmented Document-level Event Extraction (ReDEE). More specifically, this framework features a novel and tailored transformer,named as Relation-augmented Attention Transformer (RAAT). RAAT is scalable to capture multi-scale and multi-amount argument relations. To further leverage relation information, we introduce a separate event relation prediction task and adopt multi-task learning method to explicitly enhance event extraction performance. Extensive experiments demonstrate the effectiveness of the proposed method, which can achieve state-of-the-art performance on two public datasets. Our code is available at \url{https://github.com/TencentYoutuResearch/RAAT}."
}
[RAAT: Relation-Augmented Attention Transformer for Relation Modeling in Document-Level Event Extraction](https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.naacl-main.367/) (Liang et al., NAACL 2022)
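A rough sketch of the idea described in the abstract (not the authors' released implementation at the GitHub link above): relation-augmented attention adds a learned bias, indexed by the relation type between each pair of tokens, to ordinary scaled dot-product attention logits. The class name `RelationAugmentedAttention`, the single-head design, and the scalar per-relation bias are simplifying assumptions.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class RelationAugmentedAttention(nn.Module):
    """Single-head self-attention with an additive, relation-typed bias (illustrative only)."""

    def __init__(self, d_model: int, num_relations: int):
        super().__init__()
        self.q_proj = nn.Linear(d_model, d_model)
        self.k_proj = nn.Linear(d_model, d_model)
        self.v_proj = nn.Linear(d_model, d_model)
        # One learned scalar bias per relation type (index 0 = "no relation").
        self.rel_bias = nn.Embedding(num_relations, 1)
        self.scale = d_model ** -0.5

    def forward(self, x: torch.Tensor, rel_ids: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, d_model); rel_ids: (batch, seq_len, seq_len) long tensor
        q, k, v = self.q_proj(x), self.k_proj(x), self.v_proj(x)
        logits = torch.matmul(q, k.transpose(-1, -2)) * self.scale
        # Add the relation-dependent bias for every query/key pair of tokens.
        logits = logits + self.rel_bias(rel_ids).squeeze(-1)
        attn = F.softmax(logits, dim=-1)
        return torch.matmul(attn, v)

# Tiny usage example with random tensors.
layer = RelationAugmentedAttention(d_model=16, num_relations=4)
x = torch.randn(2, 5, 16)
rel_ids = torch.randint(0, 4, (2, 5, 5))
out = layer(x, rel_ids)
print(out.shape)  # torch.Size([2, 5, 16])
```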