@inproceedings{yan-etal-2023-joint,
title = "Joint Entity and Relation Extraction with Span Pruning and Hypergraph Neural Networks",
author = "Yan, Zhaohui and
Yang, Songlin and
Liu, Wei and
Tu, Kewei",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.emnlp-main.467/",
doi = "10.18653/v1/2023.emnlp-main.467",
pages = "7512--7526",
abstract = "Entity and Relation Extraction (ERE) is an important task in information extraction. Recent marker-based pipeline models achieve state-of-the-art performance, but still suffer from the error propagation issue. Also, most of current ERE models do not take into account higher-order interactions between multiple entities and relations, while higher-order modeling could be beneficial.In this work, we propose HyperGraph neural network for ERE (HGERE), which is built upon the PL-marker (a state-of-the-art marker-based pipleline model). To alleviate error propagation, we use a high-recall pruner mechanism to transfer the burden of entity identification and labeling from the NER module to the joint module of our model. For higher-order modeling, we build a hypergraph, where nodes are entities (provided by the span pruner) and relations thereof, and hyperedges encode interactions between two different relations or between a relation and its associated subject and object entities. We then run a hypergraph neural network for higher-order inference by applying message passing over the built hypergraph. Experiments on three widely used benchmarks (ACE2004, ACE2005 and SciERC) for ERE task show significant improvements over the previous state-of-the-art PL-marker."
}
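For intuition, here is a minimal sketch of the hypergraph message-passing idea the abstract describes: nodes for candidate entity spans and candidate relations, hyperedges tying a relation to its subject and object spans or linking two interacting relations. All names, edge types, dimensions, and update rules below are illustrative assumptions, not the authors' implementation.

```python
# A minimal sketch of hypergraph message passing in the spirit of HGERE.
# Assumption: nodes = candidate entity spans (from a span pruner) plus
# candidate relations; hyperedges = (relation, subject, object) triples
# and relation-relation interactions. Illustrative only.
import numpy as np

rng = np.random.default_rng(0)

# Toy setup: 4 candidate entity spans + 2 candidate relations = 6 nodes.
num_entities, num_relations, dim = 4, 2, 8
X = rng.normal(size=(num_entities + num_relations, dim))  # node features

# Hyperedges over node indices (entities 0-3, relations 4-5).
hyperedges = [
    [4, 0, 1],  # relation node 4 with subject span 0 and object span 1
    [5, 1, 2],  # relation node 5 with subject span 1 and object span 2
    [4, 5],     # relation-relation interaction (they share span 1)
]

W = rng.normal(size=(dim, dim)) / np.sqrt(dim)  # shared projection

def hypergraph_layer(X, hyperedges, W):
    """One round of node -> hyperedge -> node message passing."""
    # Hyperedge message: mean of its member nodes' features.
    edge_msgs = [X[e].mean(axis=0) for e in hyperedges]
    out = X.copy()
    for i in range(X.shape[0]):
        # Aggregate messages from all hyperedges incident to node i.
        incident = [m for e, m in zip(hyperedges, edge_msgs) if i in e]
        if incident:
            agg = np.mean(incident, axis=0)
            # Residual-style update with a ReLU nonlinearity.
            out[i] = np.maximum(0.0, (X[i] + agg) @ W)
    return out

X = hypergraph_layer(X, hyperedges, W)
print(X.shape)  # (6, 8): refined features for entity and relation scoring
```

In the paper's pipeline, representations refined this way would feed joint entity and relation classifiers; the aggregation and update functions here are placeholders for whatever the actual model uses.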