@inproceedings{cao-ananiadou-2021-generativere-incorporating,
  title     = {{GenerativeRE}: Incorporating a Novel Copy Mechanism and Pretrained Model for Joint Entity and Relation Extraction},
  author    = {Cao, Jiarun and
               Ananiadou, Sophia},
  editor    = {Moens, Marie-Francine and
               Huang, Xuanjing and
               Specia, Lucia and
               Yih, Scott Wen-tau},
  booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2021},
  month     = nov,
  year      = {2021},
  address   = {Punta Cana, Dominican Republic},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.findings-emnlp.182/},
  doi       = {10.18653/v1/2021.findings-emnlp.182},
  pages     = {2119--2126},
  abstract  = {Previous neural Seq2Seq models have shown the effectiveness for jointly extracting relation triplets. However, most of these models suffer from incompletion and disorder problems when they extract multi-token entities from input sentences. To tackle these problems, we propose a generative, multi-task learning framework, named GenerativeRE. We firstly propose a special entity labelling method on both input and output sequences. During the training stage, GenerativeRE fine-tunes the pre-trained generative model and learns the special entity labels simultaneously. During the inference stage, we propose a novel copy mechanism equipped with three mask strategies, to generate the most probable tokens by diminishing the scope of the model decoder. Experimental results show that our model achieves 4.6{\%} and 0.9{\%} F1 score improvements over the current state-of-the-art methods in the NYT24 and NYT29 benchmark datasets respectively.},
}
Markdown (Informal)
[GenerativeRE: Incorporating a Novel Copy Mechanism and Pretrained Model for Joint Entity and Relation Extraction](https://aclanthology.org/2021.findings-emnlp.182/) (Cao & Ananiadou, Findings 2021)
ACL