@inproceedings{asada-miwa-2025-improving,
    title = "Improving Relation Extraction by Sequence-to-sequence-based Dependency Parsing Pre-training",
    author = "Asada, Masaki and
      Miwa, Makoto",
    editor = "Rambow, Owen and
      Wanner, Leo and
      Apidianaki, Marianna and
      Al-Khalifa, Hend and
      Di Eugenio, Barbara and
      Schockaert, Steven",
    booktitle = "Proceedings of the 31st International Conference on Computational Linguistics",
    month = jan,
    year = "2025",
    address = "Abu Dhabi, UAE",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.coling-main.473/",
    pages = "7099--7105",
    abstract = "Relation extraction is a crucial natural language processing task that extracts relational triplets from raw text. Syntactic dependencies information has shown its effectiveness for relation extraction tasks. However, in most existing studies, dependency information is used only for traditional encoder-only-based relation extraction, not for generative sequence-to-sequence (seq2seq)-based relation extraction. In this study, we propose a syntax-aware seq2seq pre-trained model for seq2seq-based relation extraction. The model incorporates dependency information into a seq2seq pre-trained language model by continual pre-training with a seq2seq-based dependency parsing task. Experimental results on two widely used relation extraction benchmark datasets show that dependency parsing pre-training can improve the relation extraction performance."
}
Markdown (Informal)
[Improving Relation Extraction by Sequence-to-sequence-based Dependency Parsing Pre-training](https://aclanthology.org/2025.coling-main.473/) (Asada & Miwa, COLING 2025)
ACL