@inproceedings{gupta-etal-2016-table,
title = "Table Filling Multi-Task Recurrent Neural Network for Joint Entity and Relation Extraction",
author = {Gupta, Pankaj and
Sch{\"u}tze, Hinrich and
Andrassy, Bernt},
editor = "Matsumoto, Yuji and
Prasad, Rashmi",
booktitle = "Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers",
month = dec,
year = "2016",
address = "Osaka, Japan",
publisher = "The COLING 2016 Organizing Committee",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/C16-1239/",
pages = "2537--2547",
abstract = "This paper proposes a novel context-aware joint entity and word-level relation extraction approach through semantic composition of words, introducing a Table Filling Multi-Task Recurrent Neural Network (TF-MTRNN) model that reduces the entity recognition and relation classification tasks to a table-filling problem and models their interdependencies. The proposed neural network architecture is capable of modeling multiple relation instances without knowing the corresponding relation arguments in a sentence. The experimental results show that a simple approach of piggybacking candidate entities to model the label dependencies from relations to entities improves performance. We present state-of-the-art results with improvements of 2.0{\%} and 2.7{\%} for entity recognition and relation classification, respectively on CoNLL04 dataset."
}
Markdown (Informal)
[Table Filling Multi-Task Recurrent Neural Network for Joint Entity and Relation Extraction](https://preview.aclanthology.org/add-emnlp-2024-awards/C16-1239/) (Gupta et al., COLING 2016)
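The abstract casts joint entity and relation extraction as filling an n x n table over a sentence's words: diagonal cells carry entity tags and off-diagonal cells carry word-level relation labels, so multiple relation instances can be read off the same table. The sketch below illustrates only that table-filling formulation under assumed toy label sets and dummy predictors standing in for the jointly trained network; it is not the TF-MTRNN implementation.

```python
# Minimal sketch of the table-filling formulation described in the abstract:
# for a sentence of n words, an n x n table is filled so that diagonal cells
# hold entity tags and off-diagonal cells hold word-level relation labels.
# Illustrative only; label sets and predictors are placeholder assumptions.
from typing import Callable, List

ENTITY_TAGS = ["O", "B-PER", "I-PER", "B-LOC", "I-LOC"]   # toy entity tag set
RELATION_LABELS = ["NONE", "Live_In", "Work_For"]          # toy relation label set

def fill_table(
    words: List[str],
    predict_entity: Callable[[List[str], int], str],
    predict_relation: Callable[[List[str], int, int], str],
) -> List[List[str]]:
    """Build the n x n label table for one sentence."""
    n = len(words)
    table = [["" for _ in range(n)] for _ in range(n)]
    for i in range(n):
        for j in range(n):
            if i == j:
                table[i][j] = predict_entity(words, i)        # entity tag on the diagonal
            else:
                table[i][j] = predict_relation(words, i, j)   # relation between word i and word j
    return table

if __name__ == "__main__":
    sentence = ["John", "lives", "in", "Boston"]
    # Dummy predictors standing in for the jointly trained neural scorers.
    ent = lambda ws, i: "B-PER" if ws[i] == "John" else ("B-LOC" if ws[i] == "Boston" else "O")
    rel = lambda ws, i, j: "Live_In" if (ws[i], ws[j]) == ("John", "Boston") else "NONE"
    for row in fill_table(sentence, ent, rel):
        print(row)
```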