@inproceedings{nayak-ng-2019-effective,
  title     = {Effective Attention Modeling for Neural Relation Extraction},
  author    = {Nayak, Tapas and
               Ng, Hwee Tou},
  editor    = {Bansal, Mohit and
               Villavicencio, Aline},
  booktitle = {Proceedings of the 23rd Conference on Computational Natural Language Learning ({CoNLL})},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/K19-1056/},
  doi       = {10.18653/v1/K19-1056},
  pages     = {603--612},
  abstract  = {Relation extraction is the task of determining the relation between two entities in a sentence. Distantly-supervised models are popular for this task. However, sentences can be long and two entities can be located far from each other in a sentence. The pieces of evidence supporting the presence of a relation between two entities may not be very direct, since the entities may be connected via some indirect links such as a third entity or via co-reference. Relation extraction in such scenarios becomes more challenging as we need to capture the long-distance interactions among the entities and other words in the sentence. Also, the words in a sentence do not contribute equally in identifying the relation between the two entities. To address this issue, we propose a novel and effective attention model which incorporates syntactic information of the sentence and a multi-factor attention mechanism. Experiments on the New York Times corpus show that our proposed model outperforms prior state-of-the-art models.},
}
@comment{
  Markdown (Informal):
  [Effective Attention Modeling for Neural Relation Extraction](https://aclanthology.org/K19-1056/) (Nayak & Ng, CoNLL 2019)
  ACL
}