@inproceedings{du-etal-2018-multi,
title = "Multi-Level Structured Self-Attentions for Distantly Supervised Relation Extraction",
author = "Du, Jinhua and
Han, Jingguang and
Way, Andy and
Wan, Dadong",
editor = "Riloff, Ellen and
Chiang, David and
Hockenmaier, Julia and
Tsujii, Jun{'}ichi",
booktitle = "Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing",
month = oct # "-" # nov,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/D18-1245/",
doi = "10.18653/v1/D18-1245",
pages = "2216--2225",
abstract = "Attention mechanism is often used in deep neural networks for distantly supervised relation extraction (DS-RE) to distinguish valid from noisy instances. However, traditional 1-D vector attention model is insufficient for learning of different contexts in the selection of valid instances to predict the relationship for an entity pair. To alleviate this issue, we propose a novel multi-level structured (2-D matrix) self-attention mechanism for DS-RE in a multi-instance learning (MIL) framework using bidirectional recurrent neural networks (BiRNN). In the proposed method, a structured word-level self-attention learns a 2-D matrix where each row vector represents a weight distribution for different aspects of an instance regarding two entities. Targeting the MIL issue, the structured sentence-level attention learns a 2-D matrix where each row vector represents a weight distribution on selection of different valid instances. Experiments conducted on two publicly available DS-RE datasets show that the proposed framework with multi-level structured self-attention mechanism significantly outperform baselines in terms of PR curves, P@N and F1 measures."
}
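
For readers who want a concrete picture of the "structured (2-D matrix) self-attention" named in the abstract, below is a minimal sketch of one attention level. It follows the A = softmax(W2 tanh(W1 H^T)) formulation of Lin et al. (2017), which this paper's word-level component builds on; it is an illustrative reconstruction, not the authors' implementation, and the module name `StructuredSelfAttention` and the dimensions `da` (attention hidden size) and `r` (number of attention rows/hops) are assumptions.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class StructuredSelfAttention(nn.Module):
    """Sketch of 2-D (matrix) self-attention over BiRNN hidden states.

    Instead of a single 1-D weight vector over positions, it learns an
    r x seq_len matrix: each of the r rows is a separate weight
    distribution, attending to a different aspect of the input.
    Dimensions `da` and `r` are illustrative choices, not from the paper.
    """

    def __init__(self, hidden_dim: int, da: int = 64, r: int = 4):
        super().__init__()
        self.w1 = nn.Linear(hidden_dim, da, bias=False)  # W1: hidden_dim -> da
        self.w2 = nn.Linear(da, r, bias=False)           # W2: da -> r attention rows

    def forward(self, h: torch.Tensor) -> torch.Tensor:
        # h: (batch, seq_len, hidden_dim) hidden states from a BiRNN encoder.
        # scores: (batch, seq_len, r); softmax over seq_len makes each of the
        # r columns a weight distribution over positions.
        scores = F.softmax(self.w2(torch.tanh(self.w1(h))), dim=1)
        a = scores.transpose(1, 2)  # (batch, r, seq_len) attention matrix A
        return a @ h                # (batch, r, hidden_dim) structured embedding

# Usage: embed a batch of 2 sequences of length 30 with 256-dim states.
h = torch.randn(2, 30, 256)
m = StructuredSelfAttention(hidden_dim=256)(h)  # -> shape (2, 4, 256)
```

In the paper's framework, one such attention operates at the word level within each sentence and a second operates at the sentence level across the instances in an MIL bag, so that each row of the sentence-level matrix weights a different selection of valid instances.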