@inproceedings{wang-etal-2018-neural-transition,
  title     = {A Neural Transition-based Model for Nested Mention Recognition},
  author    = {Wang, Bailin and
               Lu, Wei and
               Wang, Yu and
               Jin, Hongxia},
  editor    = {Riloff, Ellen and
               Chiang, David and
               Hockenmaier, Julia and
               Tsujii, Jun{'}ichi},
  booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing},
  month     = oct # "--" # nov,
  year      = {2018},
  address   = {Brussels, Belgium},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D18-1124/},
  doi       = {10.18653/v1/D18-1124},
  pages     = {1011--1017},
  abstract  = {It is common that entity mentions can contain other mentions recursively. This paper introduces a scalable transition-based method to model the nested structure of mentions. We first map a sentence with nested mentions to a designated forest where each mention corresponds to a constituent of the forest. Our shift-reduce based system then learns to construct the forest structure in a bottom-up manner through an action sequence whose maximal length is guaranteed to be three times of the sentence length. Based on Stack-LSTM which is employed to efficiently and effectively represent the states of the system in a continuous space, our system is further incorporated with a character-based component to capture letter-level patterns. Our model gets the state-of-the-art performances in ACE datasets, showing its effectiveness in detecting nested mentions.},
}
Markdown (Informal)
[A Neural Transition-based Model for Nested Mention Recognition](https://aclanthology.org/D18-1124/) (Wang et al., EMNLP 2018)
ACL