@inproceedings{zhang-etal-2018-higher,
title = "Does Higher Order {LSTM} Have Better Accuracy for Segmenting and Labeling Sequence Data?",
author = "Zhang, Yi and
Sun, Xu and
Ma, Shuming and
Yang, Yang and
Ren, Xuancheng",
editor = "Bender, Emily M. and
Derczynski, Leon and
Isabelle, Pierre",
booktitle = "Proceedings of the 27th International Conference on Computational Linguistics",
month = aug,
year = "2018",
address = "Santa Fe, New Mexico, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/C18-1061/",
pages = "723--733",
abstract = "Existing neural models usually predict the tag of the current token independent of the neighboring tags. The popular LSTM-CRF model considers the tag dependencies between every two consecutive tags. However, it is hard for existing neural models to take longer distance dependencies between tags into consideration. The scalability is mainly limited by the complex model structures and the cost of dynamic programming during training. In our work, we first design a new model called {\textquotedblleft}high order LSTM{\textquotedblright} to predict multiple tags for the current token which contains not only the current tag but also the previous several tags. We call the number of tags in one prediction as {\textquotedblleft}order{\textquotedblright}. Then we propose a new method called Multi-Order BiLSTM (MO-BiLSTM) which combines low order and high order LSTMs together. MO-BiLSTM keeps the scalability to high order models with a pruning technique. We evaluate MO-BiLSTM on all-phrase chunking and NER datasets. Experiment results show that MO-BiLSTM achieves the state-of-the-art result in chunking and highly competitive results in two NER datasets."
}
Markdown (Informal)
[Does Higher Order LSTM Have Better Accuracy for Segmenting and Labeling Sequence Data?](https://aclanthology.org/C18-1061/) (Zhang et al., COLING 2018)
ACL
Yi Zhang, Xu Sun, Shuming Ma, Yang Yang, and Xuancheng Ren. 2018. Does Higher Order LSTM Have Better Accuracy for Segmenting and Labeling Sequence Data?. In Proceedings of the 27th International Conference on Computational Linguistics, pages 723–733, Santa Fe, New Mexico, USA. Association for Computational Linguistics.
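The abstract describes tagging with tag n-grams: an order-k model predicts, at each token, the current tag together with the k-1 previous tags, and MO-BiLSTM combines several orders, with pruning to keep the label space tractable. Below is a minimal PyTorch sketch of a single-order version of that idea. It is not the authors' code: the class and variable names are hypothetical, the pruning step and the multi-order combination are omitted, and decoding is a simple greedy readout rather than the paper's method.

```python
# Hypothetical sketch of "high order" tag prediction (not the authors' code):
# the output label space is the set of tag k-grams, and each token is scored
# over all k-grams covering it and the k-1 preceding tags.
import itertools
import torch
import torch.nn as nn

class HighOrderBiLSTMTagger(nn.Module):
    def __init__(self, vocab_size, tags, order=2, emb_dim=50, hidden=100):
        super().__init__()
        self.order = order
        # Order-k label space: all tag k-grams (|T|^k labels). The paper
        # prunes this space for scalability; pruning is omitted here.
        self.ngrams = list(itertools.product(tags, repeat=order))
        self.emb = nn.Embedding(vocab_size, emb_dim)
        self.bilstm = nn.LSTM(emb_dim, hidden,
                              bidirectional=True, batch_first=True)
        self.out = nn.Linear(2 * hidden, len(self.ngrams))

    def forward(self, token_ids):
        h, _ = self.bilstm(self.emb(token_ids))
        # Scores over tag k-grams: (batch, seq_len, |T|^order)
        return self.out(h)

    def decode(self, token_ids):
        # Greedy readout: take the best k-gram at each position and keep its
        # last tag as that token's prediction. The paper instead combines
        # predictions from multiple orders (MO-BiLSTM), omitted here.
        with torch.no_grad():
            best = self.forward(token_ids).argmax(-1)
        return [[self.ngrams[j][-1] for j in row] for row in best.tolist()]

tags = ["B", "I", "O"]
model = HighOrderBiLSTMTagger(vocab_size=1000, tags=tags, order=2)
tokens = torch.randint(0, 1000, (1, 6))  # one toy sentence of 6 token ids
print(model.decode(tokens))
```

Note the scalability issue the abstract raises: the label space grows as |T|^k with the order k, which is why the paper's pruning technique matters for anything beyond small orders.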