@inproceedings{jia-zhang-2020-multi,
    title = "Multi-Cell Compositional {LSTM} for {NER} Domain Adaptation",
    author = "Jia, Chen and
      Zhang, Yue",
    editor = "Jurafsky, Dan and
      Chai, Joyce and
      Schluter, Natalie and
      Tetreault, Joel",
    booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
    month = jul,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.acl-main.524/",
    doi = "10.18653/v1/2020.acl-main.524",
    pages = "5906--5917",
    abstract = "Cross-domain NER is a challenging yet practical problem. Entity mentions can be highly different across domains. However, the correlations between entity types can be relatively more stable across domains. We investigate a multi-cell compositional LSTM structure for multi-task learning, modeling each entity type using a separate cell state. With the help of entity typed units, cross-domain knowledge transfer can be made in an entity type level. Theoretically, the resulting distinct feature distributions for each entity type make it more powerful for cross-domain transfer. Empirically, experiments on four few-shot and zero-shot datasets show our method significantly outperforms a series of multi-task learning methods and achieves the best results."
}
Markdown (Informal)
[Multi-Cell Compositional LSTM for NER Domain Adaptation](https://aclanthology.org/2020.acl-main.524/) (Jia & Zhang, ACL 2020)
ACL