@inproceedings{lu-etal-2019-sc,
title = "{SC}-{LSTM}: Learning Task-Specific Representations in Multi-Task Learning for Sequence Labeling",
author = "Lu, Peng and
Bai, Ting and
Langlais, Philippe",
editor = "Burstein, Jill and
Doran, Christy and
Solorio, Thamar",
booktitle = "Proceedings of the 2019 Conference of the North {A}merican Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers)",
month = jun,
year = "2019",
address = "Minneapolis, Minnesota",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/N19-1249/",
doi = "10.18653/v1/N19-1249",
pages = "2396--2406",
abstract = "Multi-task learning (MTL) has been studied recently for sequence labeling. Typically, auxiliary tasks are selected specifically in order to improve the performance of a target task. Jointly learning multiple tasks in a way that benefit all of them simultaneously can increase the utility of MTL. In order to do so, we propose a new LSTM cell which contains both shared parameters that can learn from all tasks, and task-specific parameters that can learn task-specific information. We name it a Shared-Cell Long-Short Term Memory SC-LSTM. Experimental results on three sequence labeling benchmarks (named-entity recognition, text chunking, and part-of-speech tagging) demonstrate the effectiveness of our SC-LSTM cell."
}
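
The abstract describes an LSTM cell that combines shared parameters (trained on all tasks) with task-specific parameters. The PyTorch sketch below illustrates that general idea only; the class name, the additive combination of the two transforms, and all sizes are illustrative assumptions, not the paper's exact SC-LSTM formulation.

```python
import torch
import torch.nn as nn

class SharedCellLSTM(nn.Module):
    """Minimal sketch of an LSTM cell mixing shared and task-specific
    parameters. The additive combination below is an assumption; the
    paper's actual SC-LSTM gating may differ."""

    def __init__(self, input_size: int, hidden_size: int, num_tasks: int):
        super().__init__()
        # Shared transform: updated by gradients from every task.
        self.shared = nn.Linear(input_size + hidden_size, 4 * hidden_size)
        # One task-specific transform per task.
        self.task_specific = nn.ModuleList(
            [nn.Linear(input_size + hidden_size, 4 * hidden_size)
             for _ in range(num_tasks)]
        )

    def forward(self, x, h, c, task_id: int):
        z = torch.cat([x, h], dim=-1)
        # Combine shared and task-specific pre-activations (assumed additive).
        gates = self.shared(z) + self.task_specific[task_id](z)
        i, f, g, o = gates.chunk(4, dim=-1)
        c_new = torch.sigmoid(f) * c + torch.sigmoid(i) * torch.tanh(g)
        h_new = torch.sigmoid(o) * torch.tanh(c_new)
        return h_new, c_new

# Hypothetical usage: one step on task 0.
cell = SharedCellLSTM(input_size=100, hidden_size=200, num_tasks=3)
x = torch.randn(8, 100)
h = torch.zeros(8, 200)
c = torch.zeros(8, 200)
h, c = cell(x, h, c, task_id=0)
```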