@inproceedings{oguz-vu-2020-two,
  title     = {A Two-stage Model for Slot Filling in Low-resource Settings: Domain-agnostic Non-slot Reduction and Pretrained Contextual Embeddings},
  author    = {Oguz, Cennet and
               Vu, Ngoc Thang},
  editor    = {Moosavi, Nafise Sadat and
               Fan, Angela and
               Shwartz, Vered and
               Glava{\v{s}}, Goran and
               Joty, Shafiq and
               Wang, Alex and
               Wolf, Thomas},
  booktitle = {Proceedings of {SustaiNLP}: Workshop on Simple and Efficient Natural Language Processing},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.sustainlp-1.10/},
  doi       = {10.18653/v1/2020.sustainlp-1.10},
  pages     = {73--82},
  abstract  = {Learning-based slot filling - a key component of spoken language understanding systems - typically requires a large amount of in-domain hand-labeled data for training. In this paper, we propose a novel two-stage model architecture that can be trained with only a few in-domain hand-labeled examples. The first step is designed to remove non-slot tokens (i.e., O labeled tokens), as they introduce noise in the input of slot filling models. This step is domain-agnostic and therefore, can be trained by exploiting out-of-domain data. The second step identifies slot names only for slot tokens by using state-of-the-art pretrained contextual embeddings such as {ELMO} and {BERT}. We show that our approach outperforms other state-of-art systems on the {SNIPS} benchmark dataset.},
}
Markdown (Informal)
[A Two-stage Model for Slot Filling in Low-resource Settings: Domain-agnostic Non-slot Reduction and Pretrained Contextual Embeddings](https://aclanthology.org/2020.sustainlp-1.10/) (Oguz & Vu, SustaiNLP 2020)
ACL