@inproceedings{gaspers-etal-2021-impact,
    title     = {The impact of domain-specific representations on {BERT}-based multi-domain spoken language understanding},
    author    = {Gaspers, Judith and
                 Do, Quynh and
                 R{\"o}ding, Tobias and
                 Bradford, Melanie},
    editor    = {Ben-David, Eyal and
                 Cohen, Shay and
                 McDonald, Ryan and
                 Plank, Barbara and
                 Reichart, Roi and
                 Rotman, Guy and
                 Ziser, Yftah},
    booktitle = {Proceedings of the Second Workshop on Domain Adaptation for NLP},
    month     = apr,
    year      = {2021},
    address   = {Kyiv, Ukraine},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2021.adaptnlp-1.4/},
    pages     = {28--32},
    abstract  = {This paper provides the first experimental study on the impact of using domain-specific representations on a BERT-based multi-task spoken language understanding (SLU) model for multi-domain applications. Our results on a real-world dataset covering three languages indicate that by using domain-specific representations learned adversarially, model performance can be improved across all of the three SLU subtasks domain classification, intent classification and slot filling. Gains are particularly large for domains with limited training data.},
}
Markdown (Informal)
[The impact of domain-specific representations on BERT-based multi-domain spoken language understanding](https://aclanthology.org/2021.adaptnlp-1.4/) (Gaspers et al., AdaptNLP 2021)
ACL