@inproceedings{wang-etal-2021-cs,
title = "{CS}-{BERT}: a pretrained model for customer service dialogues",
author = "Wang, Peiyao and
Fang, Joyce and
Reinspach, Julia",
editor = "Papangelis, Alexandros and
Budzianowski, Pawe{\l} and
Liu, Bing and
Nouri, Elnaz and
Rastogi, Abhinav and
Chen, Yun-Nung",
booktitle = "Proceedings of the 3rd Workshop on Natural Language Processing for Conversational AI",
month = nov,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.nlp4convai-1.13/",
doi = "10.18653/v1/2021.nlp4convai-1.13",
pages = "130--142",
abstract = "Large-scale pretrained transformer models have demonstrated state-of-the-art (SOTA) performance in a variety of NLP tasks. Nowadays, numerous pretrained models are available in different model flavors and different languages, and can be easily adapted to one`s downstream task. However, only a limited number of models are available for dialogue tasks, and in particular, goal-oriented dialogue tasks. In addition, the available pretrained models are trained on general domain language, creating a mismatch between the pretraining language and the downstream domain launguage. In this contribution, we present CS-BERT, a BERT model pretrained on millions of dialogues in the customer service domain. We evaluate CS-BERT on several downstream customer service dialogue tasks, and demonstrate that our in-domain pretraining is advantageous compared to other pretrained models in both zero-shot experiments as well as in finetuning experiments, especially in a low-resource data setting."
}
Markdown (Informal)
[CS-BERT: a pretrained model for customer service dialogues](https://aclanthology.org/2021.nlp4convai-1.13/) (Wang et al., NLP4ConvAI 2021)
ACL
Peiyao Wang, Joyce Fang, and Julia Reinspach. 2021. CS-BERT: a pretrained model for customer service dialogues. In Proceedings of the 3rd Workshop on Natural Language Processing for Conversational AI, pages 130–142, Online. Association for Computational Linguistics.
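
The abstract describes continued in-domain (masked language model) pretraining of BERT on customer service dialogues, followed by finetuning on downstream dialogue tasks. Below is a minimal sketch of what such domain-adaptive MLM pretraining could look like using the HuggingFace `transformers` and `datasets` libraries; it is not the authors' actual pipeline, and the data file `customer_service_turns.txt` and output directory `cs-bert-mlm` are hypothetical placeholders.

```python
# Sketch only: continue BERT pretraining with masked language modeling (MLM)
# on in-domain dialogue text, as the abstract describes at a high level.
from transformers import (
    BertTokenizerFast,
    BertForMaskedLM,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)
from datasets import load_dataset

tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased")
model = BertForMaskedLM.from_pretrained("bert-base-uncased")

# Hypothetical plain-text corpus: one dialogue turn (or concatenated turns) per line.
dataset = load_dataset("text", data_files={"train": "customer_service_turns.txt"})

def tokenize(batch):
    return tokenizer(batch["text"], truncation=True, max_length=128)

tokenized = dataset["train"].map(tokenize, batched=True, remove_columns=["text"])

# Randomly masks 15% of tokens on the fly (BERT's standard MLM objective).
collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm_probability=0.15)

trainer = Trainer(
    model=model,
    args=TrainingArguments(
        output_dir="cs-bert-mlm",
        num_train_epochs=1,
        per_device_train_batch_size=16,
    ),
    data_collator=collator,
    train_dataset=tokenized,
)
trainer.train()
trainer.save_model("cs-bert-mlm")
```

The resulting checkpoint can then be loaded like any other BERT model (e.g. with `BertForSequenceClassification.from_pretrained("cs-bert-mlm")`) for the kind of downstream finetuning and zero-shot evaluation the paper reports.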