@inproceedings{zhu-etal-2023-towards-optimizing,
title = "Towards Optimizing Pre-trained Language Model Ensemble Learning for Task-oriented Dialogue System",
author = "Zhu, Zhiyuan and
Liao, Yusheng and
Chen, Zhe and
Wang, Yu and
Guan, Yunfeng",
editor = "Chen, Yun-Nung and
Crook, Paul and
Galley, Michel and
Ghazarian, Sarik and
Gunasekara, Chulaka and
Gupta, Raghav and
Hedayatnia, Behnam and
Kottur, Satwik and
Moon, Seungwhan and
Zhang, Chen",
booktitle = "Proceedings of the Eleventh Dialog System Technology Challenge",
month = sep,
year = "2023",
address = "Prague, Czech Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.dstc-1.17/",
pages = "144--149",
abstract = "Task-oriented dialogue systems that employ external knowledge to generate informative responses have become an important field of research. This paper outlines our contribution to Track 5 of the Eleventh Dialog System Technology Challenge (DSTC11), which focuses on constructing high-performing, subjective knowledge-enriched task-oriented dialogue systems. Specifically, we investigate the complementarity of various language models to tackle the diverse knowledge selection task that involves multiple external sources. Based on this investigation, we propose pre- and post-generation model ensemble approaches to mitigate potential biases inherent in using a single model for the knowledge selection task. Finally, we utilize the consensus decoding approach to combine fine-tuned ensemble models and improve the performance of the generation system. Our system ranked 1st in human evaluation, even outperforming human annotation."
}