@inproceedings{zhao-etal-2017-learning,
title = "Learning Discourse-level Diversity for Neural Dialog Models using Conditional Variational Autoencoders",
author = "Zhao, Tiancheng and
Zhao, Ran and
Eskenazi, Maxine",
editor = "Barzilay, Regina and
Kan, Min-Yen",
booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2017",
address = "Vancouver, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/P17-1061/",
doi = "10.18653/v1/P17-1061",
pages = "654--664",
abstract = "While recent neural encoder-decoder models have shown great promise in modeling open-domain conversations, they often generate dull and generic responses. Unlike past work that has focused on diversifying the output of the decoder from word-level to alleviate this problem, we present a novel framework based on conditional variational autoencoders that capture the discourse-level diversity in the encoder. Our model uses latent variables to learn a distribution over potential conversational intents and generates diverse responses using only greedy decoders. We have further developed a novel variant that is integrated with linguistic prior knowledge for better performance. Finally, the training procedure is improved through introducing a bag-of-word loss. Our proposed models have been validated to generate significantly more diverse responses than baseline approaches and exhibit competence of discourse-level decision-making."
}
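For context, the training objective summarized in the abstract pairs the standard conditional-VAE evidence lower bound with an auxiliary bag-of-word term. The sketch below writes it in conventional notation (c the dialog context, x the response, z the latent intent variable); it is a reconstruction from the abstract, not a quotation of the paper's own equations.

\mathcal{L}_{\mathrm{CVAE}}(\theta, \phi; x, c) =
  \mathbb{E}_{q_\phi(z \mid x, c)}\big[\log p_\theta(x \mid z, c)\big]
  - \mathrm{KL}\big(q_\phi(z \mid x, c) \,\|\, p_\theta(z \mid c)\big)

\mathcal{L}(\theta, \phi; x, c) =
  \mathcal{L}_{\mathrm{CVAE}}(\theta, \phi; x, c)
  + \mathbb{E}_{q_\phi(z \mid x, c)}\big[\log p(x_{\mathrm{bow}} \mid z, c)\big]

Here x_bow denotes a bag-of-words view of the response, predicted from z and c without regard to word order, which encourages the latent variable to carry response-level content.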
Markdown (Informal)
[Learning Discourse-level Diversity for Neural Dialog Models using Conditional Variational Autoencoders](https://aclanthology.org/P17-1061/) (Zhao et al., ACL 2017)