@inproceedings{mou-etal-2016-sequence,
  title     = {Sequence to Backward and Forward Sequences: A Content-Introducing Approach to Generative Short-Text Conversation},
  author    = {Mou, Lili and
               Song, Yiping and
               Yan, Rui and
               Li, Ge and
               Zhang, Lu and
               Jin, Zhi},
  editor    = {Matsumoto, Yuji and
               Prasad, Rashmi},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The {COLING} 2016 Organizing Committee},
  url       = {https://aclanthology.org/C16-1316/},
  pages     = {3349--3358},
  abstract  = {Using neural networks to generate replies in human-computer dialogue systems is attracting increasing attention over the past few years. However, the performance is not satisfactory: the neural network tends to generate safe, universally relevant replies which carry little meaning. In this paper, we propose a content-introducing approach to neural network-based generative dialogue systems. We first use pointwise mutual information (PMI) to predict a noun as a keyword, reflecting the main gist of the reply. We then propose seq2BF, a ``sequence to backward and forward sequences'' model, which generates a reply containing the given keyword. Experimental results show that our approach significantly outperforms traditional sequence-to-sequence models in terms of human evaluation and the entropy measure, and that the predicted keyword can appear at an appropriate position in the reply.},
}
Markdown (Informal)
[Sequence to Backward and Forward Sequences: A Content-Introducing Approach to Generative Short-Text Conversation](https://aclanthology.org/C16-1316/) (Mou et al., COLING 2016)
ACL