@inproceedings{pandey-etal-2022-mix,
  title     = {{Mix-and-Match}: Scalable Dialog Response Retrieval using {Gaussian} Mixture Embeddings},
  author    = {Pandey, Gaurav and
               Contractor, Danish and
               Joshi, Sachindra},
  editor    = {Goldberg, Yoav and
               Kozareva, Zornitsa and
               Zhang, Yue},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2022},
  month     = dec,
  year      = {2022},
  address   = {Abu Dhabi, United Arab Emirates},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.findings-emnlp.239/},
  doi       = {10.18653/v1/2022.findings-emnlp.239},
  pages     = {3273--3287},
  abstract  = {Embedding-based approaches for dialog response retrieval embed the context-response pairs as points in the embedding space. These approaches are scalable, but fail to account for the complex, many-to-many relationships that exist between context-response pairs. On the other end of the spectrum, there are approaches that feed the context-response pairs jointly through multiple layers of neural networks. These approaches can model the complex relationships between context-response pairs, but fail to scale when the set of responses is moderately large ({\ensuremath{>}}1000). In this paper, we propose a scalable model that can learn complex relationships between context-response pairs. Specifically, the model maps the contexts as well as responses to probability distributions over the embedding space. We train the models by optimizing the Kullback-Leibler divergence between the distributions induced by context-response pairs in the training data. We show that the resultant model achieves better performance as compared to other embedding-based approaches on publicly available conversation data.},
}
Markdown (Informal)
[Mix-and-Match: Scalable Dialog Response Retrieval using Gaussian Mixture Embeddings](https://aclanthology.org/2022.findings-emnlp.239/) (Pandey et al., Findings 2022)
ACL