@inproceedings{lv-etal-2023-envisioning,
title = "Envisioning Future from the Past: Hierarchical Duality Learning for Multi-Turn Dialogue Generation",
author = "Lv, Ang and
Li, Jinpeng and
Xie, Shufang and
Yan, Rui",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Ingest-2025-COMPUTEL/2023.acl-long.407/",
doi = "10.18653/v1/2023.acl-long.407",
pages = "7382--7394",
abstract = "In this paper, we define a widely neglected property in dialogue text, duality, which is a hierarchical property that is reflected in human behaviours in daily conversations: Based on the logic in a conversation (or a sentence), people can infer follow-up utterances (or tokens) based on the previous text, and vice versa. We propose a hierarchical duality learning for dialogue (HDLD) to simulate this human cognitive ability, for generating high quality responses that connect both previous and follow-up dialogues. HDLD utilizes hierarchical dualities at token hierarchy and utterance hierarchy. HDLD maximizes the mutual information between past and future utterances. Thus, even if future text is invisible during inference, HDLD is capable of estimating future information implicitly based on dialogue history and generates both coherent and informative responses. In contrast to previous approaches that solely utilize future text as auxiliary information to encode during training, HDLD leverages duality to enable interaction between dialogue history and the future. This enhances the utilization of dialogue data, leading to the improvement in both automatic and human evaluation."
}
[Envisioning Future from the Past: Hierarchical Duality Learning for Multi-Turn Dialogue Generation](https://preview.aclanthology.org/Ingest-2025-COMPUTEL/2023.acl-long.407/) (Lv et al., ACL 2023)