@inproceedings{hua-etal-2023-improving,
title = "Improving Long Dialogue Summarization with Semantic Graph Representation",
author = "Hua, Yilun and
Deng, Zhaoyuan and
McKeown, Kathleen",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2023",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.findings-acl.871/",
doi = "10.18653/v1/2023.findings-acl.871",
pages = "13851--13883",
abstract = "Although Large Language Models (LLMs) are successful in abstractive summarization of short dialogues, summarization of long dialogues remains challenging. To address this challenge, we propose a novel algorithm that processes complete dialogues comprising thousands of tokens into topic-segment-level Abstract Meaning Representation (AMR) graphs, which explicitly capture the dialogue structure, highlight salient semantics, and preserve high-level information. We also develop a new text-graph attention to leverage both graph semantics and a pretrained LLM that exploits the text. Finally, we propose an AMR node selection loss used jointly with conventional cross-entropy loss, to create additional training signals that facilitate graph feature encoding and content selection. Experiments show that our system outperforms the state-of-the-art models on multiple long dialogue summarization datasets, especially in low-resource settings, and generalizes well to out-of-domain data."
}
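
The abstract describes training with an AMR node selection loss used jointly with the conventional cross-entropy loss. Below is a minimal PyTorch sketch of what such a joint objective could look like; the function name `joint_loss`, the weighting factor `lam`, the binary node labels, and the node mask are illustrative assumptions, not the authors' implementation.

```python
# Sketch only: a joint objective combining summary cross-entropy with an
# auxiliary node selection loss over AMR graph nodes (names and weighting
# are assumptions, not the paper's released code).
import torch
import torch.nn.functional as F


def joint_loss(summary_logits, summary_targets,
               node_logits, node_labels, node_mask, lam=0.5):
    # Standard token-level cross-entropy for the generated summary;
    # positions with target -100 (padding) are ignored.
    ce = F.cross_entropy(
        summary_logits.view(-1, summary_logits.size(-1)),
        summary_targets.view(-1),
        ignore_index=-100,
    )
    # Binary selection loss over AMR nodes: node_labels (float, 0/1) marks
    # whether a node's content should appear in the summary (hypothetical
    # labeling scheme); node_mask zeros out padded nodes.
    sel = F.binary_cross_entropy_with_logits(
        node_logits, node_labels, reduction="none"
    )
    sel = (sel * node_mask).sum() / node_mask.sum().clamp(min=1)
    # Weighted sum gives the extra training signal described in the abstract.
    return ce + lam * sel
```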