@inproceedings{kim-kim-2025-nexussum,
title = "{N}exus{S}um: Hierarchical {LLM} Agents for Long-Form Narrative Summarization",
author = "Kim, Hyuntak and
Kim, Byung-Hak",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.acl-long.500/",
pages = "10120--10157",
isbn = "979-8-89176-251-0",
abstract = "Summarizing long-form narratives{---}such as books, movies, and TV scripts{---}requires capturing intricate plotlines, character interactions, and thematic coherence, a task that remains challenging for existing LLMs. We introduce NexusSum, a multi-agent LLM framework for narrative summarization that processes long-form text through a structured, sequential pipeline{---}without requiring fine-tuning. Our approach introduces two key innovations: (1) Dialogue-to-Description Transformation: A narrative-specific preprocessing method that standardizes character dialogue and descriptive text into a unified format, improving coherence. (2) Hierarchical Multi-LLM Summarization: A structured summarization pipeline that optimizes chunk processing and controls output length for accurate, high-quality summaries. Our method establishes a new state-of-the-art in narrative summarization, achieving up to a 30.0{\%} improvement in BERTScore (F1) across books, movies, and TV scripts. These results demonstrate the effectiveness of multi-agent LLMs in handling long-form content, offering a scalable approach for structured summarization in diverse storytelling domains."
}
@comment{
Markdown (Informal):
[NexusSum: Hierarchical LLM Agents for Long-Form Narrative Summarization](https://aclanthology.org/2025.acl-long.500/) (Kim & Kim, ACL 2025)
ACL
}