@inproceedings{zhang-etal-2025-enhancing-event,
title = "Enhancing Event-centric News Cluster Summarization via Data Sharpening and Localization Insights",
author = "Zhang, Longyin and
Zou, Bowei and
Aw, AiTi",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.801/",
pages = "16412--16426",
ISBN = "979-8-89176-251-0",
abstract = "This paper tackles the challenges of clustering news articles by main events (MEs) and summarizing these clusters, focusing on diverse languages and localized contexts. Our approach consists of four key contributions. First, we investigate the role of dynamic clustering and the integration of various ME references, including event attributions extracted by language models (LMs), in enhancing event-centric clustering. Second, we propose a data-sharpening framework that optimizes the balance between information volume and entropy in input texts, thereby optimizing generated summaries on multiple indicators. Third, we fine-tune LMs with local news articles for cross-lingual temporal question-answering and text summarization, achieving notable improvements in capturing localized contexts. Lastly, we present the first cross-lingual dataset and comprehensive evaluation metrics tailored for the event-centric news cluster summarization pipeline. Our findings enhance the understanding of news summarization across N-gram, event-level coverage, and faithfulness, providing new insights into leveraging LMs for large-scale cross-lingual and localized news analysis."
}
Markdown (Informal)
[Enhancing Event-centric News Cluster Summarization via Data Sharpening and Localization Insights](https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.801/) (Zhang et al., ACL 2025)