@inproceedings{yeginbergen-etal-2025-dynamic,
  title     = {Dynamic Knowledge Integration for Evidence-Driven Counter-Argument Generation with Large Language Models},
  author    = {Yeginbergen, Anar and
               Oronoz, Maite and
               Agerri, Rodrigo},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Findings of the Association for Computational Linguistics: ACL 2025},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-acl.1161/},
  doi       = {10.18653/v1/2025.findings-acl.1161},
  pages     = {22568--22584},
  isbn      = {979-8-89176-256-5},
  abstract  = {This paper investigates the role of dynamic external knowledge integration in improving counter-argument generation using Large Language Models (LLMs). While LLMs have shown promise in argumentative tasks, their tendency to generate lengthy, potentially non-factual responses highlights the need for more controlled and evidence-based approaches. We introduce a reconstructed and manually curated dataset of argument and counter-argument pairs specifically designed to balance argumentative complexity with evaluative feasibility. We also propose a new LLM-as-a-Judge evaluation methodology that shows a stronger correlation with human judgments compared to traditional reference-based metrics. Our experimental results demonstrate that integrating dynamic external knowledge from the web significantly improves the quality of generated counter-arguments, particularly in terms of relatedness, persuasiveness, and factuality. The findings suggest that combining LLMs with real-time external knowledge retrieval offers a promising direction for developing more effective and reliable counter-argumentation systems. Data and code are publicly available: https://github.com/anaryegen/counter-argument-generation},
}
Markdown (Informal)
[Dynamic Knowledge Integration for Evidence-Driven Counter-Argument Generation with Large Language Models](https://aclanthology.org/2025.findings-acl.1161/) (Yeginbergen et al., Findings 2025)
ACL