@inproceedings{li-chu-2025-adaedit,
title = "{A}da{E}dit: Advancing Continuous Knowledge Editing For Large Language Models",
author = "Li, Qi and
Chu, Xiaowen",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.208/",
pages = "4127--4149",
ISBN = "979-8-89176-251-0",
abstract = "Knowledge editing (KE) has emerged as a prominent alternative that enables efficient and precise information modification inside language models. However, a critical challenge arises in continuous language models editing {---} a significant performance decline both in knowledge update and retention when the number of edits increases. By dissecting the perturbation weight of language model in continuous KE, we uncover that disentangled and sparsified knowledge representation can significantly alleviate the performance decline. Building on these insights, we introduce AdaEdit, a novel knowledge editing method. Extensive empirical evaluations on multiple LLMs demonstrate that our proposed methods can enhance the performance of edited LLMs in large-size continuous editing regimes, outperforming existing ones without substantially compromising the general abilities of these models."
}
Markdown (Informal)
[AdaEdit: Advancing Continuous Knowledge Editing For Large Language Models](https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.208/) (Li & Chu, ACL 2025)