@inproceedings{dong-etal-2025-memit,
title = "{MEMIT}-Merge: Addressing {MEMIT}{'}s Key-Value Conflicts in Same-Subject Batch Editing for {LLM}s",
author = "Dong, Zilu and
Shen, Xiangqing and
Xia, Rui",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/display_plenaries/2025.findings-acl.415/",
pages = "7952--7960",
ISBN = "979-8-89176-256-5",
abstract = "As large language models (LLMs) continue to scale up, knowledge editing techniques that modify models' internal knowledge without full retraining have gained significant attention. MEMIT, a prominent batch editing algorithm, stands out for its capability to perform mass knowledge modifications. However, we uncovers a critical limitation that MEMIT{'}s editing efficacy significantly deteriorates when processing batches containing multiple edits sharing the same subject. Our analysis reveals the root cause lies in MEMIT{'}s key-value modeling framework: when multiple facts with the same subject in a batch are modeled through MEMIT{'}s key-value mechanism, identical keys (derived from the shared subject) are forced to represent different values (corresponding to distinct knowledge), resulting in update conflicts during editing. Addressing this issue, we propose MEMIT-Merge, an enhanced approach that merges value computation processes for facts sharing the same subject, effectively resolving the performance degradation in same-subject batch editing scenarios. Experimental results demonstrate that at a batch size of 5, while the original MEMIT{'}s success rate drops to 46{\%}, MEMIT-Merge maintains a 98{\%} editing success rate, showcasing remarkable robustness to subject entity collisions."
}