@inproceedings{wang-etal-2025-microedit,
  title     = {{MicroEdit}: Neuron-level Knowledge Disentanglement and Localization in Lifelong Model Editing},
  author    = {Wang, Shiqi and
               Wang, Qi and
               Niu, Runliang and
               Kong, He and
               Chang, Yi},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.emnlp-main.1719/},
  doi       = {10.18653/v1/2025.emnlp-main.1719},
  pages     = {33870--33884},
  isbn      = {979-8-89176-332-6},
  abstract  = {Large language models (LLMs) require continual knowledge updates to keep pace with the evolving world. While various model editing methods have been proposed, most face critical challenges in the context of lifelong learning due to two fundamental limitations: (1) Edit Overshooting - parameter updates intended for a specific fact spill over to unrelated regions, causing interference with previously retained knowledge; and (2) Knowledge Entanglement - polysemantic neurons' overlapping encoding of multiple concepts makes it difficult to isolate and edit a single fact. In this paper, we propose MicroEdit, a neuron-level editing method that performs minimal and controlled interventions within LLMs. By leveraging a sparse autoencoder (SAE), MicroEdit disentangles knowledge representations and activates only a minimal set of necessary neurons for precise parameter updates. This targeted design enables fine-grained control over the editing scope, effectively mitigating interference and preserving unrelated knowledge. Extensive experiments show that MicroEdit outperforms prior methods and robustly handles lifelong knowledge editing across QA and Hallucination settings on LLaMA and Mistral.},
}

@comment{Informal markdown citation (kept from the ACL Anthology export page):
  [MicroEdit: Neuron-level Knowledge Disentanglement and Localization in Lifelong Model Editing](https://aclanthology.org/2025.emnlp-main.1719/) (Wang et al., EMNLP 2025)
  ACL
}