@inproceedings{yi-etal-2025-edit,
    title     = {Can We Edit {LLMs} for Long-Tail Biomedical Knowledge?},
    author    = {Yi, Xinhao and
      Lever, Jake and
      Bryson, Kevin and
      Meng, Zaiqiao},
    editor    = {Christodoulopoulos, Christos and
      Chakraborty, Tanmoy and
      Rose, Carolyn and
      Peng, Violet},
    booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2025},
    month     = nov,
    year      = {2025},
    address   = {Suzhou, China},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.findings-emnlp.1399/},
    doi       = {10.18653/v1/2025.findings-emnlp.1399},
    pages     = {25662--25679},
    isbn      = {979-8-89176-335-7},
    abstract  = {Knowledge editing has emerged as an effective approach for updating large language models (LLMs) by modifying their internal knowledge. However, their application to the biomedical domain faces unique challenges due to the long-tailed distribution of biomedical knowledge, where rare and infrequent information is prevalent. In this paper, we conduct the first comprehensive study to investigate the effectiveness of knowledge editing methods for editing long-tail biomedical knowledge. Our results indicate that, while existing editing methods can enhance LLMs' performance on long-tail biomedical knowledge, their performance on long-tail knowledge remains inferior to that on high-frequency popular knowledge, even after editing. Our further analysis reveals that long-tail biomedical knowledge contains a significant amount of one-to-many knowledge, where one subject and relation link to multiple objects. This high prevalence of one-to-many knowledge limits the effectiveness of knowledge editing in improving LLMs' understanding of long-tail biomedical knowledge, highlighting the need for tailored strategies to bridge this performance gap.}
}
Markdown (Informal)
[Can We Edit LLMs for Long-Tail Biomedical Knowledge?](https://aclanthology.org/2025.findings-emnlp.1399/) (Yi et al., Findings 2025)
ACL
- Xinhao Yi, Jake Lever, Kevin Bryson, and Zaiqiao Meng. 2025. Can We Edit LLMs for Long-Tail Biomedical Knowledge? In Findings of the Association for Computational Linguistics: EMNLP 2025, pages 25662–25679, Suzhou, China. Association for Computational Linguistics.