@inproceedings{wu-etal-2025-dynamic,
  title     = {Dynamic Prefix as Instructor for Incremental Named Entity Recognition: A Unified {Seq2Seq} Generation Framework},
  author    = {Wu, Zihao and
               Hua, YongXiang and
               Zhu, Yongxin and
               Zhang, Fang and
               Xu, Linli},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Findings of the Association for Computational Linguistics: ACL 2025},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-acl.172/},
  doi       = {10.18653/v1/2025.findings-acl.172},
  pages     = {3294--3306},
  isbn      = {979-8-89176-256-5},
  abstract  = {The Incremental Named Entity Recognition (INER) task aims to update a model to extract entities from an expanding set of entity type candidates due to concerns related to data privacy and scarcity. However, conventional sequence labeling approaches to INER often suffer from the catastrophic forgetting problem, which leads to the degradation of the model{'}s performance on previously encountered entity types. In this paper, we formalize INER as a unified seq2seq generation task and propose a parameter-efficient dynamic prefix method. By employing the dynamic prefix as a task instructor to guide the generative model, our approach can preserve task-invariant knowledge while adapting to new entities with minimal parameter updates, making it particularly effective in low-resource scenarios. Additionally, we introduce a generative label augmentation strategy with dual optimization objectives including a self-entropy loss and a task-aware similarity loss to enable optimal balance between stability and plasticity. Empirical experiments on NER benchmarks demonstrate the effectiveness of our proposed method in addressing the challenges associated with INER.},
}
Markdown (Informal)
[Dynamic Prefix as Instructor for Incremental Named Entity Recognition: A Unified Seq2Seq Generation Framework](https://aclanthology.org/2025.findings-acl.172/) (Wu et al., Findings 2025)
ACL