@inproceedings{zeng-etal-2026-sparse,
  title         = {Sparse Adapter Fusion for Continual Learning in {NLP}},
  author        = {Zeng, Min and
                   Chen, Xi and
                   Yang, Haiqin and
                   Guo, Yike},
  editor        = {Demberg, Vera and
                   Inui, Kentaro and
                   Marquez, Llu{\'i}s},
  booktitle     = {Proceedings of the 19th Conference of the {European} Chapter of the {Association} for {Computational} {Linguistics} (Volume 1: Long Papers)},
  month         = mar,
  year          = {2026},
  address       = {Rabat, Morocco},
  publisher     = {Association for Computational Linguistics},
  url           = {https://preview.aclanthology.org/ingest-eacl/2026.eacl-long.37/},
  pages         = {852--863},
  isbn          = {979-8-89176-380-7},
  internal-note = {NOTE(review): url is a temporary preview/ingest link; replace with the canonical https://aclanthology.org/ URL once the volume is published -- confirm},
  abstract      = {Continual learning in natural language processing plays a crucial role in adapting to evolving data and preventing catastrophic forgetting. Despite significant progress, existing methods still face challenges, such as inefficient parameter reuse across tasks, risking catastrophic forgetting when tasks are dissimilar, and the unnecessary introduction of new parameters for each task, which hampers knowledge sharing among similar tasks. To tackle these issues, we propose a Sparse Adapter Fusion Method (SAFM), which dynamically fuses old and new adapters to address these challenges. SAFM operates in two stages: the decision stage and the tuning stage. In the decision stage, SAFM determines whether to incorporate a new adapter, reuse an existing one, or add an empty adapter. The architecture search procedure, designed to prioritize reusing or adding empty adapters, minimizes parameter consumption and maximizes reuse. In the tuning stage, SAFM especially facilitates a layer-wise loss to encourage differentiation between adapters, effectively capturing knowledge within the same task. Experimental results consistently show that SAFM outperforms state-of-the-art (SOTA) methods, achieving comparable performance while utilizing less than 60{\%} of the parameters.},
}
Markdown (Informal)
[Sparse Adapter Fusion for Continual Learning in NLP](https://preview.aclanthology.org/ingest-eacl/2026.eacl-long.37/) (Zeng et al., EACL 2026)
ACL
- Min Zeng, Xi Chen, Haiqin Yang, and Yike Guo. 2026. Sparse Adapter Fusion for Continual Learning in NLP. In Proceedings of the 19th Conference of the European Chapter of the Association for Computational Linguistics (Volume 1: Long Papers), pages 852–863, Rabat, Morocco. Association for Computational Linguistics.