@inproceedings{zeng-etal-2025-task,
  title     = {Task-wrapped Continual Learning in Task-Oriented Dialogue Systems},
  author    = {Zeng, Min and
               Yang, Haiqin and
               Chen, Xi and
               Guo, Yike},
  editor    = {Chiruzzo, Luis and
               Ritter, Alan and
               Wang, Lu},
  booktitle = {Findings of the Association for Computational Linguistics: {NAACL} 2025},
  month     = apr,
  year      = {2025},
  address   = {Albuquerque, New Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-naacl.174/},
  pages     = {3173--3183},
  isbn      = {979-8-89176-195-7},
  abstract  = {Continual learning is vital for task-oriented dialogue systems (ToDs), and AdapterCL, equipped with residual adapters, has proven effectiveness in this domain. However, its performance is limited by training separate adapters for each task, preventing global knowledge sharing. To address this, we propose Task-wrapped Continual Learning (TCL), a novel framework that employs Task-Wrapped Adapters (TWAs), to simultaneously learn both global and task-specific information through parameter sharing. TCL leverages task-conditioned hypernetworks to transfer global knowledge across tasks, enabling TWAs to start from more informed initialization, efficiently learning task-specific details while reducing model parameters. Additionally, the simple, linear structure of both hypernetworks and TWAs ensure stable training, with task-free inference supported through effective loss utilization. Across 37 ToD domains, TCL consistently outperforms AdapterCL, significantly reducing forgetting. Remarkably, by setting the task embedding dimension to 1, TCL achieves a 4.76{\%} improvement over AdapterCL while using only 46{\%} of the parameters. These findings position TWA as a lightweight, powerful alternative to traditional adapters, offering a promising solution for continual learning in ToDs. The code is available at https://github.com/cloversjtu/TCL.},
}
Markdown (Informal)
[Task-wrapped Continual Learning in Task-Oriented Dialogue Systems](https://aclanthology.org/2025.findings-naacl.174/) (Zeng et al., Findings of NAACL 2025)
ACL