@inproceedings{zhang-etal-2025-dynamic-task,
title = "Dynamic Task Vector Grouping for Efficient Multi-Task Prompt Tuning",
author = "Zhang, Peiyi and
Zhang, Richong and
Nie, Zhijie and
Wang, Ziqiao",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.findings-acl.1374/",
pages = "26805--26821",
ISBN = "979-8-89176-256-5",
  abstract = "Multi-task prompt tuning utilizes multiple high-resource source tasks to improve performance on low-resource target tasks. Existing approaches transfer the soft prompt trained by combining all source tasks or from a single ``highly similar'' source task, and do so only once. However, we find that the optimal transfer performance often comes from a combination of source tasks, which is neither one nor all. Furthermore, we find that the similarity between source and target tasks also changes dynamically during fine-tuning after transferring, making a similarity calculation at the initialization stage inadequate. To address these issues, we propose a method called Dynamic Task Vector Grouping (DTVG), whose core ideas are to (1) measure task similarity with task vectors instead of soft prompts, (2) group the optimal source task combination based on two metrics: \textit{target similarity} and \textit{knowledge consistency}, and (3) dynamically update the combination at each iteration step. Extensive experiments on 26 NLP datasets under different settings demonstrate that DTVG effectively groups similar source tasks while reducing negative transfer, achieving state-of-the-art performance."
}
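
A minimal sketch of the grouping loop the abstract describes, assuming a task vector is the difference between a tuned soft prompt and its initialization and that "target similarity" can be approximated with cosine similarity; the function names, the threshold, and the omission of the knowledge-consistency metric are illustrative assumptions for this example, not the authors' implementation.

```python
# Illustrative sketch only; the cosine-similarity proxy, the threshold, and
# update_fn are assumptions made for this example, not the DTVG code.
import torch
import torch.nn.functional as F

def task_vector(tuned_prompt: torch.Tensor, init_prompt: torch.Tensor) -> torch.Tensor:
    # Assumed definition: the flattened difference between the tuned soft
    # prompt and its initialization.
    return (tuned_prompt - init_prompt).flatten()

def group_source_tasks(target_vec: torch.Tensor,
                       source_vecs: list[torch.Tensor],
                       threshold: float = 0.0) -> list[int]:
    # Keep source tasks whose task vectors align with the target's
    # (a stand-in for "target similarity"; "knowledge consistency" omitted).
    scores = [F.cosine_similarity(target_vec, v, dim=0).item() for v in source_vecs]
    return [i for i, s in enumerate(scores) if s > threshold]

def training_loop(target_prompt, target_init, source_prompts, source_inits,
                  steps, update_fn):
    # The grouping is recomputed at every fine-tuning step, so the selected
    # source-task combination can change as the target prompt is updated.
    for _ in range(steps):
        group = group_source_tasks(
            task_vector(target_prompt, target_init),
            [task_vector(p, i) for p, i in zip(source_prompts, source_inits)],
        )
        target_prompt = update_fn(target_prompt, group)  # transfer + one tuning step
    return target_prompt
```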