@inproceedings{guo-etal-2021-parameter,
  title     = {Parameter-Efficient Transfer Learning with {Diff} Pruning},
  author    = {Guo, Demi and
               Rush, Alexander and
               Kim, Yoon},
  editor    = {Zong, Chengqing and
               Xia, Fei and
               Li, Wenjie and
               Navigli, Roberto},
  booktitle = {Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers)},
  month     = aug,
  year      = {2021},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.acl-long.378/},
  doi       = {10.18653/v1/2021.acl-long.378},
  pages     = {4884--4896},
  abstract  = {The large size of pretrained networks makes them difficult to deploy for multiple tasks in storage-constrained settings. Diff pruning enables parameter-efficient transfer learning that scales well with new tasks. The approach learns a task-specific ``diff'' vector that extends the original pretrained parameters. This diff vector is adaptively pruned during training with a differentiable approximation to the L0-norm penalty to encourage sparsity. As the number of tasks increases, diff pruning remains parameter-efficient, as it requires storing only a small diff vector for each task. Since it does not require access to all tasks during training, it is attractive in on-device deployment settings where tasks arrive in stream or even from different providers. Diff pruning can match the performance of finetuned baselines on the GLUE benchmark while only modifying 0.5{\%} of the pretrained model{'}s parameters per task and scales favorably in comparison to popular pruning approaches.}
}
Markdown (Informal)
[Parameter-Efficient Transfer Learning with Diff Pruning](https://aclanthology.org/2021.acl-long.378/) (Guo et al., ACL-IJCNLP 2021)
ACL
- Demi Guo, Alexander Rush, and Yoon Kim. 2021. Parameter-Efficient Transfer Learning with Diff Pruning. In Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers), pages 4884–4896, Online. Association for Computational Linguistics.