@inproceedings{kim-etal-2021-revisiting,
    title = "Revisiting Pretraining with Adapters",
    author = "Kim, Seungwon and
      Shum, Alex and
      Susanj, Nathan and
      Hilgart, Jonathan",
    editor = "Rogers, Anna and
      Calixto, Iacer and
      Vuli{\'c}, Ivan and
      Saphra, Naomi and
      Kassner, Nora and
      Camburu, Oana-Maria and
      Bansal, Trapit and
      Shwartz, Vered",
    booktitle = "Proceedings of the 6th Workshop on Representation Learning for NLP (RepL4NLP-2021)",
    month = aug,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://preview.aclanthology.org/moar-dois/2021.repl4nlp-1.11/",
    doi = "10.18653/v1/2021.repl4nlp-1.11",
    pages = "90--99",
    abstract = "Pretrained language models have served as the backbone for many state-of-the-art NLP results. These models are large and expensive to train. Recent work suggests that continued pretraining on task-specific data is worth the effort as pretraining leads to improved performance on downstream tasks. We explore alternatives to full-scale task-specific pretraining of language models through the use of adapter modules, a parameter-efficient approach to transfer learning. We find that adapter-based pretraining is able to achieve comparable results to task-specific pretraining while using a fraction of the overall trainable parameters. We further explore direct use of adapters without pretraining and find that the direct fine-tuning performs mostly on par with pretrained adapter models, contradicting previously proposed benefits of continual pretraining in full pretraining fine-tuning strategies. Lastly, we perform an ablation study on task-adaptive pretraining to investigate how different hyperparameter settings can change the effectiveness of the pretraining."
}
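For context on the adapter modules the abstract refers to, below is a minimal sketch of a bottleneck adapter in the style of Houlsby et al. (2019), the kind of parameter-efficient module typically inserted into a frozen transformer backbone. This is an illustrative assumption for the reader, not the authors' implementation; the class name, hidden size, and bottleneck size are hypothetical.

```python
# Minimal sketch of a bottleneck adapter (Houlsby-style); dimensions and names
# are illustrative assumptions, not the configuration used in the paper.
import torch
import torch.nn as nn

class Adapter(nn.Module):
    def __init__(self, hidden_size: int = 768, bottleneck_size: int = 64):
        super().__init__()
        self.down = nn.Linear(hidden_size, bottleneck_size)  # down-projection
        self.act = nn.GELU()                                  # nonlinearity
        self.up = nn.Linear(bottleneck_size, hidden_size)     # up-projection

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # Residual connection preserves the frozen backbone's representation;
        # only the small adapter parameters are trained.
        return hidden_states + self.up(self.act(self.down(hidden_states)))

# Quick shape check on a dummy (batch, sequence, hidden) tensor.
adapter = Adapter()
x = torch.randn(2, 16, 768)
print(adapter(x).shape)  # torch.Size([2, 16, 768])
```

In adapter-based pretraining or fine-tuning, the backbone transformer weights stay frozen and only the adapter (and typically layer-norm and classifier) parameters are updated, which is what makes the approach use only a fraction of the trainable parameters, as the abstract notes.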