@inproceedings{kunz-holmstrom-2024-impact,
title = "The Impact of Language Adapters in Cross-Lingual Transfer for {NLU}",
author = {Kunz, Jenny and
Holmstr{\"o}m, Oskar},
editor = {V{\'a}zquez, Ra{\'u}l and
Mickus, Timothee and
Tiedemann, J{\"o}rg and
Vuli{\'c}, Ivan and
{\"U}st{\"u}n, Ahmet},
booktitle = "Proceedings of the 1st Workshop on Modular and Open Multilingual NLP (MOOMIN 2024)",
month = mar,
year = "2024",
address = "St Julians, Malta",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2024.moomin-1.4/",
pages = "24--43",
abstract = "Modular deep learning has been proposed for the efficient adaption of pre-trained models to new tasks, domains and languages. In particular, combining language adapters with task adapters has shown potential where no supervised data exists for a language. In this paper, we explore the role of language adapters in zero-shot cross-lingual transfer for natural language understanding (NLU) benchmarks. We study the effect of including a target-language adapter in detailed ablation studies with two multilingual models and three multilingual datasets. Our results show that the effect of target-language adapters is highly inconsistent across tasks, languages and models. Retaining the source-language adapter instead often leads to an equivalent, and sometimes to a better, performance. Removing the language adapter after training has only a weak negative effect, indicating that the language adapters do not have a strong impact on the predictions."
}