@inproceedings{klimaszewski-etal-2025-train,
title = "No Train but Gain: Language Arithmetic for training-free Language Adapters enhancement",
author = "Klimaszewski, Mateusz and
Andruszkiewicz, Piotr and
Birch, Alexandra",
editor = "Rambow, Owen and
Wanner, Leo and
Apidianaki, Marianna and
Al-Khalifa, Hend and
Di Eugenio, Barbara and
Schockaert, Steven",
booktitle = "Proceedings of the 31st International Conference on Computational Linguistics",
month = jan,
year = "2025",
address = "Abu Dhabi, UAE",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2025.coling-main.737/",
pages = "11121--11134",
abstract = "Modular deep learning is the state-of-the-art solution for lifting the curse of multilinguality, preventing the impact of negative interference and enabling cross-lingual performance in Multilingual Pre-trained Language Models. However, a trade-off of this approach is the reduction in positive transfer learning from closely related languages. In response, we introduce a novel method called language arithmetic, which enables training-free post-processing to address this limitation. Extending the task arithmetic framework, we apply learning via addition to the language adapters, transitioning the framework from a multi-task to a multilingual setup. The effectiveness of the proposed solution is demonstrated on three downstream tasks in a MAD-X-based set of cross-lingual schemes, acting as a post-processing procedure. Language arithmetic consistently improves the baselines with significant gains, especially in the most challenging case of zero-shot application. Our code and models are available at https://github.com/mklimasz/language-arithmetic."
}
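
For readers unfamiliar with the task-arithmetic formulation the abstract refers to, the sketch below illustrates the general "learning via addition" idea applied to adapter weights: compute a language vector as the difference between a trained language adapter and its shared initialisation, then add a scaled copy of that vector to another language's adapter without further training. This is a minimal sketch under those assumptions, not the authors' implementation (see the linked GitHub repository for that); the helper names, the scaling coefficient `lam`, and the use of plain PyTorch state dicts are illustrative choices made for this example.

```python
# Sketch of task-arithmetic-style "learning via addition" on adapter weights.
# NOT the authors' implementation; see https://github.com/mklimasz/language-arithmetic.
# Helper names and the scaling coefficient `lam` are assumptions for illustration.
from typing import Dict

import torch


def language_vector(adapter: Dict[str, torch.Tensor],
                    base: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
    """Difference between a trained language adapter and its initialisation."""
    return {name: adapter[name] - base[name] for name in adapter}


def add_language_vector(target: Dict[str, torch.Tensor],
                        vector: Dict[str, torch.Tensor],
                        lam: float = 0.5) -> Dict[str, torch.Tensor]:
    """Training-free post-processing: add a scaled language vector to a target adapter."""
    return {name: target[name] + lam * vector[name] for name in target}


if __name__ == "__main__":
    # Toy adapters: a shared initialisation plus two "trained" language adapters.
    base = {"down.weight": torch.zeros(4, 8), "up.weight": torch.zeros(8, 4)}
    target_lang = {k: v + torch.randn_like(v) for k, v in base.items()}
    related_lang = {k: v + torch.randn_like(v) for k, v in base.items()}

    # Arithmetic over adapter parameters: inject knowledge from a related
    # language into the target-language adapter without any gradient updates.
    vec = language_vector(related_lang, base)
    enhanced = add_language_vector(target_lang, vec, lam=0.5)
    print({k: v.shape for k, v in enhanced.items()})
```

The scaled-addition form mirrors standard task arithmetic; in practice the scaling coefficient would be tuned on validation data rather than fixed, and the enhanced state dict would be loaded back into the adapter module before cross-lingual evaluation.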