@inproceedings{samantaray-jyothi-2025-cross,
title = "Cross-lingual Transfer Dynamics in {BLOOMZ}: Insights into Multilingual Generalization",
author = "Samantaray, Sabyasachi and
Jyothi, Preethi",
editor = "Adelani, David Ifeoluwa and
Arnett, Catherine and
Ataman, Duygu and
Chang, Tyler A. and
Gonen, Hila and
Raja, Rahul and
Schmidt, Fabian and
Stap, David and
Wang, Jiayi",
booktitle = "Proceedings of the 5th Workshop on Multilingual Representation Learning (MRL 2025)",
month = nov,
year = "2025",
address = "Suzhuo, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/lei-li-partial-disambiguation/2025.mrl-main.4/",
pages = "47--61",
ISBN = "979-8-89176-345-6",
abstract = "Multilingual large language models have emerged as a promising solution for resource-constrained settings, with significant efforts aimed towards improving multilingual capabilities of English-centric pretrained models. However, the broader cross-lingual implications of fine-tuning interventions remain understudied. This work examines instruction tuning (IT) over the BLOOMZ model for Question Answering (QA) in low-resource settings, with special emphasis on transfer dynamics across several languages. Our findings reveal two critical insights: first, IT on the target language can negatively impact its own performance in constrained short-span generation tasks due to overgeneration tendencies; second, in QA tasks, IT appears to suppress performance in some interfering languages, thereby enhancing capabilities in some target Indic languages by $extbf{more than doubling}$ QA performance. These results highlight important trade-offs in multilingual LLM adaptation and enhance our understanding of cross-lingual transfer mechanisms."
}