@inproceedings{hb-ptaszynski-2025-rbg,
title = "{RBG}-{AI}: Benefits of Multilingual Language Models for Low-Resource Languages",
author = "Hb, Barathi Ganesh and
Ptaszynski, Michal",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Tenth Conference on Machine Translation",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.wmt-1.100/",
doi = "10.18653/v1/2025.wmt-1.100",
pages = "1233--1239",
ISBN = "979-8-89176-341-8",
abstract = "This paper investigates how multilingual language models benefit low-resource languages through our submission to the WMT 2025 Low-Resource Indic Language Translation shared task. We explore whether languages from related families can effectively support translation for low-resource languages that were absent or underrepresented during model training. Using a quantized multilingual pretrained foundation model, we examine zero-shot translation capabilities and cross-lingual transfer effects across three language families: Tibeto-Burman, Indo-Aryan, and Austroasiatic. Our findings demonstrate that multilingual models failed to leverage linguistic similarities, particularly evidenced within the Tibeto-Burman family. The study provides insights into the practical feasibility of zero-shot translation for low-resource language settings and the role of language family relationships in multilingual model performance."
}