@inproceedings{chau-smith-2021-specializing,
title = "Specializing Multilingual Language Models: An Empirical Study",
author = "Chau, Ethan C. and
Smith, Noah A.",
editor = "Ataman, Duygu and
Birch, Alexandra and
Conneau, Alexis and
Firat, Orhan and
Ruder, Sebastian and
{\c{S}}ahin, G{\"o}zde G{\"u}l",
booktitle = "Proceedings of the 1st Workshop on Multilingual Representation Learning",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.mrl-1.5/",
doi = "10.18653/v1/2021.mrl-1.5",
pages = "51--61",
abstract = "Pretrained multilingual language models have become a common tool in transferring NLP capabilities to low-resource languages, often with adaptations. In this work, we study the performance, extensibility, and interaction of two such adaptations: vocabulary augmentation and script transliteration. Our evaluations on part-of-speech tagging, universal dependency parsing, and named entity recognition in nine diverse low-resource languages uphold the viability of these approaches while raising new questions around how to optimally adapt multilingual models to low-resource settings."
}