@inproceedings{khatri-etal-2021-language-model,
title = "Language Model Pretraining and Transfer Learning for Very Low Resource Languages",
author = "Khatri, Jyotsana and
Murthy, Rudra and
Bhattacharyya, Pushpak",
editor = "Barrault, Loic and
Bojar, Ondrej and
Bougares, Fethi and
Chatterjee, Rajen and
Costa-jussa, Marta R. and
Federmann, Christian and
Fishel, Mark and
Fraser, Alexander and
Freitag, Markus and
Graham, Yvette and
Grundkiewicz, Roman and
Guzman, Paco and
Haddow, Barry and
Huck, Matthias and
Yepes, Antonio Jimeno and
Koehn, Philipp and
Kocmi, Tom and
Martins, Andre and
Morishita, Makoto and
Monz, Christof",
booktitle = "Proceedings of the Sixth Conference on Machine Translation",
month = nov,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.wmt-1.106/",
pages = "995--998",
abstract = "This paper describes our submission for the shared task on Unsupervised MT and Very Low Resource Supervised MT at WMT 2021. We submitted systems for two language pairs: German {\ensuremath{\leftrightarrow}} Upper Sorbian (de {\ensuremath{\leftrightarrow}} hsb) and German-Lower Sorbian (de {\ensuremath{\leftrightarrow}} dsb). For de {\ensuremath{\leftrightarrow}} hsb, we pretrain our system using MASS (Masked Sequence to Sequence) objective and then finetune using iterative back-translation. Final finetunng is performed using the parallel data provided for translation objective. For de {\ensuremath{\leftrightarrow}} dsb, no parallel data is provided in the task, we use final de {\ensuremath{\leftrightarrow}} hsb model as initialization of the de {\ensuremath{\leftrightarrow}} dsb model and train it further using iterative back-translation, using the same vocabulary as used in the de {\ensuremath{\leftrightarrow}} hsb model."
}