@inproceedings{madaan-etal-2020-transfer,
title = "Transfer Learning for Related Languages: Submissions to the {WMT}20 Similar Language Translation Task",
author = "Madaan, Lovish and
Sharma, Soumya and
Singla, Parag",
editor = {Barrault, Lo{\"i}c and
Bojar, Ond{\v{r}}ej and
Bougares, Fethi and
Chatterjee, Rajen and
Costa-juss{\`a}, Marta R. and
Federmann, Christian and
Fishel, Mark and
Fraser, Alexander and
Graham, Yvette and
Guzm{\'a}n, Paco and
Haddow, Barry and
Huck, Matthias and
Yepes, Antonio Jimeno and
Koehn, Philipp and
Martins, Andr{\'e} and
Morishita, Makoto and
Monz, Christof and
Nagata, Masaaki and
Nakazawa, Toshiaki and
Negri, Matteo},
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2020.wmt-1.46/",
pages = "402--408",
abstract = "In this paper, we describe IIT Delhi{'}s submissions to the WMT 2020 task on Similar Language Translation for four language directions: Hindi {\ensuremath{<}}-{\ensuremath{>}} Marathi and Spanish {\ensuremath{<}}-{\ensuremath{>}} Portuguese. We try out three different model settings for the translation task and select our primary and contrastive submissions on the basis of performance of these three models. For our best submissions, we fine-tune the mBART model on the parallel data provided for the task. The pre-training is done using self-supervised objectives on a large amount of monolingual data for many languages. Overall, our models are ranked in the top four of all systems for the submitted language pairs, with first rank in Spanish -{\ensuremath{>}} Portuguese."
}
Markdown (Informal)
[Transfer Learning for Related Languages: Submissions to the WMT20 Similar Language Translation Task](https://aclanthology.org/2020.wmt-1.46/) (Madaan et al., WMT 2020)
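The abstract describes fine-tuning mBART (pre-trained with self-supervised objectives on monolingual data) on the task's parallel data. Below is a minimal sketch of that kind of fine-tuning using the Hugging Face Transformers API, not the authors' released code: the checkpoint (`facebook/mbart-large-50`, chosen here because it covers Marathi), the language codes, the toy sentence pair, and the hyperparameters are all illustrative assumptions rather than values from the paper.

```python
# Minimal sketch: fine-tune an mBART checkpoint on a toy Hindi->Marathi
# parallel corpus, then translate with a forced target-language token.
# All names and settings below are illustrative assumptions.
import torch
from torch.utils.data import DataLoader
from transformers import MBart50TokenizerFast, MBartForConditionalGeneration

model_name = "facebook/mbart-large-50"  # assumed checkpoint, not the paper's
tokenizer = MBart50TokenizerFast.from_pretrained(
    model_name, src_lang="hi_IN", tgt_lang="mr_IN"
)
model = MBartForConditionalGeneration.from_pretrained(model_name)

# Toy parallel data standing in for the WMT20 Hindi-Marathi training set.
pairs = [
    ("यह एक उदाहरण वाक्य है।", "हे एक उदाहरण वाक्य आहे."),
]

def collate(batch):
    # Tokenize source sentences and targets; the tokenizer returns `labels`
    # for the targets, so the model computes the cross-entropy loss itself.
    src, tgt = zip(*batch)
    return tokenizer(
        list(src), text_target=list(tgt),
        padding=True, truncation=True, max_length=128, return_tensors="pt",
    )

loader = DataLoader(pairs, batch_size=1, collate_fn=collate)
optimizer = torch.optim.AdamW(model.parameters(), lr=3e-5)

model.train()
for batch in loader:
    out = model(**batch)   # loss comes from the `labels` in the batch
    out.loss.backward()
    optimizer.step()
    optimizer.zero_grad()

# Generation: force the Marathi language token as the first decoded token.
model.eval()
inputs = tokenizer("यह एक उदाहरण वाक्य है।", return_tensors="pt")
generated = model.generate(
    **inputs, forced_bos_token_id=tokenizer.lang_code_to_id["mr_IN"]
)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```

The same skeleton applies to the Spanish-Portuguese direction by swapping the `src_lang`/`tgt_lang` codes and the parallel data; the paper's actual submissions were selected among three model settings, which this single-loop sketch does not reproduce.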