@inproceedings{dabre-etal-2019-exploiting,
title = "Exploiting Multilingualism through Multistage Fine-Tuning for Low-Resource Neural Machine Translation",
author = "Dabre, Raj and
Fujita, Atsushi and
Chu, Chenhui",
editor = "Inui, Kentaro and
Jiang, Jing and
Ng, Vincent and
Wan, Xiaojun",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/D19-1146/",
doi = "10.18653/v1/D19-1146",
pages = "1410--1416",
    abstract = "This paper highlights the impressive utility of multi-parallel corpora for transfer learning in a one-to-many low-resource neural machine translation (NMT) setting. We report on a systematic comparison of multistage fine-tuning configurations, consisting of (1) pre-training on an external large (209k{--}440k) parallel corpus for English and a helping target language, (2) mixed pre-training or fine-tuning on a mixture of the external and low-resource (18k) target parallel corpora, and (3) pure fine-tuning on the target parallel corpora. Our experiments confirm that multi-parallel corpora are extremely useful despite their scarcity and content-wise redundancy, thus exhibiting the true power of multilingualism. Even when the helping target language is not one of the target languages of our concern, our multistage fine-tuning can give 3{--}9 BLEU score gains over a simple one-to-one model."
}
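
The abstract describes a three-stage training schedule: (1) pre-train on a large external English-to-helping-language corpus, (2) continue training on a mixture of the external and the small target corpora, and (3) fine-tune on the target corpora alone. Below is a minimal sketch of that schedule in plain PyTorch with a toy model and synthetic data; the corpus sizes, the `TinyNMT` model, and the helper functions (`fake_corpus`, `train_stage`) are illustrative assumptions, not the authors' implementation or hyperparameters.

```python
# Minimal sketch of multistage fine-tuning: the same model parameters are carried
# through pre-training, mixed fine-tuning, and pure fine-tuning.
import torch
import torch.nn as nn

VOCAB, PAD = 1000, 0

def fake_corpus(n_pairs, src_len=12, tgt_len=12):
    """Stand-in for a tokenized parallel corpus: (source, target) id tensors."""
    src = torch.randint(1, VOCAB, (n_pairs, src_len))
    tgt = torch.randint(1, VOCAB, (n_pairs, tgt_len))
    return src, tgt

class TinyNMT(nn.Module):
    """Toy encoder-decoder; a real system would be a full Transformer NMT model."""
    def __init__(self, d=64):
        super().__init__()
        self.emb = nn.Embedding(VOCAB, d, padding_idx=PAD)
        self.core = nn.Transformer(d_model=d, nhead=4, num_encoder_layers=2,
                                   num_decoder_layers=2, batch_first=True)
        self.out = nn.Linear(d, VOCAB)

    def forward(self, src, tgt_in):
        # Causal mask so the decoder only attends to earlier target positions.
        mask = nn.Transformer.generate_square_subsequent_mask(tgt_in.size(1))
        h = self.core(self.emb(src), self.emb(tgt_in), tgt_mask=mask)
        return self.out(h)

def train_stage(model, corpora, epochs=1, lr=1e-4, batch=32):
    """One training stage over a list of (src, tgt) corpora, concatenated."""
    src = torch.cat([s for s, _ in corpora])
    tgt = torch.cat([t for _, t in corpora])
    opt = torch.optim.Adam(model.parameters(), lr=lr)
    loss_fn = nn.CrossEntropyLoss(ignore_index=PAD)
    for _ in range(epochs):
        perm = torch.randperm(len(src))
        for i in range(0, len(src), batch):
            idx = perm[i:i + batch]
            s, t = src[idx], tgt[idx]
            logits = model(s, t[:, :-1])  # teacher forcing on shifted targets
            loss = loss_fn(logits.reshape(-1, VOCAB), t[:, 1:].reshape(-1))
            opt.zero_grad()
            loss.backward()
            opt.step()
    return model

# Corpora: a large external English->helper-language corpus and a small
# low-resource English->target corpus (sizes shrunk for the sketch).
external = fake_corpus(2000)   # stands in for the 209k-440k helping corpus
target   = fake_corpus(200)    # stands in for the 18k low-resource corpus

model = TinyNMT()
train_stage(model, [external])          # (1) pre-train on the helping pair
train_stage(model, [external, target])  # (2) mixed fine-tuning on both corpora
train_stage(model, [target])            # (3) pure fine-tuning on the target pair
```

In the paper's one-to-many setting, each source sentence would typically also carry a tag indicating the desired target language, and each stage would run to convergence on real tokenized corpora; the sketch only illustrates how a single set of parameters is passed through the three stages.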
Markdown (Informal)
[Exploiting Multilingualism through Multistage Fine-Tuning for Low-Resource Neural Machine Translation](https://aclanthology.org/D19-1146/) (Dabre et al., EMNLP-IJCNLP 2019)
ACL
Raj Dabre, Atsushi Fujita, and Chenhui Chu. 2019. Exploiting Multilingualism through Multistage Fine-Tuning for Low-Resource Neural Machine Translation. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 1410–1416, Hong Kong, China. Association for Computational Linguistics.