@inproceedings{kumar-etal-2021-learning-curricula,
title = "Learning Curricula for Multilingual Neural Machine Translation Training",
author = "Kumar, Gaurav and
Koehn, Philipp and
Khudanpur, Sanjeev",
editor = "Duh, Kevin and
Guzm{\'a}n, Francisco",
booktitle = "Proceedings of Machine Translation Summit XVIII: Research Track",
month = aug,
year = "2021",
address = "Virtual",
publisher = "Association for Machine Translation in the Americas",
url = "https://aclanthology.org/2021.mtsummit-research.1",
pages = "1--9",
    abstract = "Low-resource Multilingual Neural Machine Translation (MNMT) is typically tasked with improving the translation performance on one or more language pairs with the aid of high-resource language pairs. In this paper, we propose two simple search-based curricula {--} orderings of the multilingual training data {--} which help improve translation performance in conjunction with existing techniques such as fine-tuning. Additionally, we attempt to learn a curriculum for MNMT from scratch jointly with the training of the translation system using contextual multi-arm bandits. We show on the FLORES low-resource translation dataset that these learned curricula can provide better starting points for fine-tuning and improve the overall performance of the translation system.",
}