@inproceedings{wang-neubig-2019-target,
    title     = {Target Conditioned Sampling: Optimizing Data Selection for Multilingual Neural Machine Translation},
    author    = {Wang, Xinyi and
                 Neubig, Graham},
    editor    = {Korhonen, Anna and
                 Traum, David and
                 M{\`a}rquez, Llu{\'\i}s},
    booktitle = {Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics},
    month     = jul,
    year      = {2019},
    address   = {Florence, Italy},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/P19-1583/},
    doi       = {10.18653/v1/P19-1583},
    pages     = {5823--5828},
    abstract  = {To improve low-resource Neural Machine Translation (NMT) with multilingual corpus, training on the most related high-resource language only is generally more effective than using all data available (Neubig and Hu, 2018). However, it remains a question whether a smart data selection strategy can further improve low-resource NMT with data from other auxiliary languages. In this paper, we seek to construct a sampling distribution over all multilingual data, so that it minimizes the training loss of the low-resource language. Based on this formulation, we propose an efficient algorithm, (TCS), which first samples a target sentence, and then conditionally samples its source sentence. Experiments show TCS brings significant gains of up to 2 BLEU improvements on three of four languages we test, with minimal training overhead.},
}
Markdown (Informal)
[Target Conditioned Sampling: Optimizing Data Selection for Multilingual Neural Machine Translation](https://aclanthology.org/P19-1583/) (Wang & Neubig, ACL 2019)
ACL