@inproceedings{wang-etal-2020-multi,
title = "Multi-task Learning for Multilingual Neural Machine Translation",
author = "Wang, Yiren and
Zhai, ChengXiang and
Hassan, Hany",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2020.emnlp-main.75/",
doi = "10.18653/v1/2020.emnlp-main.75",
pages = "1022--1034",
abstract = "While monolingual data has been shown to be useful in improving bilingual neural machine translation (NMT), effectively and efficiently leveraging monolingual data for Multilingual NMT (MNMT) systems is a less explored area. In this work, we propose a multi-task learning (MTL) framework that jointly trains the model with the translation task on bitext data and two denoising tasks on the monolingual data. We conduct extensive empirical studies on MNMT systems with 10 language pairs from WMT datasets. We show that the proposed approach can effectively improve the translation quality for both high-resource and low-resource languages with large margin, achieving significantly better results than the individual bilingual models. We also demonstrate the efficacy of the proposed approach in the zero-shot setup for language pairs without bitext training data. Furthermore, we show the effectiveness of MTL over pre-training approaches for both NMT and cross-lingual transfer learning NLU tasks; the proposed approach outperforms massive scale models trained on single task."
}
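The abstract's training recipe, one shared model optimized jointly on a translation objective over bitext and two denoising objectives over monolingual data, can be sketched roughly as follows. This is a minimal PyTorch illustration under stated assumptions, not the authors' implementation: the toy model, the `noisy` corruption function, and the equal weighting of the three losses are all placeholders; the paper's actual denoising tasks and task-balancing scheme are specified in the full text.

```python
import torch
import torch.nn as nn

# Toy multi-task setup, loosely mirroring the paper's recipe: one shared
# seq2seq model trained jointly on translation (bitext) and two denoising
# tasks (monolingual). Every name, shape, and corruption choice below is
# an illustrative assumption, not the authors' implementation.

vocab, dim = 100, 32
model = nn.Transformer(d_model=dim, nhead=4, num_encoder_layers=2,
                       num_decoder_layers=2, batch_first=True)
embed = nn.Embedding(vocab, dim)
proj = nn.Linear(dim, vocab)
loss_fn = nn.CrossEntropyLoss()

def seq2seq_loss(src_ids, tgt_ids):
    """Teacher-forced cross-entropy: decoder sees tgt[:, :-1], predicts tgt[:, 1:]."""
    tgt_in = embed(tgt_ids[:, :-1])
    causal = model.generate_square_subsequent_mask(tgt_in.size(1))
    out = model(embed(src_ids), tgt_in, tgt_mask=causal)
    return loss_fn(proj(out).reshape(-1, vocab), tgt_ids[:, 1:].reshape(-1))

def noisy(ids, p=0.15):
    """Stand-in corruption (random token replacement); the paper's actual
    masking/noising schemes for its two denoising tasks are not assumed here."""
    mask = torch.rand(ids.shape) < p
    return torch.where(mask, torch.randint_like(ids, vocab), ids)

# One joint step over random stand-in batches: translation on bitext plus
# denoising reconstruction on two monolingual batches, equally weighted.
src = torch.randint(0, vocab, (8, 10))
tgt = torch.randint(0, vocab, (8, 10))
mono_a = torch.randint(0, vocab, (8, 10))
mono_b = torch.randint(0, vocab, (8, 10))

loss = (seq2seq_loss(src, tgt)                 # translation task
        + seq2seq_loss(noisy(mono_a), mono_a)  # denoising task 1
        + seq2seq_loss(noisy(mono_b), mono_b)) # denoising task 2
loss.backward()
```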
Markdown (Informal)
[Multi-task Learning for Multilingual Neural Machine Translation](https://aclanthology.org/2020.emnlp-main.75/) (Wang et al., EMNLP 2020)