@inproceedings{dabre-chakrabarty-2020-nicts,
title = "{NICT}'s Submission To {WAT} 2020: How Effective Are Simple Many-To-Many Neural Machine Translation Models?",
author = "Dabre, Raj and
Chakrabarty, Abhisek",
editor = "Nakazawa, Toshiaki and
Nakayama, Hideki and
Ding, Chenchen and
Dabre, Raj and
Kunchukuttan, Anoop and
Pa, Win Pa and
Bojar, Ond{\v{r}}ej and
Parida, Shantipriya and
Goto, Isao and
Mino, Hideya and
Manabe, Hiroshi and
Sudoh, Katsuhito and
Kurohashi, Sadao and
Bhattacharyya, Pushpak",
booktitle = "Proceedings of the 7th Workshop on Asian Translation",
month = dec,
year = "2020",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wat-1.9/",
doi = "10.18653/v1/2020.wat-1.9",
pages = "98--102",
abstract = "In this paper we describe our team's (NICT-5) Neural Machine Translation (NMT) models whose translations were submitted to shared tasks of the 7th Workshop on Asian Translation. We participated in the Indic language multilingual sub-task as well as the NICT-SAP multilingual multi-domain sub-task. We focused on naive many-to-many NMT models which gave reasonable translation quality despite their simplicity. Our observations are twofold: (a.) Many-to-many models suffer from a lack of consistency where the translation quality for some language pairs is very good but for some others it is terrible when compared against one-to-many and many-to-one baselines. (b.) Oversampling smaller corpora does not necessarily give the best translation quality for the language pair associated with that corpus."
}
Markdown (Informal)
[NICT's Submission To WAT 2020: How Effective Are Simple Many-To-Many Neural Machine Translation Models?](https://aclanthology.org/2020.wat-1.9/) (Dabre & Chakrabarty, WAT 2020)