@inproceedings{zhu-etal-2020-language,
title = "Language-aware Interlingua for Multilingual Neural Machine Translation",
author = "Zhu, Changfeng and
Yu, Heng and
Cheng, Shanbo and
Luo, Weihua",
editor = "Jurafsky, Dan and
Chai, Joyce and
Schluter, Natalie and
Tetreault, Joel",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2020.acl-main.150/",
doi = "10.18653/v1/2020.acl-main.150",
pages = "1650--1655",
abstract = "Multilingual neural machine translation (NMT) has led to impressive accuracy improvements in low-resource scenarios by sharing common linguistic information across languages. However, the traditional multilingual model fails to capture the diversity and specificity of different languages, resulting in inferior performance compared with individual models that are sufficiently trained. In this paper, we incorporate a language-aware interlingua into the Encoder-Decoder architecture. The interlingual network enables the model to learn a language-independent representation from the semantic spaces of different languages, while still allowing for language-specific specialization of a particular language-pair. Experiments show that our proposed method achieves remarkable improvements over state-of-the-art multilingual NMT baselines and produces comparable performance with strong individual models."
}