@inproceedings{khatri-etal-2021-language,
    title     = {Language Relatedness and Lexical Closeness can help Improve Multilingual {NMT}: {IITB}ombay@{M}ulti{I}ndic{NMT} {WAT}2021},
    author    = {Khatri, Jyotsana and
                 Saini, Nikhil and
                 Bhattacharyya, Pushpak},
    editor    = {Nakazawa, Toshiaki and
                 Nakayama, Hideki and
                 Goto, Isao and
                 Mino, Hideya and
                 Ding, Chenchen and
                 Dabre, Raj and
                 Kunchukuttan, Anoop and
                 Higashiyama, Shohei and
                 Manabe, Hiroshi and
                 Pa, Win Pa and
                 Parida, Shantipriya and
                 Bojar, Ond{\v{r}}ej and
                 Chu, Chenhui and
                 Eriguchi, Akiko and
                 Abe, Kaori and
                 Oda, Yusuke and
                 Sudoh, Katsuhito and
                 Kurohashi, Sadao and
                 Bhattacharyya, Pushpak},
    booktitle = {Proceedings of the 8th Workshop on Asian Translation (WAT2021)},
    month     = aug,
    year      = {2021},
    address   = {Online},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2021.wat-1.26/},
    doi       = {10.18653/v1/2021.wat-1.26},
    pages     = {217--223},
    abstract  = {Multilingual Neural Machine Translation has achieved remarkable performance by training a single translation model for multiple languages. This paper describes our submission (Team ID: CFILT-IITB) for the MultiIndicMT: An Indic Language Multilingual Task at WAT 2021. We train multilingual NMT systems by sharing encoder and decoder parameters with language embedding associated with each token in both encoder and decoder. Furthermore, we demonstrate the use of transliteration (script conversion) for Indic languages in reducing the lexical gap for training a multilingual NMT system. Further, we show improvement in performance by training a multilingual NMT system using languages of the same family, i.e., related languages.},
}
Markdown (Informal)
[Language Relatedness and Lexical Closeness can help Improve Multilingual NMT: IITBombay@MultiIndicNMT WAT2021](https://aclanthology.org/2021.wat-1.26/) (Khatri et al., WAT 2021)
ACL