@inproceedings{dabre-chakrabarty-2021-nict,
title = "{NICT}-5{'}s Submission To {WAT} 2021: {MBART} Pre-training And In-Domain Fine Tuning For Indic Languages",
author = "Dabre, Raj and
Chakrabarty, Abhisek",
booktitle = "Proceedings of the 8th Workshop on Asian Translation (WAT2021)",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.wat-1.23",
doi = "10.18653/v1/2021.wat-1.23",
pages = "198--204",
abstract = "In this paper we describe our submission to the multilingual Indic language translation wtask {``}MultiIndicMT{''} under the team name {``}NICT-5{''}. This task involves translation from 10 Indic languages into English and vice-versa. The objective of the task was to explore the utility of multilingual approaches using a variety of in-domain and out-of-domain parallel and monolingual corpora. Given the recent success of multilingual NMT pre-training we decided to explore pre-training an MBART model on a large monolingual corpus collection covering all languages in this task followed by multilingual fine-tuning on small in-domain corpora. Firstly, we observed that a small amount of pre-training followed by fine-tuning on small bilingual corpora can yield large gains over when pre-training is not used. Furthermore, multilingual fine-tuning leads to further gains in translation quality which significantly outperforms a very strong multilingual baseline that does not rely on any pre-training.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="dabre-chakrabarty-2021-nict">
<titleInfo>
<title>NICT-5’s Submission To WAT 2021: MBART Pre-training And In-Domain Fine Tuning For Indic Languages</title>
</titleInfo>
<name type="personal">
<namePart type="given">Raj</namePart>
<namePart type="family">Dabre</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Abhisek</namePart>
<namePart type="family">Chakrabarty</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 8th Workshop on Asian Translation (WAT2021)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In this paper, we describe our submission to the multilingual Indic language translation task “MultiIndicMT” under the team name “NICT-5”. This task involves translation from 10 Indic languages into English and vice versa. The objective of the task was to explore the utility of multilingual approaches using a variety of in-domain and out-of-domain parallel and monolingual corpora. Given the recent success of multilingual NMT pre-training, we decided to explore pre-training an MBART model on a large monolingual corpus collection covering all languages in this task, followed by multilingual fine-tuning on small in-domain corpora. Firstly, we observed that a small amount of pre-training followed by fine-tuning on small bilingual corpora can yield large gains over not using pre-training at all. Furthermore, multilingual fine-tuning leads to further gains in translation quality, significantly outperforming a very strong multilingual baseline that does not rely on any pre-training.</abstract>
<identifier type="citekey">dabre-chakrabarty-2021-nict</identifier>
<identifier type="doi">10.18653/v1/2021.wat-1.23</identifier>
<location>
<url>https://aclanthology.org/2021.wat-1.23</url>
</location>
<part>
<date>2021-08</date>
<extent unit="page">
<start>198</start>
<end>204</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T NICT-5’s Submission To WAT 2021: MBART Pre-training And In-Domain Fine Tuning For Indic Languages
%A Dabre, Raj
%A Chakrabarty, Abhisek
%S Proceedings of the 8th Workshop on Asian Translation (WAT2021)
%D 2021
%8 aug
%I Association for Computational Linguistics
%C Online
%F dabre-chakrabarty-2021-nict
%X In this paper, we describe our submission to the multilingual Indic language translation task “MultiIndicMT” under the team name “NICT-5”. This task involves translation from 10 Indic languages into English and vice versa. The objective of the task was to explore the utility of multilingual approaches using a variety of in-domain and out-of-domain parallel and monolingual corpora. Given the recent success of multilingual NMT pre-training, we decided to explore pre-training an MBART model on a large monolingual corpus collection covering all languages in this task, followed by multilingual fine-tuning on small in-domain corpora. Firstly, we observed that a small amount of pre-training followed by fine-tuning on small bilingual corpora can yield large gains over not using pre-training at all. Furthermore, multilingual fine-tuning leads to further gains in translation quality, significantly outperforming a very strong multilingual baseline that does not rely on any pre-training.
%R 10.18653/v1/2021.wat-1.23
%U https://aclanthology.org/2021.wat-1.23
%U https://doi.org/10.18653/v1/2021.wat-1.23
%P 198-204
Markdown (Informal)
[NICT-5’s Submission To WAT 2021: MBART Pre-training And In-Domain Fine Tuning For Indic Languages](https://aclanthology.org/2021.wat-1.23) (Dabre & Chakrabarty, WAT 2021)
ACL
Raj Dabre and Abhisek Chakrabarty. 2021. NICT-5’s Submission To WAT 2021: MBART Pre-training And In-Domain Fine Tuning For Indic Languages. In Proceedings of the 8th Workshop on Asian Translation (WAT2021), pages 198–204, Online. Association for Computational Linguistics.
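
As a rough illustration of the recipe the abstract describes (pre-train an MBART model, then fine-tune it on small in-domain parallel data), here is a minimal sketch using the HuggingFace `transformers` API. This record includes no code, and the paper pre-trains its own MBART on Indic monolingual corpora; the public `facebook/mbart-large-cc25` checkpoint, the toy Hindi–English sentence pair, and the hyperparameters below are all stand-in assumptions, not the authors' setup.

```python
# Hedged sketch: fine-tuning a pre-trained mBART checkpoint on a (toy)
# in-domain parallel pair. The paper pre-trains its own MBART on Indic
# monolingual data; here the public mbart-large-cc25 checkpoint stands in
# as the "pre-trained" starting point, purely for illustration.
import torch
from transformers import MBartForConditionalGeneration, MBartTokenizer

# mBART marks translation direction with language-code tokens; the
# tokenizer inserts them when given src_lang/tgt_lang.
tokenizer = MBartTokenizer.from_pretrained(
    "facebook/mbart-large-cc25", src_lang="hi_IN", tgt_lang="en_XX"
)
model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-cc25")

# Toy stand-in for a small in-domain parallel corpus (Hindi -> English).
src_texts = ["यह एक उदाहरण वाक्य है।"]
tgt_texts = ["This is an example sentence."]

# Tokenize source and target together; `labels` come from text_target.
batch = tokenizer(
    src_texts, text_target=tgt_texts, padding=True, return_tensors="pt"
)

model.train()
optimizer = torch.optim.AdamW(model.parameters(), lr=3e-5)

# One optimizer step: the model computes token-level cross-entropy loss
# against the labels and shifts decoder inputs internally.
loss = model(**batch).loss
loss.backward()
optimizer.step()
optimizer.zero_grad()
```

For the multilingual fine-tuning the abstract reports as strongest, steps like this would be taken over mixed batches drawn from all ten Indic–English directions, with the language-code tokens telling the model which direction each example belongs to.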