@inproceedings{madaan-etal-2020-transfer,
  title     = {Transfer Learning for Related Languages: Submissions to the {WMT}20 Similar Language Translation Task},
  author    = {Madaan, Lovish and Sharma, Soumya and Singla, Parag},
  booktitle = {Proceedings of the Fifth Conference on Machine Translation},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.wmt-1.46},
  pages     = {402--408},
  abstract  = {In this paper, we describe IIT Delhi{'}s submissions to the WMT 2020 task on Similar Language Translation for four language directions: Hindi {\textless}-{\textgreater} Marathi and Spanish {\textless}-{\textgreater} Portuguese. We try out three different model settings for the translation task and select our primary and contrastive submissions on the basis of performance of these three models. For our best submissions, we fine-tune the mBART model on the parallel data provided for the task. The pre-training is done using self-supervised objectives on a large amount of monolingual data for many languages. Overall, our models are ranked in the top four of all systems for the submitted language pairs, with first rank in Spanish -{\textgreater} Portuguese.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="madaan-etal-2020-transfer">
<titleInfo>
<title>Transfer Learning for Related Languages: Submissions to the WMT20 Similar Language Translation Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Lovish</namePart>
<namePart type="family">Madaan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Soumya</namePart>
<namePart type="family">Sharma</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Parag</namePart>
<namePart type="family">Singla</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In this paper, we describe IIT Delhi’s submissions to the WMT 2020 task on Similar Language Translation for four language directions: Hindi &lt;-&gt; Marathi and Spanish &lt;-&gt; Portuguese. We try out three different model settings for the translation task and select our primary and contrastive submissions on the basis of performance of these three models. For our best submissions, we fine-tune the mBART model on the parallel data provided for the task. The pre-training is done using self-supervised objectives on a large amount of monolingual data for many languages. Overall, our models are ranked in the top four of all systems for the submitted language pairs, with first rank in Spanish -&gt; Portuguese.</abstract>
<identifier type="citekey">madaan-etal-2020-transfer</identifier>
<location>
<url>https://aclanthology.org/2020.wmt-1.46</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>402</start>
<end>408</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Transfer Learning for Related Languages: Submissions to the WMT20 Similar Language Translation Task
%A Madaan, Lovish
%A Sharma, Soumya
%A Singla, Parag
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F madaan-etal-2020-transfer
%X In this paper, we describe IIT Delhi’s submissions to the WMT 2020 task on Similar Language Translation for four language directions: Hindi <-> Marathi and Spanish <-> Portuguese. We try out three different model settings for the translation task and select our primary and contrastive submissions on the basis of performance of these three models. For our best submissions, we fine-tune the mBART model on the parallel data provided for the task. The pre-training is done using self-supervised objectives on a large amount of monolingual data for many languages. Overall, our models are ranked in the top four of all systems for the submitted language pairs, with first rank in Spanish -> Portuguese.
%U https://aclanthology.org/2020.wmt-1.46
%P 402-408
Markdown (Informal)
[Transfer Learning for Related Languages: Submissions to the WMT20 Similar Language Translation Task](https://aclanthology.org/2020.wmt-1.46) (Madaan et al., WMT 2020)
ACL