@inproceedings{rathinasamy-etal-2020-infosys,
title = "Infosys Machine Translation System for {WMT}20 Similar Language Translation Task",
author = "Rathinasamy, Kamalkumar and
Singh, Amanpreet and
Sivasambagupta, Balaguru and
Prasad Neerchal, Prajna and
Sivasankaran, Vani",
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.52",
pages = "437--441",
abstract = "This paper describes Infosys{'}s submission to the WMT20 Similar Language Translation shared task. We participated in Indo-Aryan language pair in the language direction Hindi to Marathi. Our baseline system is byte-pair encoding based transformer model trained with the Fairseq sequence modeling toolkit. Our final system is an ensemble of two transformer models, which ranked first in WMT20 evaluation. One model is designed to learn the nuances of translation of this low resource language pair by taking advantage of the fact that the source and target languages are same alphabet languages. The other model is the result of experimentation with the proportion of back-translated data to the parallel data to improve translation fluency.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="rathinasamy-etal-2020-infosys">
<titleInfo>
<title>Infosys Machine Translation System for WMT20 Similar Language Translation Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kamalkumar</namePart>
<namePart type="family">Rathinasamy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Amanpreet</namePart>
<namePart type="family">Singh</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Balaguru</namePart>
<namePart type="family">Sivasambagupta</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Prajna</namePart>
<namePart type="family">Prasad Neerchal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vani</namePart>
<namePart type="family">Sivasankaran</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes Infosys’s submission to the WMT20 Similar Language Translation shared task. We participated in Indo-Aryan language pair in the language direction Hindi to Marathi. Our baseline system is byte-pair encoding based transformer model trained with the Fairseq sequence modeling toolkit. Our final system is an ensemble of two transformer models, which ranked first in WMT20 evaluation. One model is designed to learn the nuances of translation of this low resource language pair by taking advantage of the fact that the source and target languages are same alphabet languages. The other model is the result of experimentation with the proportion of back-translated data to the parallel data to improve translation fluency.</abstract>
<identifier type="citekey">rathinasamy-etal-2020-infosys</identifier>
<location>
<url>https://aclanthology.org/2020.wmt-1.52</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>437</start>
<end>441</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Infosys Machine Translation System for WMT20 Similar Language Translation Task
%A Rathinasamy, Kamalkumar
%A Singh, Amanpreet
%A Sivasambagupta, Balaguru
%A Prasad Neerchal, Prajna
%A Sivasankaran, Vani
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F rathinasamy-etal-2020-infosys
%X This paper describes Infosys’s submission to the WMT20 Similar Language Translation shared task. We participated in Indo-Aryan language pair in the language direction Hindi to Marathi. Our baseline system is byte-pair encoding based transformer model trained with the Fairseq sequence modeling toolkit. Our final system is an ensemble of two transformer models, which ranked first in WMT20 evaluation. One model is designed to learn the nuances of translation of this low resource language pair by taking advantage of the fact that the source and target languages are same alphabet languages. The other model is the result of experimentation with the proportion of back-translated data to the parallel data to improve translation fluency.
%U https://aclanthology.org/2020.wmt-1.52
%P 437-441
Markdown (Informal)
[Infosys Machine Translation System for WMT20 Similar Language Translation Task](https://aclanthology.org/2020.wmt-1.52) (Rathinasamy et al., WMT 2020)
ACL