@inproceedings{atrio-etal-2021-iict,
    title = "The {IICT}-Yverdon System for the {WMT} 2021 Unsupervised {MT} and Very Low Resource Supervised {MT} Task",
    author = "Atrio, {\`A}lex R. and
      Luthier, Gabriel and
      Fahy, Axel and
      Vernikos, Giorgos and
      Popescu-Belis, Andrei and
      Dolamic, Ljiljana",
    booktitle = "Proceedings of the Sixth Conference on Machine Translation",
    month = nov,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.wmt-1.103",
    pages = "973--981",
    abstract = "In this paper, we present the systems submitted by our team from the Institute of ICT (HEIG-VD / HES-SO) to the Unsupervised MT and Very Low Resource Supervised MT task. We first study the improvements brought to a baseline system by techniques such as back-translation and initialization from a parent model. We find that both techniques are beneficial and suffice to reach performance that compares with more sophisticated systems from the 2020 task. We then present the application of this system to the 2021 task for low-resource supervised Upper Sorbian (HSB) to German translation, in both directions. Finally, we present a contrastive system for HSB-DE in both directions, and for unsupervised German to Lower Sorbian (DSB) translation, which uses multi-task training with various training schedules to improve over the baseline.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="atrio-etal-2021-iict">
    <titleInfo>
      <title>The IICT-Yverdon System for the WMT 2021 Unsupervised MT and Very Low Resource Supervised MT Task</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Àlex</namePart>
      <namePart type="given">R.</namePart>
      <namePart type="family">Atrio</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Gabriel</namePart>
      <namePart type="family">Luthier</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Axel</namePart>
      <namePart type="family">Fahy</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Giorgos</namePart>
      <namePart type="family">Vernikos</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Andrei</namePart>
      <namePart type="family">Popescu-Belis</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Ljiljana</namePart>
      <namePart type="family">Dolamic</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Sixth Conference on Machine Translation</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>In this paper, we present the systems submitted by our team from the Institute of ICT (HEIG-VD / HES-SO) to the Unsupervised MT and Very Low Resource Supervised MT task. We first study the improvements brought to a baseline system by techniques such as back-translation and initialization from a parent model. We find that both techniques are beneficial and suffice to reach performance that compares with more sophisticated systems from the 2020 task. We then present the application of this system to the 2021 task for low-resource supervised Upper Sorbian (HSB) to German translation, in both directions. Finally, we present a contrastive system for HSB-DE in both directions, and for unsupervised German to Lower Sorbian (DSB) translation, which uses multi-task training with various training schedules to improve over the baseline.</abstract>
    <identifier type="citekey">atrio-etal-2021-iict</identifier>
    <location>
      <url>https://aclanthology.org/2021.wmt-1.103</url>
    </location>
    <part>
      <date>2021-11</date>
      <extent unit="page">
        <start>973</start>
        <end>981</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T The IICT-Yverdon System for the WMT 2021 Unsupervised MT and Very Low Resource Supervised MT Task
%A Atrio, Àlex R.
%A Luthier, Gabriel
%A Fahy, Axel
%A Vernikos, Giorgos
%A Popescu-Belis, Andrei
%A Dolamic, Ljiljana
%S Proceedings of the Sixth Conference on Machine Translation
%D 2021
%8 November
%I Association for Computational Linguistics
%C Online
%F atrio-etal-2021-iict
%X In this paper, we present the systems submitted by our team from the Institute of ICT (HEIG-VD / HES-SO) to the Unsupervised MT and Very Low Resource Supervised MT task. We first study the improvements brought to a baseline system by techniques such as back-translation and initialization from a parent model. We find that both techniques are beneficial and suffice to reach performance that compares with more sophisticated systems from the 2020 task. We then present the application of this system to the 2021 task for low-resource supervised Upper Sorbian (HSB) to German translation, in both directions. Finally, we present a contrastive system for HSB-DE in both directions, and for unsupervised German to Lower Sorbian (DSB) translation, which uses multi-task training with various training schedules to improve over the baseline.
%U https://aclanthology.org/2021.wmt-1.103
%P 973-981
Markdown (Informal)
[The IICT-Yverdon System for the WMT 2021 Unsupervised MT and Very Low Resource Supervised MT Task](https://aclanthology.org/2021.wmt-1.103) (Atrio et al., WMT 2021)
ACL
Àlex R. Atrio, Gabriel Luthier, Axel Fahy, Giorgos Vernikos, Andrei Popescu-Belis, and Ljiljana Dolamic. 2021. The IICT-Yverdon System for the WMT 2021 Unsupervised MT and Very Low Resource Supervised MT Task. In Proceedings of the Sixth Conference on Machine Translation, pages 973–981, Online. Association for Computational Linguistics.