@inproceedings{simonarson-etal-2021-mideinds,
    title     = {Mi{\dh}eind{'}s {WMT} 2021 Submission},
    author    = {S{\'\i}monarson, Haukur Barri and
      Sn{\ae}bjarnarson, V{\'e}steinn and
      Ragnarson, P{\'e}tur Orri and
      J{\'o}nsson, Haukur and
      Thorsteinsson, Vilhjalmur},
    booktitle = {Proceedings of the Sixth Conference on Machine Translation},
    month     = nov,
    year      = {2021},
    address   = {Online},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2021.wmt-1.9},
    pages     = {136--139},
    abstract  = {We present Mi{\dh}eind{'}s submission for the English$\rightarrow$Icelandic and Icelandic$\rightarrow$English subsets of the 2021 WMT news translation task. Transformer-base models are trained for translation on parallel data to generate backtranslations iteratively. A pretrained mBART-25 model is then adapted for translation using parallel data as well as the last backtranslation iteration. This adapted pretrained model is then used to re-generate backtranslations, and the training of the adapted model is continued.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="simonarson-etal-2021-mideinds">
<titleInfo>
<title>Miðeind’s WMT 2021 Submission</title>
</titleInfo>
<name type="personal">
<namePart type="given">Haukur</namePart>
<namePart type="given">Barri</namePart>
<namePart type="family">Símonarson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vésteinn</namePart>
<namePart type="family">Snæbjarnarson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pétur</namePart>
<namePart type="given">Orri</namePart>
<namePart type="family">Ragnarson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Haukur</namePart>
<namePart type="family">Jónsson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Vilhjalmur</namePart>
<namePart type="family">Thorsteinsson</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Sixth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We present Miðeind’s submission for the English→Icelandic and Icelandic→English subsets of the 2021 WMT news translation task. Transformer-base models are trained for translation on parallel data to generate backtranslations iteratively. A pretrained mBART-25 model is then adapted for translation using parallel data as well as the last backtranslation iteration. This adapted pretrained model is then used to re-generate backtranslations, and the training of the adapted model is continued.</abstract>
<identifier type="citekey">simonarson-etal-2021-mideinds</identifier>
<location>
<url>https://aclanthology.org/2021.wmt-1.9</url>
</location>
<part>
<date>2021-11</date>
<extent unit="page">
<start>136</start>
<end>139</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Miðeind’s WMT 2021 Submission
%A Símonarson, Haukur Barri
%A Snæbjarnarson, Vésteinn
%A Ragnarson, Pétur Orri
%A Jónsson, Haukur
%A Thorsteinsson, Vilhjalmur
%S Proceedings of the Sixth Conference on Machine Translation
%D 2021
%8 nov
%I Association for Computational Linguistics
%C Online
%F simonarson-etal-2021-mideinds
%X We present Miðeind’s submission for the English→Icelandic and Icelandic→English subsets of the 2021 WMT news translation task. Transformer-base models are trained for translation on parallel data to generate backtranslations iteratively. A pretrained mBART-25 model is then adapted for translation using parallel data as well as the last backtranslation iteration. This adapted pretrained model is then used to re-generate backtranslations, and the training of the adapted model is continued.
%U https://aclanthology.org/2021.wmt-1.9
%P 136-139
Markdown (Informal)
[Miðeind’s WMT 2021 Submission](https://aclanthology.org/2021.wmt-1.9) (Símonarson et al., WMT 2021)
ACL
- Haukur Barri Símonarson, Vésteinn Snæbjarnarson, Pétur Orri Ragnarson, Haukur Jónsson, and Vilhjalmur Thorsteinsson. 2021. Miðeind’s WMT 2021 Submission. In Proceedings of the Sixth Conference on Machine Translation, pages 136–139, Online. Association for Computational Linguistics.