@inproceedings{kim-etal-2020-iterative,
title = "An Iterative Knowledge Transfer {NMT} System for {WMT}20 News Translation Task",
author = "Kim, Jiwan and
Park, Soyoon and
Kim, Sangha and
Choi, Yoonjung",
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.11",
pages = "139--144",
abstract = "This paper describes our submission to the WMT20 news translation shared task in English to Japanese direction. Our main approach is based on transferring knowledge of domain and linguistic characteristics by pre-training the encoder-decoder model with large amount of in-domain monolingual data through unsupervised and supervised prediction task. We then fine-tune the model with parallel data and in-domain synthetic data, generated with iterative back-translation. For additional gain, we generate final results with an ensemble model and re-rank them with averaged models and language models. Through these methods, we achieve +5.42 BLEU score compare to the baseline model.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="kim-etal-2020-iterative">
    <titleInfo>
      <title>An Iterative Knowledge Transfer NMT System for WMT20 News Translation Task</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Jiwan</namePart>
      <namePart type="family">Kim</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Soyoon</namePart>
      <namePart type="family">Park</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Sangha</namePart>
      <namePart type="family">Kim</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Yoonjung</namePart>
      <namePart type="family">Choi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-nov</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fifth Conference on Machine Translation</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>This paper describes our submission to the WMT20 news translation shared task in the English to Japanese direction. Our main approach is based on transferring knowledge of domain and linguistic characteristics by pre-training the encoder-decoder model with a large amount of in-domain monolingual data through unsupervised and supervised prediction tasks. We then fine-tune the model with parallel data and in-domain synthetic data generated with iterative back-translation. For additional gains, we generate final results with an ensemble model and re-rank them with averaged models and language models. Through these methods, we achieve a +5.42 BLEU improvement over the baseline model.</abstract>
    <identifier type="citekey">kim-etal-2020-iterative</identifier>
    <location>
      <url>https://aclanthology.org/2020.wmt-1.11</url>
    </location>
    <part>
      <date>2020-nov</date>
      <extent unit="page">
        <start>139</start>
        <end>144</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T An Iterative Knowledge Transfer NMT System for WMT20 News Translation Task
%A Kim, Jiwan
%A Park, Soyoon
%A Kim, Sangha
%A Choi, Yoonjung
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F kim-etal-2020-iterative
%X This paper describes our submission to the WMT20 news translation shared task in the English to Japanese direction. Our main approach is based on transferring knowledge of domain and linguistic characteristics by pre-training the encoder-decoder model with a large amount of in-domain monolingual data through unsupervised and supervised prediction tasks. We then fine-tune the model with parallel data and in-domain synthetic data generated with iterative back-translation. For additional gains, we generate final results with an ensemble model and re-rank them with averaged models and language models. Through these methods, we achieve a +5.42 BLEU improvement over the baseline model.
%U https://aclanthology.org/2020.wmt-1.11
%P 139-144
Markdown (Informal)
[An Iterative Knowledge Transfer NMT System for WMT20 News Translation Task](https://aclanthology.org/2020.wmt-1.11) (Kim et al., WMT 2020)
ACL
Jiwan Kim, Soyoon Park, Sangha Kim, and Yoonjung Choi. 2020. An Iterative Knowledge Transfer NMT System for WMT20 News Translation Task. In Proceedings of the Fifth Conference on Machine Translation, pages 139–144, Online. Association for Computational Linguistics.