@inproceedings{zhang-etal-2020-niutrans,
  title     = {The {NiuTrans} Machine Translation Systems for {WMT20}},
  author    = {Zhang, Yuhao and
               Wang, Ziyang and
               Cao, Runzhe and
               Wei, Binghao and
               Shan, Weiqiao and
               Zhou, Shuhan and
               Reheman, Abudurexiti and
               Zhou, Tao and
               Zeng, Xin and
               Wang, Laohu and
               Mu, Yongyu and
               Zhang, Jingnan and
               Liu, Xiaoqian and
               Zhou, Xuanjun and
               Li, Yinqiao and
               Li, Bei and
               Xiao, Tong and
               Zhu, Jingbo},
  booktitle = {Proceedings of the Fifth Conference on Machine Translation},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.wmt-1.37},
  pages     = {338--345},
  abstract  = {This paper describes NiuTrans neural machine translation systems of the WMT20 news translation tasks. We participated in Japanese{\textless}-{\textgreater}English, English-{\textgreater}Chinese, Inuktitut-{\textgreater}English and Tamil-{\textgreater}English total five tasks and rank first in Japanese{\textless}-{\textgreater}English both sides. We mainly utilized iterative back-translation, different depth and widen model architectures, iterative knowledge distillation and iterative fine-tuning. And we find that adequately widened and deepened the model simultaneously, the performance will significantly improve. Also, iterative fine-tuning strategy we implemented is effective during adapting domain. For Inuktitut-{\textgreater}English and Tamil-{\textgreater}English tasks, we built multilingual models separately and employed pretraining word embedding to obtain better performance.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="zhang-etal-2020-niutrans">
<titleInfo>
<title>The NiuTrans Machine Translation Systems for WMT20</title>
</titleInfo>
<name type="personal">
<namePart type="given">Yuhao</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ziyang</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Runzhe</namePart>
<namePart type="family">Cao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Binghao</namePart>
<namePart type="family">Wei</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Weiqiao</namePart>
<namePart type="family">Shan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shuhan</namePart>
<namePart type="family">Zhou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Abudurexiti</namePart>
<namePart type="family">Reheman</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tao</namePart>
<namePart type="family">Zhou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xin</namePart>
<namePart type="family">Zeng</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Laohu</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yongyu</namePart>
<namePart type="family">Mu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jingnan</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xiaoqian</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Xuanjun</namePart>
<namePart type="family">Zhou</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yinqiao</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Bei</namePart>
<namePart type="family">Li</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tong</namePart>
<namePart type="family">Xiao</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jingbo</namePart>
<namePart type="family">Zhu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes NiuTrans neural machine translation systems of the WMT20 news translation tasks. We participated in Japanese&lt;-&gt;English, English-&gt;Chinese, Inuktitut-&gt;English and Tamil-&gt;English total five tasks and rank first in Japanese&lt;-&gt;English both sides. We mainly utilized iterative back-translation, different depth and widen model architectures, iterative knowledge distillation and iterative fine-tuning. And we find that adequately widened and deepened the model simultaneously, the performance will significantly improve. Also, iterative fine-tuning strategy we implemented is effective during adapting domain. For Inuktitut-&gt;English and Tamil-&gt;English tasks, we built multilingual models separately and employed pretraining word embedding to obtain better performance.</abstract>
<identifier type="citekey">zhang-etal-2020-niutrans</identifier>
<location>
<url>https://aclanthology.org/2020.wmt-1.37</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>338</start>
<end>345</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T The NiuTrans Machine Translation Systems for WMT20
%A Zhang, Yuhao
%A Wang, Ziyang
%A Cao, Runzhe
%A Wei, Binghao
%A Shan, Weiqiao
%A Zhou, Shuhan
%A Reheman, Abudurexiti
%A Zhou, Tao
%A Zeng, Xin
%A Wang, Laohu
%A Mu, Yongyu
%A Zhang, Jingnan
%A Liu, Xiaoqian
%A Zhou, Xuanjun
%A Li, Yinqiao
%A Li, Bei
%A Xiao, Tong
%A Zhu, Jingbo
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F zhang-etal-2020-niutrans
%X This paper describes NiuTrans neural machine translation systems of the WMT20 news translation tasks. We participated in Japanese<->English, English->Chinese, Inuktitut->English and Tamil->English total five tasks and rank first in Japanese<->English both sides. We mainly utilized iterative back-translation, different depth and widen model architectures, iterative knowledge distillation and iterative fine-tuning. And we find that adequately widened and deepened the model simultaneously, the performance will significantly improve. Also, iterative fine-tuning strategy we implemented is effective during adapting domain. For Inuktitut->English and Tamil->English tasks, we built multilingual models separately and employed pretraining word embedding to obtain better performance.
%U https://aclanthology.org/2020.wmt-1.37
%P 338-345
Markdown (Informal)
[The NiuTrans Machine Translation Systems for WMT20](https://aclanthology.org/2020.wmt-1.37) (Zhang et al., WMT 2020)
ACL
- Yuhao Zhang, Ziyang Wang, Runzhe Cao, Binghao Wei, Weiqiao Shan, Shuhan Zhou, Abudurexiti Reheman, Tao Zhou, Xin Zeng, Laohu Wang, Yongyu Mu, Jingnan Zhang, Xiaoqian Liu, Xuanjun Zhou, Yinqiao Li, Bei Li, Tong Xiao, and Jingbo Zhu. 2020. The NiuTrans Machine Translation Systems for WMT20. In Proceedings of the Fifth Conference on Machine Translation, pages 338–345, Online. Association for Computational Linguistics.