@inproceedings{wang-etal-2020-tencent-ai,
title = "Tencent {AI} Lab Machine Translation Systems for the {WMT}20 Biomedical Translation Task",
author = "Wang, Xing and
Tu, Zhaopeng and
Wang, Longyue and
Shi, Shuming",
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.97",
pages = "881--886",
abstract = "This paper describes the Tencent AI Lab submission of the WMT2020 shared task on biomedical translation in four language directions: German{\textless}-{\textgreater}English, English{\textless}-{\textgreater}German, Chinese{\textless}-{\textgreater}English and English{\textless}-{\textgreater}Chinese. We implement our system with model ensemble technique on different transformer architectures (Deep, Hybrid, Big, Large Transformers). To enlarge the in-domain bilingual corpus, we use back-translation of monolingual in-domain data in the target language as additional in-domain training data. Our systems in German-{\textgreater}English and English-{\textgreater}German are ranked 1st and 3rd respectively according to the official evaluation results in terms of BLEU scores.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="wang-etal-2020-tencent-ai">
<titleInfo>
<title>Tencent AI Lab Machine Translation Systems for the WMT20 Biomedical Translation Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Xing</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Zhaopeng</namePart>
<namePart type="family">Tu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Longyue</namePart>
<namePart type="family">Wang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shuming</namePart>
<namePart type="family">Shi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes the Tencent AI Lab submission of the WMT2020 shared task on biomedical translation in four language directions: German&lt;-&gt;English, English&lt;-&gt;German, Chinese&lt;-&gt;English and English&lt;-&gt;Chinese. We implement our system with model ensemble technique on different transformer architectures (Deep, Hybrid, Big, Large Transformers). To enlarge the in-domain bilingual corpus, we use back-translation of monolingual in-domain data in the target language as additional in-domain training data. Our systems in German-&gt;English and English-&gt;German are ranked 1st and 3rd respectively according to the official evaluation results in terms of BLEU scores.</abstract>
<identifier type="citekey">wang-etal-2020-tencent-ai</identifier>
<location>
<url>https://aclanthology.org/2020.wmt-1.97</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>881</start>
<end>886</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Tencent AI Lab Machine Translation Systems for the WMT20 Biomedical Translation Task
%A Wang, Xing
%A Tu, Zhaopeng
%A Wang, Longyue
%A Shi, Shuming
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F wang-etal-2020-tencent-ai
%X This paper describes the Tencent AI Lab submission of the WMT2020 shared task on biomedical translation in four language directions: German<->English, English<->German, Chinese<->English and English<->Chinese. We implement our system with model ensemble technique on different transformer architectures (Deep, Hybrid, Big, Large Transformers). To enlarge the in-domain bilingual corpus, we use back-translation of monolingual in-domain data in the target language as additional in-domain training data. Our systems in German->English and English->German are ranked 1st and 3rd respectively according to the official evaluation results in terms of BLEU scores.
%U https://aclanthology.org/2020.wmt-1.97
%P 881-886
Markdown (Informal)
[Tencent AI Lab Machine Translation Systems for the WMT20 Biomedical Translation Task](https://aclanthology.org/2020.wmt-1.97) (Wang et al., WMT 2020)
ACL