@inproceedings{hu-etal-2018-tencentfmrd,
title = "{T}encent{F}m{RD} Neural Machine Translation for {WMT}18",
author = "Hu, Bojie and
Han, Ambyer and
Huang, Shen",
booktitle = "Proceedings of the Third Conference on Machine Translation: Shared Task Papers",
month = oct,
year = "2018",
    address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-6413",
doi = "10.18653/v1/W18-6413",
pages = "410--417",
    abstract = "This paper describes the Neural Machine Translation (NMT) system of TencentFmRD for the Chinese↔English news translation tasks of WMT 2018. Our systems are trained with our own system, TenTrans, an improved NMT system based on the Transformer self-attention mechanism. In addition to the basic Transformer training settings, TenTrans uses multi-model fusion techniques, multiple-feature reranking, different segmentation models, and joint learning. Finally, we adopt data selection strategies to fine-tune the trained system and achieve a stable performance improvement. Our Chinese→English system achieved the second-best BLEU scores and the fourth-best cased BLEU scores among all WMT18 submitted systems.",
}
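A minimal LaTeX usage sketch for the BibTeX record above (the file name references.bib and the bibliography style are illustrative assumptions, not part of the record):

\documentclass{article}
\begin{document}
% Cite the entry by its key; compile with pdflatex + bibtex + pdflatex (twice)
The TencentFmRD WMT18 systems are described in~\cite{hu-etal-2018-tencentfmrd}.
\bibliographystyle{plain}   % any standard bibliography style works here
\bibliography{references}   % assumes the entry is saved in references.bib
\end{document}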
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="hu-etal-2018-tencentfmrd">
<titleInfo>
<title>TencentFmRD Neural Machine Translation for WMT18</title>
</titleInfo>
<name type="personal">
<namePart type="given">Bojie</namePart>
<namePart type="family">Hu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ambyer</namePart>
<namePart type="family">Han</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shen</namePart>
<namePart type="family">Huang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Third Conference on Machine Translation: Shared Task Papers</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Brussels, Belgium</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes the Neural Machine Translation (NMT) system of TencentFmRD for the Chinese↔English news translation tasks of WMT 2018. Our systems are trained with our own system, TenTrans, an improved NMT system based on the Transformer self-attention mechanism. In addition to the basic Transformer training settings, TenTrans uses multi-model fusion techniques, multiple-feature reranking, different segmentation models, and joint learning. Finally, we adopt data selection strategies to fine-tune the trained system and achieve a stable performance improvement. Our Chinese→English system achieved the second-best BLEU scores and the fourth-best cased BLEU scores among all WMT18 submitted systems.</abstract>
<identifier type="citekey">hu-etal-2018-tencentfmrd</identifier>
<identifier type="doi">10.18653/v1/W18-6413</identifier>
<location>
<url>https://aclanthology.org/W18-6413</url>
</location>
<part>
<date>2018-10</date>
<extent unit="page">
<start>410</start>
<end>417</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T TencentFmRD Neural Machine Translation for WMT18
%A Hu, Bojie
%A Han, Ambyer
%A Huang, Shen
%S Proceedings of the Third Conference on Machine Translation: Shared Task Papers
%D 2018
%8 October
%I Association for Computational Linguistics
%C Brussels, Belgium
%F hu-etal-2018-tencentfmrd
%X This paper describes the Neural Machine Translation (NMT) system of TencentFmRD for the Chinese↔English news translation tasks of WMT 2018. Our systems are trained with our own system, TenTrans, an improved NMT system based on the Transformer self-attention mechanism. In addition to the basic Transformer training settings, TenTrans uses multi-model fusion techniques, multiple-feature reranking, different segmentation models, and joint learning. Finally, we adopt data selection strategies to fine-tune the trained system and achieve a stable performance improvement. Our Chinese→English system achieved the second-best BLEU scores and the fourth-best cased BLEU scores among all WMT18 submitted systems.
%R 10.18653/v1/W18-6413
%U https://aclanthology.org/W18-6413
%U https://doi.org/10.18653/v1/W18-6413
%P 410-417
Markdown (Informal)
[TencentFmRD Neural Machine Translation for WMT18](https://aclanthology.org/W18-6413) (Hu et al., 2018)
ACL
Bojie Hu, Ambyer Han, and Shen Huang. 2018. TencentFmRD Neural Machine Translation for WMT18. In Proceedings of the Third Conference on Machine Translation: Shared Task Papers, pages 410–417, Brussels, Belgium. Association for Computational Linguistics.