@inproceedings{morishita-etal-2018-ntts,
title = "{NTT}{'}s Neural Machine Translation Systems for {WMT} 2018",
author = "Morishita, Makoto and
Suzuki, Jun and
Nagata, Masaaki",
booktitle = "Proceedings of the Third Conference on Machine Translation: Shared Task Papers",
month = oct,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-6421",
doi = "10.18653/v1/W18-6421",
pages = "461--466",
abstract = "This paper describes NTT{'}s neural machine translation systems submitted to the WMT 2018 English-German and German-English news translation tasks. Our submission has three main components: the Transformer model, corpus cleaning, and right-to-left n-best re-ranking techniques. Through our experiments, we identified two keys for improving accuracy: filtering noisy training sentences and right-to-left re-ranking. We also found that the Transformer model requires more training data than the RNN-based model, and the RNN-based model sometimes achieves better accuracy than the Transformer model when the corpus is small.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="morishita-etal-2018-ntts">
<titleInfo>
<title>NTT’s Neural Machine Translation Systems for WMT 2018</title>
</titleInfo>
<name type="personal">
<namePart type="given">Makoto</namePart>
<namePart type="family">Morishita</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jun</namePart>
<namePart type="family">Suzuki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Masaaki</namePart>
<namePart type="family">Nagata</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Third Conference on Machine Translation: Shared Task Papers</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Brussels, Belgium</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes NTT’s neural machine translation systems submitted to the WMT 2018 English-German and German-English news translation tasks. Our submission has three main components: the Transformer model, corpus cleaning, and right-to-left n-best re-ranking techniques. Through our experiments, we identified two keys for improving accuracy: filtering noisy training sentences and right-to-left re-ranking. We also found that the Transformer model requires more training data than the RNN-based model, and the RNN-based model sometimes achieves better accuracy than the Transformer model when the corpus is small.</abstract>
<identifier type="citekey">morishita-etal-2018-ntts</identifier>
<identifier type="doi">10.18653/v1/W18-6421</identifier>
<location>
<url>https://aclanthology.org/W18-6421</url>
</location>
<part>
<date>2018-10</date>
<extent unit="page">
<start>461</start>
<end>466</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T NTT’s Neural Machine Translation Systems for WMT 2018
%A Morishita, Makoto
%A Suzuki, Jun
%A Nagata, Masaaki
%S Proceedings of the Third Conference on Machine Translation: Shared Task Papers
%D 2018
%8 oct
%I Association for Computational Linguistics
%C Brussels, Belgium
%F morishita-etal-2018-ntts
%X This paper describes NTT’s neural machine translation systems submitted to the WMT 2018 English-German and German-English news translation tasks. Our submission has three main components: the Transformer model, corpus cleaning, and right-to-left n-best re-ranking techniques. Through our experiments, we identified two keys for improving accuracy: filtering noisy training sentences and right-to-left re-ranking. We also found that the Transformer model requires more training data than the RNN-based model, and the RNN-based model sometimes achieves better accuracy than the Transformer model when the corpus is small.
%R 10.18653/v1/W18-6421
%U https://aclanthology.org/W18-6421
%U https://doi.org/10.18653/v1/W18-6421
%P 461-466
Markdown (Informal)
[NTT’s Neural Machine Translation Systems for WMT 2018](https://aclanthology.org/W18-6421) (Morishita et al., 2018)
ACL
- Makoto Morishita, Jun Suzuki, and Masaaki Nagata. 2018. NTT’s Neural Machine Translation Systems for WMT 2018. In Proceedings of the Third Conference on Machine Translation: Shared Task Papers, pages 461–466, Brussels, Belgium. Association for Computational Linguistics.