@inproceedings{imamura-sumita-2018-nict,
title = "{NICT} Self-Training Approach to Neural Machine Translation at {NMT}-2018",
author = "Imamura, Kenji and
Sumita, Eiichiro",
editor = "Birch, Alexandra and
Finch, Andrew and
Luong, Thang and
Neubig, Graham and
Oda, Yusuke",
booktitle = "Proceedings of the 2nd Workshop on Neural Machine Translation and Generation",
month = jul,
year = "2018",
address = "Melbourne, Australia",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W18-2713/",
doi = "10.18653/v1/W18-2713",
pages = "110--115",
abstract = "This paper describes the NICT neural machine translation system submitted at the NMT-2018 shared task. A characteristic of our approach is the introduction of self-training. Since our self-training does not change the model structure, it does not influence the efficiency of translation, such as the translation speed. The experimental results showed that the translation quality improved not only in the sequence-to-sequence (seq-to-seq) models but also in the transformer models."
}