@inproceedings{wang-htun-2020-gokus,
title = "Goku`s Participation in {WAT} 2020",
author = "Wang, Dongzhe and
Htun, Ohnmar",
editor = "Nakazawa, Toshiaki and
Nakayama, Hideki and
Ding, Chenchen and
Dabre, Raj and
Kunchukuttan, Anoop and
Pa, Win Pa and
Bojar, Ond{\v{r}}ej and
Parida, Shantipriya and
Goto, Isao and
      Mino, Hideya and
Manabe, Hiroshi and
Sudoh, Katsuhito and
Kurohashi, Sadao and
Bhattacharyya, Pushpak",
booktitle = "Proceedings of the 7th Workshop on Asian Translation",
month = dec,
year = "2020",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2020.wat-1.16/",
doi = "10.18653/v1/2020.wat-1.16",
pages = "135--141",
abstract = "This paper introduces our neural machine translation systems' participation in the WAT 2020 (team ID: goku20). We participated in the (i) Patent, (ii) Business Scene Dialogue (BSD) document-level translation, (iii) Mixed-domain tasks. Regardless of simplicity, standard Transformer models have been proven to be very effective in many machine translation systems. Recently, some advanced pre-training generative models have been proposed on the basis of encoder-decoder framework. Our main focus of this work is to explore how robust Transformer models perform in translation from sentence-level to document-level, from resource-rich to low-resource languages. Additionally, we also investigated the improvement that fine-tuning on the top of pre-trained transformer-based models can achieve on various tasks."
}