@inproceedings{kim-komachi-2021-tmu,
    title     = {{TMU} {NMT} System with {Japanese} {BART} for the Patent task of {WAT} 2021},
    author    = {Kim, Hwichan and
                 Komachi, Mamoru},
    editor    = {Nakazawa, Toshiaki and
                 Nakayama, Hideki and
                 Goto, Isao and
                 Mino, Hideya and
                 Ding, Chenchen and
                 Dabre, Raj and
                 Kunchukuttan, Anoop and
                 Higashiyama, Shohei and
                 Manabe, Hiroshi and
                 Pa, Win Pa and
                 Parida, Shantipriya and
                 Bojar, Ond{\v{r}}ej and
                 Chu, Chenhui and
                 Eriguchi, Akiko and
                 Abe, Kaori and
                 Oda, Yusuke and
                 Sudoh, Katsuhito and
                 Kurohashi, Sadao and
                 Bhattacharyya, Pushpak},
    booktitle = {Proceedings of the 8th Workshop on Asian Translation (WAT2021)},
    month     = aug,
    year      = {2021},
    address   = {Online},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2021.wat-1.13/},
    doi       = {10.18653/v1/2021.wat-1.13},
    pages     = {133--137},
    abstract  = {In this paper, we introduce our TMU Neural Machine Translation (NMT) system submitted for the Patent task (Korean Japanese and English Japanese) of 8th Workshop on Asian Translation (Nakazawa et al., 2021). Recently, several studies proposed pre-trained encoder-decoder models using monolingual data. One of the pre-trained models, BART (Lewis et al., 2020), was shown to improve translation accuracy via fine-tuning with bilingual data. However, they experimented only Romanian{$\rightarrow$}English translation using English BART. In this paper, we examine the effectiveness of Japanese BART using Japan Patent Office Corpus 2.0. Our experiments indicate that Japanese BART can also improve translation accuracy in both Korean Japanese and English Japanese translations.}
}
Markdown (Informal)
[TMU NMT System with Japanese BART for the Patent task of WAT 2021](https://aclanthology.org/2021.wat-1.13/) (Kim & Komachi, WAT 2021)
ACL