@inproceedings{tamura-etal-2023-masked,
title = "Does Masked Language Model Pre-training with Artificial Data Improve Low-resource Neural Machine Translation?",
author = "Tamura, Hiroto and
Hirasawa, Tosho and
Kim, Hwichan and
Komachi, Mamoru",
editor = "Vlachos, Andreas and
Augenstein, Isabelle",
booktitle = "Findings of the Association for Computational Linguistics: EACL 2023",
month = may,
year = "2023",
address = "Dubrovnik, Croatia",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.findings-eacl.166/",
doi = "10.18653/v1/2023.findings-eacl.166",
pages = "2216--2225",
abstract = "Pre-training masked language models (MLMs) with artificial data has been proven beneficial for several natural language processing tasks such as natural language understanding and summarization; however, it has been less explored for neural machine translation (NMT).A previous study revealed the benefit of transfer learning for NMT in a limited setup, which differs from MLM.In this study, we prepared two kinds of artificial data and compared the translation performance of NMT when pre-trained with MLM.In addition to the random sequences, we created artificial data mimicking token frequency information from the real world. Our results showed that pre-training the models with artificial data by MLM improves translation performance in low-resource situations. Additionally, we found that pre-training on artificial data created considering token frequency information facilitates improved performance."
}
Markdown (Informal)
[Does Masked Language Model Pre-training with Artificial Data Improve Low-resource Neural Machine Translation?](https://aclanthology.org/2023.findings-eacl.166/) (Tamura et al., Findings 2023)
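The two kinds of artificial pre-training data described in the abstract (purely random token sequences versus sequences mimicking real-world token frequencies) can be sketched as below. This is a hedged illustration, not the authors' implementation: the function name `make_artificial_corpus`, the Zipfian distribution as a stand-in for real-world frequency information, the exponent `zipf_a`, and the sentence-length range are all assumptions; the paper's exact construction may differ.

```python
import numpy as np

def make_artificial_corpus(num_sentences, vocab_size, max_len,
                           zipf_a=1.2, uniform=False, seed=0):
    """Generate artificial sentences as lists of token IDs.

    uniform=True  -> random sequences: every token ID is equally likely.
    uniform=False -> token IDs drawn from a Zipfian distribution, a common
                     proxy for real-world token frequency information
                     (assumption; the paper may derive frequencies differently).
    """
    rng = np.random.default_rng(seed)
    if uniform:
        probs = np.full(vocab_size, 1.0 / vocab_size)
    else:
        ranks = np.arange(1, vocab_size + 1)
        probs = ranks.astype(float) ** (-zipf_a)
        probs /= probs.sum()
    corpus = []
    for _ in range(num_sentences):
        # Sentence length is sampled uniformly; the range is an assumption.
        length = rng.integers(5, max_len + 1)
        corpus.append(rng.choice(vocab_size, size=length, p=probs).tolist())
    return corpus

# Example: 1,000 frequency-mimicking sentences over a 8,000-token vocabulary,
# ready to be fed to an MLM pre-training objective.
corpus = make_artificial_corpus(1000, vocab_size=8000, max_len=40)
```

Such a corpus would then be used for standard MLM pre-training (masking a fraction of tokens and predicting them) before fine-tuning the encoder on the low-resource NMT task.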