@inproceedings{aly-etal-2021-efficient,
title = "Efficient Unsupervised {NMT} for Related Languages with Cross-Lingual Language Models and Fidelity Objectives",
author = "Aly, Rami and
Caines, Andrew and
Buttery, Paula",
editor = {Zampieri, Marcos and
Nakov, Preslav and
Ljube{\v{s}}i{\'c}, Nikola and
Tiedemann, J{\"o}rg and
Scherrer, Yves and
Jauhiainen, Tommi},
booktitle = "Proceedings of the Eighth Workshop on NLP for Similar Languages, Varieties and Dialects",
month = apr,
year = "2021",
    address = "Kyiv, Ukraine",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.vardial-1.6/",
pages = "49--59",
    abstract = "The most successful approach to Neural Machine Translation (NMT) when only monolingual training data is available, called unsupervised machine translation, is based on back-translation, where noisy translations are generated to turn the task into a supervised one. However, back-translation is computationally very expensive and inefficient. This work explores a novel, efficient approach to unsupervised NMT. A transformer, initialized with cross-lingual language model weights, is fine-tuned exclusively on monolingual data of the target language by jointly learning a paraphrasing and a denoising autoencoder objective. Experiments are conducted on WMT datasets for German-English, French-English, and Romanian-English. Results are competitive with strong baseline unsupervised NMT models, especially for closely related source languages (German) compared to more distant ones (Romanian, French), while requiring about an order of magnitude less training time."
}
Markdown (Informal)
[Efficient Unsupervised NMT for Related Languages with Cross-Lingual Language Models and Fidelity Objectives](https://aclanthology.org/2021.vardial-1.6/) (Aly et al., VarDial 2021)
ACL
Rami Aly, Andrew Caines, and Paula Buttery. 2021. [Efficient Unsupervised NMT for Related Languages with Cross-Lingual Language Models and Fidelity Objectives](https://aclanthology.org/2021.vardial-1.6/). In *Proceedings of the Eighth Workshop on NLP for Similar Languages, Varieties and Dialects*, pages 49–59, Kyiv, Ukraine. Association for Computational Linguistics.
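
For readers who want a concrete picture of the fine-tuning objective described in the abstract, below is a minimal, hypothetical sketch of a joint denoising-autoencoder plus paraphrasing ("fidelity") loss in Python/PyTorch. This is not the authors' code: the noise model, the `joint_loss` helper, and the assumed seq2seq interface (a callable returning per-token logits) are illustrative assumptions only.

```python
# Hypothetical sketch: a joint denoising-autoencoder + paraphrasing loss of the
# kind the abstract describes. The noise model, function names, and the assumed
# interface model(src_ids, tgt_ids) -> logits of shape (batch, tgt_len, vocab)
# are assumptions, not the paper's implementation.
import random

import torch
import torch.nn.functional as F


def add_noise(token_ids, drop_prob=0.1, shuffle_window=3):
    """Corrupt a token sequence by random drops and a local shuffle."""
    kept = [t for t in token_ids if random.random() > drop_prob]
    if not kept:                      # avoid an empty source sequence
        kept = list(token_ids[:1])
    noisy = list(kept)
    for i in range(len(noisy)):
        j = min(len(noisy) - 1, i + random.randint(0, shuffle_window))
        noisy[i], noisy[j] = noisy[j], noisy[i]
    return noisy


def joint_loss(model, sentence_ids, paraphrase_ids, vocab_size):
    """Denoising reconstruction loss + paraphrasing loss for one example."""
    clean = torch.tensor([sentence_ids])
    noisy = torch.tensor([add_noise(sentence_ids)])
    para = torch.tensor([paraphrase_ids])

    # Denoising autoencoder term: reconstruct the clean sentence from noise.
    dae_logits = model(noisy, clean)
    dae_loss = F.cross_entropy(dae_logits.reshape(-1, vocab_size),
                               clean.reshape(-1))

    # Fidelity/paraphrasing term: map the sentence to a known paraphrase.
    par_logits = model(clean, para)
    par_loss = F.cross_entropy(par_logits.reshape(-1, vocab_size),
                               para.reshape(-1))

    return dae_loss + par_loss


if __name__ == "__main__":
    VOCAB = 100

    def dummy_model(src_ids, tgt_ids):
        # Stand-in for a seq2seq transformer: random logits of the right
        # shape, just enough to exercise the loss computation end to end.
        return torch.randn(tgt_ids.size(0), tgt_ids.size(1), VOCAB)

    print(joint_loss(dummy_model, [5, 9, 2, 7], [5, 3, 7], VOCAB))
```

The dummy model at the bottom only exercises the tensor plumbing; in practice the callable would be a transformer initialized with cross-lingual language-model weights and fine-tuned on target-language monolingual data, as the abstract describes.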