@inproceedings{berckmann-hiziroglu-2020-low,
  title     = {Low-Resource Translation as Language Modeling},
  author    = {Berckmann, Tucker and
               Hiziroglu, Berkan},
  editor    = {Barrault, Lo{\"i}c and
               Bojar, Ond{\v{r}}ej and
               Bougares, Fethi and
               Chatterjee, Rajen and
               Costa-juss{\`a}, Marta R. and
               Federmann, Christian and
               Fishel, Mark and
               Fraser, Alexander and
               Graham, Yvette and
               Guzman, Paco and
               Haddow, Barry and
               Huck, Matthias and
               Yepes, Antonio Jimeno and
               Koehn, Philipp and
               Martins, Andr{\'e} and
               Morishita, Makoto and
               Monz, Christof and
               Nagata, Masaaki and
               Nakazawa, Toshiaki and
               Negri, Matteo},
  booktitle = {Proceedings of the Fifth Conference on Machine Translation},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.wmt-1.127/},
  pages     = {1079--1083},
  abstract  = {We present our submission to the very low resource supervised machine translation task at WMT20. We use a decoder-only transformer architecture and formulate the translation task as language modeling. To address the low-resource aspect of the problem, we pretrain over a similar language parallel corpus. Then, we employ an intermediate back-translation step before fine-tuning. Finally, we present an analysis of the system's performance.},
}
Markdown (Informal)
[Low-Resource Translation as Language Modeling](https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.wmt-1.127/) (Berckmann & Hiziroglu, WMT 2020)
ACL