@inproceedings{korotkova-fishel-2023-distilling,
title = "Distilling {E}stonian Text Domains for Production-Oriented Machine Translation",
author = "Korotkova, Elizaveta and
Fishel, Mark",
editor = {Alum{\"a}e, Tanel and
Fishel, Mark},
booktitle = "Proceedings of the 24th Nordic Conference on Computational Linguistics (NoDaLiDa)",
month = may,
year = "2023",
address = "T{\'o}rshavn, Faroe Islands",
publisher = "University of Tartu Library",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.nodalida-1.78/",
pages = "772--781",
abstract = "This paper explores knowledge distillation for multi-domain neural machine translation (NMT). We focus on the Estonian-English translation direction and experiment with distilling the knowledge of multiple domain-specific teacher models into a single student model that is tiny and efficient. Our experiments use a large parallel dataset of 18 million sentence pairs, consisting of 10 corpora, divided into 6 domain groups based on source similarity, and incorporate forward-translated monolingual data. Results show that tiny student models can cope with multiple domains even in case of large corpora, with different approaches benefiting frequent and low-resource domains."
}