@inproceedings{giri-etal-2025-enriching,
    title     = "Enriching the Low-Resource Neural Machine Translation with Large Language Model",
    author    = "Giri, Sachin and
      Ninomiya, Takashi and
      Goto, Isao",
    editor    = "T.y.s.s, Santosh and
      Shimizu, Shuichiro and
      Gong, Yifan",
    booktitle = "The 14th International Joint Conference on Natural Language Processing and The 4th Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics",
    month     = dec,
    year      = "2025",
    address   = "Mumbai, India",
    publisher = "Association for Computational Linguistics",
    url       = "https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.ijcnlp-srw.16/",
    pages     = "184--192",
    isbn      = "979-8-89176-304-3",
    abstract  = "Improving the performance of neural machine translation for low-resource languages is challenging due to the limited availability of parallel corpora. However, recently available Large Language Models (LLM) have demonstrated superior performance in various natural language processing tasks, including translation. In this work, we propose to incorporate an LLM into a Machine Translation (MT) model as a prior distribution to leverage its translation capabilities. The LLM acts as a teacher, instructing the student MT model about the target language. We conducted an experiment in four language pairs: English {\ensuremath{\Leftrightarrow}} German and English {\ensuremath{\Leftrightarrow}} Hindi. This resulted in improved BLEU and COMET scores in a low-resource setting."
}

@comment{Informal markdown citation copied from the ACL Anthology export page:
  [Enriching the Low-Resource Neural Machine Translation with Large Language Model](https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.ijcnlp-srw.16/) (Giri et al., IJCNLP 2025)
  ACL
}