@inproceedings{he-etal-2025-improve,
title = "Improve Fluency Of Neural Machine Translation Using Large Language Models",
author = "He, Jianfei and
Pan, Wenbo and
Yang, Jijia and
Peng, Sen and
Jia, Xiaohua",
editor = "Bouillon, Pierrette and
Gerlach, Johanna and
Girletti, Sabrina and
Volkart, Lise and
Rubino, Raphael and
Sennrich, Rico and
Farinha, Ana C. and
Gaido, Marco and
Daems, Joke and
Kenny, Dorothy and
Moniz, Helena and
Szoc, Sara",
booktitle = "Proceedings of Machine Translation Summit XX: Volume 1",
month = jun,
year = "2025",
address = "Geneva, Switzerland",
publisher = "European Association for Machine Translation",
url = "https://preview.aclanthology.org/mtsummit-25-ingestion/2025.mtsummit-1.5/",
pages = "54--64",
ISBN = "978-2-9701897-0-1",
abstract = "Large language models (LLMs) demonstrate significant capabilities in many natural language processing tasks. However, their performance in machine translation still lags behind models specially trained for machine translation with an encoder-decoder architecture. This paper investigates how to improve neural machine translation (NMT) with LLMs. Our proposal is based on an empirical insight that NMT achieves worse fluency than human translation. We propose to use LLMs to enhance the fluency of NMT{'}s generation by integrating a language model at the target side. We use contrastive learning to constrain fluency so that it does not exceed that of the LLMs. Our experiments on three language pairs show that this method can improve the performance of NMT. Our empirical analysis further demonstrates that this method improves fluency at the target side. Our experiments also show that some straightforward post-processing methods using LLMs, such as re-ranking and refinement, are not effective."
}
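The abstract describes integrating a language model at the target side of an NMT system to improve fluency. As a rough, generic illustration of that idea only (not the paper's actual formulation, which also involves contrastive learning during training), the sketch below performs log-linear fusion of an NMT model's next-token distribution with an LLM's distribution at a single decoding step; the weight `lam` and the toy distributions are assumptions for illustration.

```python
import numpy as np

def fuse_next_token_probs(p_nmt, p_llm, lam=0.3):
    """Log-linear interpolation of NMT and LLM next-token distributions.

    p_nmt, p_llm: probability vectors over the target vocabulary.
    lam: weight on the LLM (an assumed hyperparameter, not from the paper).
    """
    log_p = (1.0 - lam) * np.log(p_nmt + 1e-12) + lam * np.log(p_llm + 1e-12)
    p = np.exp(log_p - log_p.max())  # stabilize before normalizing
    return p / p.sum()

# Toy example over a 5-token vocabulary: the LLM prefers a more fluent token,
# so the fused distribution shifts probability mass toward it.
p_nmt = np.array([0.50, 0.30, 0.10, 0.05, 0.05])
p_llm = np.array([0.20, 0.60, 0.10, 0.05, 0.05])
print(fuse_next_token_probs(p_nmt, p_llm))
```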