@inproceedings{choi-etal-2025-bridg,
title = "{BridG} {MT}: Enhancing {LLMs}' Machine Translation Capabilities with Sentence Bridging and Gradual {MT}",
author = "Choi, Seungwoo and
Yoo, Gahyun and
Lee, Jay-Yoon",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.findings-acl.1336/",
doi = "10.18653/v1/2025.findings-acl.1336",
pages = "26018--26042",
isbn = "979-8-89176-256-5",
abstract = "Recent Large Language Models (LLMs) have demonstrated impressive translation performance without requiring fine-tuning on additional parallel corpora. However, they still face significant challenges in certain scenarios, particularly when translating low-resource languages. A common approach to address this issue is to provide external knowledge, such as few-shot examples, to assist LLMs in translating specific source sentences. However, this method is fundamentally limited by the quality or quantity of relevant sources, which cannot always be guaranteed. To reduce LLMs' reliance on external sources, we propose BridG MT, a method that combines Sentence Bridging, which generates a sequence of sentences as a bridge that gradually transition from easy-to-translate to more difficult, and Gradual MT, which sequentially translates these sentences using earlier translations as few-shot examples for subsequent ones. Experiments conducted on four LLMs across seven languages demonstrate that our method effectively enhances translation performance, even outperforming translation methods that rely on a large number of few-shot examples."
}
Markdown (Informal):
[BridG MT: Enhancing LLMs' Machine Translation Capabilities with Sentence Bridging and Gradual MT](https://aclanthology.org/2025.findings-acl.1336/) (Choi et al., Findings of ACL 2025)