@inproceedings{tran-utiyama-2025-enhanced,
title = "Enhanced Zero-Shot Machine Translation via Fixed Prefix Pair Bootstrapping",
author = "Tran, Van-Hien and
Utiyama, Masao",
editor = "Ojha, Atul Kr. and
Liu, Chao-hong and
Vylomova, Ekaterina and
Pirinen, Flammie and
Washington, Jonathan and
Oco, Nathaniel and
Zhao, Xiaobing",
booktitle = "Proceedings of the Eighth Workshop on Technologies for Machine Translation of Low-Resource Languages (LoResMT 2025)",
month = may,
year = "2025",
address = "Albuquerque, New Mexico, U.S.A.",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.loresmt-1.2/",
pages = "10--15",
ISBN = "979-8-89176-230-5",
abstract = "Zero-shot in-context learning allows large language models (LLMs) to perform tasks using only provided instructions. However, pre-trained LLMs often face calibration issues in zero-shot scenarios, leading to challenges such as hallucinations and off-target translations that compromise output quality, particularly in machine translation (MT). This paper introduces a new method to improve zero-shot MT using fixed prefix pair bootstrapping. By initializing translations with an accurate bilingual prefix pair at the start of both source and target sentences, this approach effectively guides the model to generate precise target-language outputs. Extensive evaluations across four model architectures and multiple translation directions demonstrate significant and consistent improvements, showcasing the potential of this straightforward strategy to enhance zero-shot MT performance."
}
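The abstract describes seeding the translation with a fixed, accurate bilingual prefix pair prepended to both the source and target sentences so the model continues in the correct target language. The following is a minimal sketch of that idea based only on the abstract, not the authors' released code; the prompt template, the example prefix pair, the function name, and the post-generation prefix stripping are all illustrative assumptions.

```python
# Minimal sketch (assumptions, not the paper's implementation): build a
# zero-shot MT prompt in which a fixed, known-correct bilingual prefix pair
# starts both the source and the target side. Generation resumes right after
# the target-side prefix, which anchors the output in the target language.

def build_bootstrapped_prompt(source_sentence: str,
                              src_lang: str,
                              tgt_lang: str,
                              src_prefix: str,
                              tgt_prefix: str) -> str:
    """Return a zero-shot translation prompt whose source and target sides
    both begin with the fixed prefix pair; the LLM completes the target side."""
    instruction = f"Translate the following sentence from {src_lang} to {tgt_lang}."
    return (
        f"{instruction}\n"
        f"{src_lang}: {src_prefix} {source_sentence}\n"
        f"{tgt_lang}: {tgt_prefix}"
    )


if __name__ == "__main__":
    # Hypothetical prefix pair; the paper's actual prefixes may differ.
    prompt = build_bootstrapped_prompt(
        source_sentence="Das Wetter ist heute schön.",
        src_lang="German",
        tgt_lang="English",
        src_prefix="Übrigens,",
        tgt_prefix="By the way,",
    )
    print(prompt)
    # Feed `prompt` to a decoder-only LLM and let it continue; presumably the
    # fixed target prefix is stripped from the generated output afterwards.
```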