@inproceedings{grubisic-korencic-2025-irb,
  title     = {{IRB}-{MT} at {WMT}25 Translation Task: A Simple Agentic System Using an Off-the-Shelf {LLM}},
  author    = {Grubi{\v{s}}i{\'c}, Ivan and
               Korencic, Damir},
  editor    = {Haddow, Barry and
               Kocmi, Tom and
               Koehn, Philipp and
               Monz, Christof},
  booktitle = {Proceedings of the Tenth Conference on Machine Translation},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-emnlp/2025.wmt-1.51/},
  pages     = {753--764},
  isbn      = {979-8-89176-341-8},
  abstract  = {Large Language Models (LLMs) have been demonstrated to achieve state-of-art results on machine translation. LLM-based translation systems usually rely on model adaptation and fine-tuning, requiring datasets and compute. The goal of our team{'}s participation in the ``General Machine Translation'' and ``Multilingual'' tasks of WMT25 was to evaluate the translation effectiveness of a resource-efficient solution consisting of a smaller off-the-shelf LLM coupled with a self-refine agentic workflow. Our approach requires a high-quality multilingual LLM capable of instruction following. We select Gemma3-12B among several candidates using the pretrained translation metric MetricX-24 and a small development dataset. WMT25 automatic evaluations place our solution in the mid tier of all WMT25 systems, and also demonstrate that it can perform competitively for approximately 16{\%} of language pairs.},
}
Markdown (Informal)
[IRB-MT at WMT25 Translation Task: A Simple Agentic System Using an Off-the-Shelf LLM](https://preview.aclanthology.org/ingest-emnlp/2025.wmt-1.51/) (Grubišić & Korencic, WMT 2025)
ACL