@inproceedings{jasonarson-steingrimsson-2025-ami,
title = "{AMI} at {WMT}25 General Translation Task: How Low Can We Go? Finetuning Lightweight Llama Models for Low Resource Machine Translation",
author = "Jasonarson, Atli and
Steingrimsson, Steinthor",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Tenth Conference on Machine Translation",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.wmt-1.46/",
pages = "695--704",
ISBN = "979-8-89176-341-8",
abstract = "We present the submission of the {\'A}rni Magn{\'u}sson Institute{'}s team for the WMT25 General translation task. We focus on the English-Icelandic translation direction. We pre-train Llama 3.2 3B on 10B tokens of English and Icelandic texts and fine-tune on parallel corpora. Multiple translation hypotheses are produced first by the fine-tuned model, and then more hypotheses are added by that same model further tuned using contrastive preference optimization. The hypotheses are then post-processed using a grammar correction model and post-processing rules before the final translation is selected using minimum Bayes risk decoding. We found that while it is possible to generate translations of decent quality based on a lightweight model with simple approaches such as the ones we apply, our models are quite far behind the best participating systems and it would probably take somewhat larger models to reach competitive levels."
}