@inproceedings{mohammed-niculae-2025-context,
title = "Context-Aware or Context-Insensitive? Assessing {LLM}s' Performance in Document-Level Translation",
author = "Mohammed, Wafaa and
Niculae, Vlad",
editor = "Bouillon, Pierrette and
Gerlach, Johanna and
Girletti, Sabrina and
Volkart, Lise and
Rubino, Raphael and
Sennrich, Rico and
Farinha, Ana C. and
Gaido, Marco and
Daems, Joke and
Kenny, Dorothy and
Moniz, Helena and
Szoc, Sara",
booktitle = "Proceedings of Machine Translation Summit XX: Volume 1",
month = jun,
year = "2025",
address = "Geneva, Switzerland",
publisher = "European Association for Machine Translation",
url = "https://preview.aclanthology.org/mtsummit-25-ingestion/2025.mtsummit-1.10/",
pages = "126--137",
ISBN = "978-2-9701897-0-1",
    abstract = "Large language models (LLMs) are increasingly strong contenders in machine translation. In this work, we focus on document-level translation, where some words cannot be translated without context from outside the sentence. Specifically, we investigate the ability of prominent LLMs to utilize the document context during translation through a perturbation analysis (analyzing models' robustness to perturbed and randomized document context) and an attribution analysis (examining the contribution of relevant context to the translation). We conduct an extensive evaluation across nine LLMs from diverse model families and training paradigms, including translation-specialized LLMs, alongside two encoder-decoder transformer baselines. We find that LLMs' improved document-translation performance compared to encoder-decoder models is not reflected in pronoun translation performance. Our analysis highlights the need for context-aware finetuning of LLMs with a focus on relevant parts of the context to improve their reliability for document-level translation."
}
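
The perturbation analysis described in the abstract can be illustrated with a minimal sketch: translate each sentence once with its true preceding context and once with a randomized context, then compare corpus-level quality. The code below is an assumption-laden illustration, not the paper's implementation; `translate_fn`, `perturbation_gap`, and the data layout are hypothetical, and sacrebleu is used only as a convenient metric.

```python
# Minimal sketch (not the paper's code): compare translation quality with the
# true preceding context vs. a randomized context of the same length.
# `translate_fn(src, context)` is a hypothetical hook you supply, e.g. a wrapper
# around an LLM prompt that prepends the context sentences.
import random
import sacrebleu

def perturbation_gap(docs, translate_fn, seed=0):
    """docs: list of documents, each a list of (source, reference) sentence pairs."""
    rng = random.Random(seed)
    hyps_true, hyps_rand, refs = [], [], []
    for doc in docs:
        sources = [src for src, _ in doc]
        for i, (src, ref) in enumerate(doc):
            true_ctx = sources[:i]                            # real preceding sentences
            rand_ctx = rng.sample(sources, k=len(true_ctx))   # random sentences from the document
            hyps_true.append(translate_fn(src, true_ctx))
            hyps_rand.append(translate_fn(src, rand_ctx))
            refs.append(ref)
    bleu_true = sacrebleu.corpus_bleu(hyps_true, [refs]).score
    bleu_rand = sacrebleu.corpus_bleu(hyps_rand, [refs]).score
    # A context-aware model should lose quality when its context is randomized,
    # so a gap near zero suggests the context is largely being ignored.
    return bleu_true - bleu_rand
```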