@inproceedings{staliunaite-etal-2026-uncertainty,
    title = "Uncertainty Quantification for Evaluating Gender Bias in Machine Translation",
    author = "Staliunaite, Ieva and
      Cheng, Julius and
      Vlachos, Andreas",
    editor = "Demberg, Vera and
      Inui, Kentaro and
      M{\`a}rquez, Llu{\'i}s",
    booktitle = "Findings of the {A}ssociation for {C}omputational {L}inguistics: {EACL} 2026",
    month = mar,
    year = "2026",
    address = "Rabat, Morocco",
    publisher = "Association for Computational Linguistics",
    url = "https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.116/",
    pages = "2204--2225",
    isbn = "979-8-89176-386-9",
    internal-note = {URL is a preview ingest link -- replace with the canonical aclanthology.org URL once the EACL 2026 volume is published},
    abstract = "The predictive uncertainty of machine translation (MT) models is typically used as a quality estimation proxy. In this work, we posit that apart from confidently translating when a single correct translation exists, models should also maintain uncertainty when the input is ambiguous. We use uncertainty to measure gender bias in MT systems. When the source sentence includes a lexeme whose gender is not overtly marked, but whose target-language equivalent requires gender specification, the model must infer the appropriate gender from the context and can be susceptible to biases. Prior work measured bias via gender accuracy, however it cannot be applied to ambiguous cases. Using semantic uncertainty, we are able to assess bias when translating both ambiguous and unambiguous source sentences, and find that high translation accuracy does not correlate with exhibiting uncertainty appropriately, and that debiasing affects the two cases differently."
}

@comment{
  Web-page residue from the ACL Anthology export, kept for reference:
  Markdown (Informal)
  [Uncertainty Quantification for Evaluating Gender Bias in Machine Translation](https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.116/) (Staliunaite et al., Findings 2026)
  ACL
}