@inproceedings{gkovedarou-etal-2025-gender,
    title = "Gender Bias in {English}-to-{Greek} Machine Translation",
    author = "Gkovedarou, Eleni and
      Daems, Joke and
      De Bruyne, Luna",
    editor = "Hackenbuchner, Jani{\c{c}}a and
      Bentivogli, Luisa and
      Daems, Joke and
      Manna, Chiara and
      Savoldi, Beatrice and
      Vanmassenhove, Eva",
    booktitle = "Proceedings of the 3rd Workshop on Gender-Inclusive Translation Technologies (GITT 2025)",
    month = jun,
    year = "2025",
    address = "Geneva, Switzerland",
    publisher = "European Association for Machine Translation",
    url = "https://aclanthology.org/2025.gitt-1.2/",
    pages = "17--45",
    isbn = "978-2-9701897-4-9",
    abstract = "As the demand for inclusive language increases, concern has grown over the susceptibility of machine translation (MT) systems to reinforce gender stereotypes. This study investigates gender bias in two commercial MT systems, Google Translate and DeepL, focusing on the understudied English-to-Greek language pair. We address three aspects of gender bias: i) male bias, ii) occupational stereotyping, and iii) errors in anti-stereotypical translations. Additionally, we explore the potential of prompted GPT-4o as a bias mitigation tool that provides both gender-explicit and gender-neutral alternatives when necessary. To achieve this, we introduce GendEL, a manually crafted bilingual dataset of 240 gender-ambiguous and unambiguous sentences that feature stereotypical occupational nouns and adjectives. We find persistent gender bias in translations by both MT systems; while they perform well in cases where gender is explicitly defined, with DeepL outperforming both Google Translate and GPT-4o in feminine gender-unambiguous sentences, they are far from producing gender-inclusive or neutral translations when the gender is unspecified. GPT-4o shows promise, generating appropriate gendered and neutral alternatives for most ambiguous cases, though residual biases remain evident. As one of the first comprehensive studies on gender bias in English-to-Greek MT, we provide both our data and code at [github link]."
}
Markdown (Informal)
[Gender Bias in English-to-Greek Machine Translation](https://aclanthology.org/2025.gitt-1.2/) (Gkovedarou et al., GITT 2025)
ACL
- Eleni Gkovedarou, Joke Daems, and Luna De Bruyne. 2025. Gender Bias in English-to-Greek Machine Translation. In Proceedings of the 3rd Workshop on Gender-Inclusive Translation Technologies (GITT 2025), pages 17–45, Geneva, Switzerland. European Association for Machine Translation.