@inproceedings{wang-etal-2021-gender,
title = "Are Gender-Neutral Queries Really Gender-Neutral? Mitigating Gender Bias in Image Search",
author = "Wang, Jialu and
Liu, Yang and
Wang, Xin",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.emnlp-main.151/",
doi = "10.18653/v1/2021.emnlp-main.151",
pages = "1995--2008",
    abstract = "Internet search affects people's cognition of the world, so mitigating biases in search results and learning fair models is imperative for social good. We study a unique gender bias in image search in this work: the search images are often gender-imbalanced for gender-neutral natural language queries. We diagnose two typical image search models, the specialized model trained on in-domain datasets and the generalized representation model pre-trained on massive image and text data across the internet. Both models suffer from severe gender bias. Therefore, we introduce two novel debiasing approaches: an in-processing fair sampling method to address the gender imbalance issue for training models, and a post-processing feature clipping method based on mutual information to debias multimodal representations of pre-trained models. Extensive experiments on MS-COCO and Flickr30K benchmarks show that our methods significantly reduce the gender bias in image search models."
}
Markdown (Informal)
[Are Gender-Neutral Queries Really Gender-Neutral? Mitigating Gender Bias in Image Search](https://aclanthology.org/2021.emnlp-main.151/) (Wang et al., EMNLP 2021)
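
The abstract sketches two debiasing ideas: balancing gender groups when sampling training data, and clipping the dimensions of pre-trained multimodal embeddings that carry the most mutual information with gender. Below is a minimal, illustrative Python sketch of both ideas, assuming NumPy arrays of embeddings and integer gender labels; the function names, the scikit-learn mutual-information estimator, and the `num_clip` knob are assumptions for illustration, not the authors' released implementation.

```python
import numpy as np
from sklearn.feature_selection import mutual_info_classif

def fair_sample(indices_by_gender: dict, batch_size: int,
                rng: np.random.Generator) -> np.ndarray:
    """In-processing idea: draw an equal number of training examples
    from each gender group so every batch is gender-balanced
    (hypothetical helper, not the paper's exact sampler)."""
    per_group = batch_size // len(indices_by_gender)
    batch = [rng.choice(idxs, size=per_group, replace=False)
             for idxs in indices_by_gender.values()]
    return np.concatenate(batch)

def clip_biased_features(embeddings: np.ndarray,
                         gender_labels: np.ndarray,
                         num_clip: int = 10) -> np.ndarray:
    """Post-processing idea: zero out the embedding dimensions with
    the highest estimated mutual information with gender."""
    # Estimate MI between each feature dimension and the gender label.
    mi = mutual_info_classif(embeddings, gender_labels)
    # Indices of the num_clip most gender-informative dimensions.
    biased_dims = np.argsort(mi)[-num_clip:]
    debiased = embeddings.copy()
    debiased[:, biased_dims] = 0.0  # "clip" those dimensions
    return debiased
```

Both sketches follow the high-level descriptions in the abstract; the actual methods, hyperparameters, and MI estimation in the paper may differ.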