@inproceedings{choenni-etal-2021-stepmothers,
title = "Stepmothers are mean and academics are pretentious: What do pretrained language models learn about you?",
author = "Choenni, Rochelle and
Shutova, Ekaterina and
van Rooij, Robert",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.emnlp-main.111/",
doi = "10.18653/v1/2021.emnlp-main.111",
pages = "1477--1491",
abstract = "In this paper, we investigate what types of stereotypical information are captured by pretrained language models. We present the first dataset comprising stereotypical attributes of a range of social groups and propose a method to elicit stereotypes encoded by pretrained language models in an unsupervised fashion. Moreover, we link the emergent stereotypes to their manifestation as basic emotions as a means to study their emotional effects in a more generalized manner. To demonstrate how our methods can be used to analyze emotion and stereotype shifts due to linguistic experience, we use fine-tuning on news sources as a case study. Our experiments expose how attitudes towards different social groups vary across models and how quickly emotions and stereotypes can shift at the fine-tuning stage."
}
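
The abstract describes eliciting stereotypes from a pretrained language model in an unsupervised fashion. The sketch below is a minimal illustration of one way to do this, cloze-style probing of a masked language model with the HuggingFace transformers library; the prompt template, target group, and model choice are illustrative assumptions, not the authors' exact setup.

```python
# Minimal sketch of unsupervised stereotype elicitation via masked-LM cloze
# probing. Not the authors' exact method: the prompt template, model choice,
# and target group below are illustrative assumptions.
from transformers import pipeline

# Fill-mask pipeline over a standard pretrained BERT model (assumption).
fill_mask = pipeline("fill-mask", model="bert-base-uncased")

# Cloze-style prompt: the model's top completions hint at the attributes it
# associates with the social group named in the prompt.
prompt = "Why are stepmothers so [MASK]?"

# Inspect the top-k predicted tokens and their probabilities; these serve as
# a rough, unsupervised readout of the model's learned associations.
for prediction in fill_mask(prompt, top_k=5):
    print(f"{prediction['token_str']:>12}  p={prediction['score']:.3f}")
```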