@inproceedings{wiegand-etal-2021-implicitly,
    title = "Implicitly Abusive Comparisons {--} A New Dataset and Linguistic Analysis",
    author = "Wiegand, Michael and
      Geulig, Maja and
      Ruppenhofer, Josef",
    editor = "Merlo, Paola and
      Tiedemann, J{\"o}rg and
      Tsarfaty, Reut",
    booktitle = "Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Main Volume",
    month = apr,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.eacl-main.27/",
    doi = "10.18653/v1/2021.eacl-main.27",
    pages = "358--368",
    abstract = "We examine the task of detecting implicitly abusive comparisons (e.g. ``Your hair looks like you have been electrocuted''). Implicitly abusive comparisons are abusive comparisons in which abusive words (e.g. ``dumbass'' or ``scum'') are absent. We detail the process of creating a novel dataset for this task via crowdsourcing that includes several measures to obtain a sufficiently representative and unbiased set of comparisons. We also present classification experiments that include a range of linguistic features that help us better understand the mechanisms underlying abusive comparisons.",
}
Markdown (Informal)
[Implicitly Abusive Comparisons – A New Dataset and Linguistic Analysis](https://aclanthology.org/2021.eacl-main.27/) (Wiegand et al., EACL 2021)
ACL