@inproceedings{oortwijn-etal-2021-interrater,
  title     = {Interrater Disagreement Resolution: A Systematic Procedure to Reach Consensus in Annotation Tasks},
  author    = {Oortwijn, Yvette and
               Ossenkoppele, Thijs and
               Betti, Arianna},
  editor    = {Belz, Anya and
               Agarwal, Shubham and
               Graham, Yvette and
               Reiter, Ehud and
               Shimorina, Anastasia},
  booktitle = {Proceedings of the Workshop on Human Evaluation of {NLP} Systems ({HumEval})},
  month     = apr,
  year      = {2021},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.humeval-1.15/},
  pages     = {131--141},
  abstract  = {We present a systematic procedure for interrater disagreement resolution. The procedure is general, but of particular use in multiple-annotator tasks geared towards ground truth construction. We motivate our proposal by arguing that, barring cases in which the researchers' goal is to elicit different viewpoints, interrater disagreement is a sign of poor quality in the design or the description of a task. Consensus among annotators, we maintain, should be striven for, through a systematic procedure for disagreement resolution such as the one we describe.},
}
Markdown (Informal)
[Interrater Disagreement Resolution: A Systematic Procedure to Reach Consensus in Annotation Tasks](https://aclanthology.org/2021.humeval-1.15/) (Oortwijn et al., HumEval 2021)
ACL