@inproceedings{sabou-etal-2014-corpus,
  title     = {Corpus Annotation through Crowdsourcing: Towards Best Practice Guidelines},
  author    = {Sabou, Marta and
               Bontcheva, Kalina and
               Derczynski, Leon and
               Scharl, Arno},
  editor    = {Calzolari, Nicoletta and
               Choukri, Khalid and
               Declerck, Thierry and
               Loftsson, Hrafn and
               Maegaard, Bente and
               Mariani, Joseph and
               Moreno, Asuncion and
               Odijk, Jan and
               Piperidis, Stelios},
  booktitle = {Proceedings of the Ninth International Conference on Language Resources and Evaluation ({LREC}'14)},
  month     = may,
  year      = {2014},
  address   = {Reykjavik, Iceland},
  publisher = {European Language Resources Association (ELRA)},
  url       = {https://preview.aclanthology.org/jlcl-multiple-ingestion/L14-1412/},
  pages     = {859--866},
  abstract  = {Crowdsourcing is an emerging collaborative approach that can be used for the acquisition of annotated corpora and a wide range of other linguistic resources. Although the use of this approach is intensifying in all its key genres (paid-for crowdsourcing, games with a purpose, volunteering-based approaches), the community still lacks a set of best-practice guidelines similar to the annotation best practices for traditional, expert-based corpus acquisition. In this paper we focus on the use of crowdsourcing methods for corpus acquisition and propose a set of best practice guidelines based in our own experiences in this area and an overview of related literature. We also introduce GATE Crowd, a plugin of the GATE platform that relies on these guidelines and offers tool support for using crowdsourcing in a more principled and efficient manner.},
}
@comment{
Markdown (Informal)
[Corpus Annotation through Crowdsourcing: Towards Best Practice Guidelines](https://preview.aclanthology.org/jlcl-multiple-ingestion/L14-1412/) (Sabou et al., LREC 2014)
ACL
}