@inproceedings{asheghi-etal-2014-designing,
title = "Designing and Evaluating a Reliable Corpus of Web Genres via Crowd-Sourcing",
author = "Asheghi, Noushin Rezapour and
Sharoff, Serge and
Markert, Katja",
booktitle = "Proceedings of the Ninth International Conference on Language Resources and Evaluation ({LREC}'14)",
month = may,
year = "2014",
address = "Reykjavik, Iceland",
publisher = "European Language Resources Association (ELRA)",
url = "http://www.lrec-conf.org/proceedings/lrec2014/pdf/470_Paper.pdf",
pages = "1339--1346",
abstract = "Research in Natural Language Processing often relies on a large collection of manually annotated documents. However, currently there is no reliable genre-annotated corpus of web pages to be employed in Automatic Genre Identification (AGI). In AGI, documents are classified based on their genres rather than their topics or subjects. The major shortcoming of available web genre collections is their relatively low inter-coder agreement. Reliability of annotated data is an essential factor for reliability of the research result. In this paper, we present the first web genre corpus which is reliably annotated. We developed precise and consistent annotation guidelines which consist of well-defined and well-recognized categories. For annotating the corpus, we used crowd-sourcing which is a novel approach in genre annotation. We computed the overall as well as the individual categories{'} chance-corrected inter-annotator agreement. The results show that the corpus has been annotated reliably.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="asheghi-etal-2014-designing">
<titleInfo>
<title>Designing and Evaluating a Reliable Corpus of Web Genres via Crowd-Sourcing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Noushin</namePart>
<namePart type="given">Rezapour</namePart>
<namePart type="family">Asheghi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Serge</namePart>
<namePart type="family">Sharoff</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Katja</namePart>
<namePart type="family">Markert</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2014-05</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Ninth International Conference on Language Resources and Evaluation (LREC’14)</title>
</titleInfo>
<originInfo>
<publisher>European Language Resources Association (ELRA)</publisher>
<place>
<placeTerm type="text">Reykjavik, Iceland</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Research in Natural Language Processing often relies on a large collection of manually annotated documents. However, currently there is no reliable genre-annotated corpus of web pages to be employed in Automatic Genre Identification (AGI). In AGI, documents are classified based on their genres rather than their topics or subjects. The major shortcoming of available web genre collections is their relatively low inter-coder agreement. Reliability of annotated data is an essential factor for reliability of the research result. In this paper, we present the first web genre corpus which is reliably annotated. We developed precise and consistent annotation guidelines which consist of well-defined and well-recognized categories. For annotating the corpus, we used crowd-sourcing which is a novel approach in genre annotation. We computed the overall as well as the individual categories’ chance-corrected inter-annotator agreement. The results show that the corpus has been annotated reliably.</abstract>
<identifier type="citekey">asheghi-etal-2014-designing</identifier>
<location>
<url>http://www.lrec-conf.org/proceedings/lrec2014/pdf/470_Paper.pdf</url>
</location>
<part>
<date>2014-05</date>
<extent unit="page">
<start>1339</start>
<end>1346</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Designing and Evaluating a Reliable Corpus of Web Genres via Crowd-Sourcing
%A Asheghi, Noushin Rezapour
%A Sharoff, Serge
%A Markert, Katja
%S Proceedings of the Ninth International Conference on Language Resources and Evaluation (LREC’14)
%D 2014
%8 may
%I European Language Resources Association (ELRA)
%C Reykjavik, Iceland
%F asheghi-etal-2014-designing
%X Research in Natural Language Processing often relies on a large collection of manually annotated documents. However, currently there is no reliable genre-annotated corpus of web pages to be employed in Automatic Genre Identification (AGI). In AGI, documents are classified based on their genres rather than their topics or subjects. The major shortcoming of available web genre collections is their relatively low inter-coder agreement. Reliability of annotated data is an essential factor for reliability of the research result. In this paper, we present the first web genre corpus which is reliably annotated. We developed precise and consistent annotation guidelines which consist of well-defined and well-recognized categories. For annotating the corpus, we used crowd-sourcing which is a novel approach in genre annotation. We computed the overall as well as the individual categories’ chance-corrected inter-annotator agreement. The results show that the corpus has been annotated reliably.
%U http://www.lrec-conf.org/proceedings/lrec2014/pdf/470_Paper.pdf
%P 1339-1346
Markdown (Informal)
[Designing and Evaluating a Reliable Corpus of Web Genres via Crowd-Sourcing](http://www.lrec-conf.org/proceedings/lrec2014/pdf/470_Paper.pdf) (Asheghi et al., LREC 2014)
ACL