@inproceedings{slovikovskaya-attardi-2020-transfer,
title = "Transfer Learning from Transformers to Fake News Challenge Stance Detection ({FNC}-1) Task",
author = "Slovikovskaya, Valeriya and
Attardi, Giuseppe",
booktitle = "Proceedings of the 12th Language Resources and Evaluation Conference",
month = may,
year = "2020",
address = "Marseille, France",
publisher = "European Language Resources Association",
url = "https://aclanthology.org/2020.lrec-1.152",
pages = "1211--1218",
abstract = "Transformer models, trained and publicly released over the last couple of years, have proved effective in many NLP tasks. We wished to test their usefulness in particular on the stance detection task. We performed experiments on the data from the Fake News Challenge Stage 1 (FNC-1). We were indeed able to improve the reported SotA on the challenge, by exploiting the generalization power of large language models based on Transformer architecture. Specifically (1) we improved the FNC-1 best performing model adding BERT sentence embedding of input sequences as a model feature, (2) we fine-tuned BERT, XLNet, and RoBERTa transformers on FNC-1 extended dataset and obtained state-of-the-art results on FNC-1 task.",
language = "English",
ISBN = "979-10-95546-34-4",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="slovikovskaya-attardi-2020-transfer">
    <titleInfo>
      <title>Transfer Learning from Transformers to Fake News Challenge Stance Detection (FNC-1) Task</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Valeriya</namePart>
      <namePart type="family">Slovikovskaya</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Giuseppe</namePart>
      <namePart type="family">Attardi</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-may</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <language>
      <languageTerm type="text">English</languageTerm>
      <languageTerm type="code" authority="iso639-2b">eng</languageTerm>
    </language>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 12th Language Resources and Evaluation Conference</title>
      </titleInfo>
      <originInfo>
        <publisher>European Language Resources Association</publisher>
        <place>
          <placeTerm type="text">Marseille, France</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
      <identifier type="isbn">979-10-95546-34-4</identifier>
    </relatedItem>
    <abstract>Transformer models, trained and publicly released over the last couple of years, have proved effective on many NLP tasks. We set out to test their usefulness on the stance detection task in particular, performing experiments on data from the Fake News Challenge Stage 1 (FNC-1). By exploiting the generalization power of large Transformer-based language models, we were indeed able to improve on the reported state of the art for the challenge. Specifically, (1) we improved the best-performing FNC-1 model by adding BERT sentence embeddings of the input sequences as a model feature, and (2) we fine-tuned the BERT, XLNet, and RoBERTa transformers on the extended FNC-1 dataset, obtaining state-of-the-art results on the FNC-1 task.</abstract>
<identifier type="citekey">slovikovskaya-attardi-2020-transfer</identifier>
<location>
<url>https://aclanthology.org/2020.lrec-1.152</url>
</location>
<part>
<date>2020-may</date>
<extent unit="page">
<start>1211</start>
<end>1218</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Transfer Learning from Transformers to Fake News Challenge Stance Detection (FNC-1) Task
%A Slovikovskaya, Valeriya
%A Attardi, Giuseppe
%S Proceedings of the 12th Language Resources and Evaluation Conference
%D 2020
%8 may
%I European Language Resources Association
%C Marseille, France
%@ 979-10-95546-34-4
%G English
%F slovikovskaya-attardi-2020-transfer
%X Transformer models, trained and publicly released over the last couple of years, have proved effective on many NLP tasks. We set out to test their usefulness on the stance detection task in particular, performing experiments on data from the Fake News Challenge Stage 1 (FNC-1). By exploiting the generalization power of large Transformer-based language models, we were indeed able to improve on the reported state of the art for the challenge. Specifically, (1) we improved the best-performing FNC-1 model by adding BERT sentence embeddings of the input sequences as a model feature, and (2) we fine-tuned the BERT, XLNet, and RoBERTa transformers on the extended FNC-1 dataset, obtaining state-of-the-art results on the FNC-1 task.
%U https://aclanthology.org/2020.lrec-1.152
%P 1211-1218
Markdown (Informal)

[Transfer Learning from Transformers to Fake News Challenge Stance Detection (FNC-1) Task](https://aclanthology.org/2020.lrec-1.152) (Slovikovskaya & Attardi, LREC 2020)

ACL

Valeriya Slovikovskaya and Giuseppe Attardi. 2020. Transfer Learning from Transformers to Fake News Challenge Stance Detection (FNC-1) Task. In Proceedings of the 12th Language Resources and Evaluation Conference, pages 1211–1218, Marseille, France. European Language Resources Association.