@inproceedings{slovikovskaya-attardi-2020-transfer,
  title     = {Transfer Learning from Transformers to Fake News Challenge Stance Detection ({FNC}-1) Task},
  author    = {Slovikovskaya, Valeriya and
               Attardi, Giuseppe},
  editor    = {Calzolari, Nicoletta and
               B{\'e}chet, Fr{\'e}d{\'e}ric and
               Blache, Philippe and
               Choukri, Khalid and
               Cieri, Christopher and
               Declerck, Thierry and
               Goggi, Sara and
               Isahara, Hitoshi and
               Maegaard, Bente and
               Mariani, Joseph and
               Mazo, H{\'e}l{\`e}ne and
               Moreno, Asuncion and
               Odijk, Jan and
               Piperidis, Stelios},
  booktitle = {Proceedings of the Twelfth Language Resources and Evaluation Conference},
  month     = may,
  year      = {2020},
  address   = {Marseille, France},
  publisher = {European Language Resources Association},
  url       = {https://aclanthology.org/2020.lrec-1.152/},
  pages     = {1211--1218},
  language  = {eng},
  isbn      = {979-10-95546-34-4},
  abstract  = {Transformer models, trained and publicly released over the last couple of years, have proved effective in many NLP tasks. We wished to test their usefulness in particular on the stance detection task. We performed experiments on the data from the Fake News Challenge Stage 1 (FNC-1). We were indeed able to improve the reported SotA on the challenge, by exploiting the generalization power of large language models based on Transformer architecture. Specifically (1) we improved the FNC-1 best performing model adding BERT sentence embedding of input sequences as a model feature, (2) we fine-tuned BERT, XLNet, and RoBERTa transformers on FNC-1 extended dataset and obtained state-of-the-art results on FNC-1 task.},
}
Markdown (Informal)
[Transfer Learning from Transformers to Fake News Challenge Stance Detection (FNC-1) Task](https://aclanthology.org/2020.lrec-1.152/) (Slovikovskaya & Attardi, LREC 2020)
ACL