@inproceedings{sobrevilla-cabezudo-pardo-2020-nilc-webnlg,
title = "{NILC} at {W}eb{NLG}+: Pretrained Sequence-to-Sequence Models on {RDF}-to-Text Generation",
author = "Sobrevilla Cabezudo, Marco Antonio and
Pardo, Thiago A. S.",
editor = "Castro Ferreira, Thiago and
Gardent, Claire and
Ilinykh, Nikolai and
van der Lee, Chris and
Mille, Simon and
Moussallem, Diego and
Shimorina, Anastasia",
booktitle = "Proceedings of the 3rd International Workshop on Natural Language Generation from the Semantic Web (WebNLG+)",
month = "12",
year = "2020",
address = "Dublin, Ireland (Virtual)",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.webnlg-1.14/",
pages = "131--136",
abstract = "This paper describes the submission by the NILC Computational Linguistics research group of the University of S{\~a}o Paulo/Brazil to the RDF-to-Text task for English at the WebNLG+ challenge. The success of the current pretrained models like BERT or GPT-2 in text-to-text generation tasks is well-known, however, its application/success on data-totext generation has not been well-studied and proven. This way, we explore how good a pretrained model, in particular BART, performs on the data-to-text generation task. The results obtained were worse than the baseline and other systems in almost all automatic measures. However, the human evaluation shows better results for our system. Besides, results suggest that BART may generate paraphrases of reference texts."
}
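
To make the setup described in the abstract concrete, here is a minimal sketch of RDF-to-text generation with a pretrained BART model via the HuggingFace `transformers` library. This is not the authors' code: the checkpoint name (`facebook/bart-base`), the triple-linearization scheme, and the example triples are illustrative assumptions; the paper's actual fine-tuning and input format are not specified in the abstract.

```python
# Hedged sketch: verbalize RDF triples with pretrained BART.
# Assumptions (not from the paper): checkpoint, linearization format, decoding params.
from transformers import BartForConditionalGeneration, BartTokenizer

tokenizer = BartTokenizer.from_pretrained("facebook/bart-base")
model = BartForConditionalGeneration.from_pretrained("facebook/bart-base")

# Hypothetical linearization: "subject | predicate | object", triples joined by " && ".
triples = [
    ("Alan_Bean", "occupation", "Test_pilot"),
    ("Alan_Bean", "birthPlace", "Wheeler,_Texas"),
]
source = " && ".join(" | ".join(t) for t in triples)

inputs = tokenizer(source, return_tensors="pt")
# In practice the model would first be fine-tuned on WebNLG triple-text pairs;
# without fine-tuning, BART tends to copy or paraphrase the linearized input.
output_ids = model.generate(**inputs, max_length=64, num_beams=4)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```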