@inproceedings{kasner-dusek-2020-train,
title = "Train Hard, Finetune Easy: Multilingual Denoising for {RDF}-to-Text Generation",
author = "Kasner, Zden{\v{e}}k and
Du{\v{s}}ek, Ond{\v{r}}ej",
editor = "Castro Ferreira, Thiago and
Gardent, Claire and
Ilinykh, Nikolai and
van der Lee, Chris and
Mille, Simon and
Moussallem, Diego and
Shimorina, Anastasia",
booktitle = "Proceedings of the 3rd International Workshop on Natural Language Generation from the Semantic Web (WebNLG+)",
month = "12",
year = "2020",
address = "Dublin, Ireland (Virtual)",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.webnlg-1.20/",
pages = "171--176",
    abstract = "We describe our system for the RDF-to-text generation task of the WebNLG Challenge 2020. We base our approach on the mBART model, which is pre-trained for multilingual denoising. This allows us to use a simple, identical, end-to-end setup for both English and Russian. Requiring minimal task- or language-specific effort, our model placed in the first third of the leaderboard for English and first or second for Russian on automatic metrics, and it made it into the best or second-best system cluster on human evaluation."
}
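
As a rough illustration of the setup the abstract describes (fine-tuning a multilingual-denoising mBART checkpoint on linearized RDF triples), a minimal sketch with Hugging Face `transformers` might look like the following. The checkpoint name, triple linearization format, and example data are assumptions for illustration, not the authors' released code or data format.

```python
# Minimal sketch: fine-tuning-style loss computation for RDF-to-text with mBART.
# Assumptions: facebook/mbart-large-cc25 checkpoint and an illustrative
# <S>/<P>/<O> triple linearization (not necessarily the paper's exact format).
from transformers import MBartForConditionalGeneration, MBartTokenizer

model_name = "facebook/mbart-large-cc25"  # mBART pre-trained with multilingual denoising
tokenizer = MBartTokenizer.from_pretrained(model_name, src_lang="en_XX", tgt_lang="en_XX")
model = MBartForConditionalGeneration.from_pretrained(model_name)

# Linearize one RDF triple set into a flat input string (hypothetical scheme).
triples = [("Alan_Bean", "occupation", "Test_pilot")]
source = " ".join(f"<S> {s} <P> {p} <O> {o}" for s, p, o in triples)
target = "Alan Bean worked as a test pilot."

# Tokenize source and reference text; the returned loss is what a
# fine-tuning loop would backpropagate.
batch = tokenizer(source, text_target=target, return_tensors="pt")
loss = model(input_ids=batch["input_ids"], labels=batch["labels"]).loss
print(loss.item())
```

The same script would apply unchanged to Russian by switching `src_lang`/`tgt_lang` to `ru_RU`, which is the "simple, identical, end-to-end setup" the abstract refers to.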