@inproceedings{howcroft-gkatzia-2023-enunlg,
title = "enunlg: a Python library for reproducible neural data-to-text experimentation",
author = "Howcroft, David M. and
Gkatzia, Dimitra",
editor = "Keet, C. Maria and
Lee, Hung-Yi and
Zarrie{\ss}, Sina",
booktitle = "Proceedings of the 16th International Natural Language Generation Conference: System Demonstrations",
month = sep,
year = "2023",
address = "Prague, Czechia",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.inlg-demos.2",
pages = "4--5",
abstract = "Over the past decade, a variety of neural architectures for data-to-text generation (NLG) have been proposed. However, each system typically has its own approach to pre- and post-processing and other implementation details. Diversity in implementations is desirable, but it also confounds attempts to compare model performance: are the differences due to the proposed architectures or are they a byproduct of the libraries used or a result of pre- and post-processing decisions made? To improve reproducibility, we re-implement several pre-Transformer neural models for data-to-text NLG within a single framework to facilitate direct comparisons of the models themselves and better understand the contributions of other design choices. We release our library at https://github.com/NapierNLP/enunlg to serve as a baseline for ongoing work in this area including research on NLG for low-resource languages where transformers might not be optimal.",
}