@inproceedings{liu-etal-2022-plog,
title = "{PLOG}: Table-to-Logic Pretraining for Logical Table-to-Text Generation",
author = "Liu, Ao and
Dong, Haoyu and
Okazaki, Naoaki and
Han, Shi and
Zhang, Dongmei",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Add-Cong-Liu-Florida-Atlantic-University-author-id/2022.emnlp-main.373/",
doi = "10.18653/v1/2022.emnlp-main.373",
pages = "5531--5546",
abstract = "Logical table-to-text generation is a task that involves generating logically faithful sentences from tables, which requires models to derive logical-level facts from table records via logical inference. It raises a new challenge on the logical-level content planning of table-to-text models. However, directly learning the logical inference knowledge from table-text pairs is very difficult for neural models because of the ambiguity of natural language and the scarcity of parallel data. Hence even large-scale pre-trained language models present low logical fidelity on logical table-to-text. In this work, we propose a Pretrained Logical Form Generator (PLOG) framework to improve generation fidelity. Specifically, PLOG is first pretrained on a table-to-logical-form generation (table-to-logic) task, then finetuned on downstream table-to-text tasks. The logical forms are formally defined with unambiguous semantics. Hence we can collect a large amount of accurate logical forms from tables without human annotation. In addition, PLOG can learn logical inference from table-logic pairs much more reliably than from table-text pairs. To evaluate our model, we further collect a controlled logical table-to-text dataset CONTLOG based on an existing dataset. On two benchmarks, LOGICNLG and CONTLOG, PLOG outperforms strong baselines by a large margin on the logical fidelity, demonstrating the effectiveness of table-to-logic pretraining."
}