@inproceedings{du-etal-2024-bi,
title = "Bi-Directional Multi-Granularity Generation Framework for Knowledge Graph-to-Text with Large Language Model",
author = "Du, Haowei and
Li, Chen and
Zhang, Dinghao and
Zhao, Dongyan",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.acl-short.14/",
doi = "10.18653/v1/2024.acl-short.14",
pages = "147--152",
    abstract = "The knowledge graph-to-text (KG-to-text) generation task aims to synthesize coherent and engaging sentences that accurately convey the complex information derived from an input knowledge graph. Existing methods generate the whole target text based on all KG triples at once and may incorporate incorrect KG triples for each sentence. To address this, we propose a bi-directional multi-granularity generation framework. Instead of generating the whole text at once, we perform sentence-level generation based on the corresponding triples and then compose the graph-level text. Moreover, we design a backward relation extraction task to enhance the correctness of relational information. Our method achieves a new state of the art on the WebNLG benchmark dataset, and further analysis shows the effectiveness of its different modules."
}
Markdown (Informal)
[Bi-Directional Multi-Granularity Generation Framework for Knowledge Graph-to-Text with Large Language Model](https://aclanthology.org/2024.acl-short.14/) (Du et al., ACL 2024)