@inproceedings{song-etal-2018-graph,
    title     = {A Graph-to-Sequence Model for {AMR}-to-Text Generation},
    author    = {Song, Linfeng and
                 Zhang, Yue and
                 Wang, Zhiguo and
                 Gildea, Daniel},
    editor    = {Gurevych, Iryna and
                 Miyao, Yusuke},
    booktitle = {Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
    month     = jul,
    year      = {2018},
    address   = {Melbourne, Australia},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/P18-1150/},
    doi       = {10.18653/v1/P18-1150},
    pages     = {1616--1626},
    abstract  = {The problem of AMR-to-text generation is to recover a text representing the same meaning as an input AMR graph. The current state-of-the-art method uses a sequence-to-sequence model, leveraging LSTM for encoding a linearized AMR structure. Although being able to model non-local semantic information, a sequence LSTM can lose information from the AMR graph structure, and thus facing challenges with large-graphs, which result in long sequences. We introduce a neural graph-to-sequence model, using a novel LSTM structure for directly encoding graph-level semantics. On a standard benchmark, our model shows superior results to existing methods in the literature.}
}
Markdown (Informal)
[A Graph-to-Sequence Model for AMR-to-Text Generation](https://aclanthology.org/P18-1150/) (Song et al., ACL 2018)
ACL
- Linfeng Song, Yue Zhang, Zhiguo Wang, and Daniel Gildea. 2018. A Graph-to-Sequence Model for AMR-to-Text Generation. In Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers), pages 1616–1626, Melbourne, Australia. Association for Computational Linguistics.