@inproceedings{mager-etal-2020-gpt,
title = "{GPT}-too: A Language-Model-First Approach for {AMR}-to-Text Generation",
author = "Mager, Manuel and
Fernandez Astudillo, Ram{\'o}n and
Naseem, Tahira and
Sultan, Md Arafat and
Lee, Young-Suk and
Florian, Radu and
Roukos, Salim",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.acl-main.167",
doi = "10.18653/v1/2020.acl-main.167",
pages = "1846--1852",
abstract = "Abstract Meaning Representations (AMRs) are broad-coverage sentence-level semantic graphs. Existing approaches to generating text from AMR have focused on training sequence-to-sequence or graph-to-sequence models on AMR annotated data only. In this paper, we propose an alternative approach that combines a strong pre-trained language model with cycle consistency-based re-scoring. Despite the simplicity of the approach, our experimental results show these models outperform all previous techniques on the English LDC2017T10 dataset, including the recent use of transformer architectures. In addition to the standard evaluation metrics, we provide human evaluation experiments that further substantiate the strength of our approach.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="mager-etal-2020-gpt">
<titleInfo>
<title>GPT-too: A Language-Model-First Approach for AMR-to-Text Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Manuel</namePart>
<namePart type="family">Mager</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ramón</namePart>
<namePart type="family">Fernandez Astudillo</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tahira</namePart>
<namePart type="family">Naseem</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Md</namePart>
<namePart type="given">Arafat</namePart>
<namePart type="family">Sultan</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Young-Suk</namePart>
<namePart type="family">Lee</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Radu</namePart>
<namePart type="family">Florian</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Salim</namePart>
<namePart type="family">Roukos</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued encoding="w3cdtf">2020-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Abstract Meaning Representations (AMRs) are broad-coverage sentence-level semantic graphs. Existing approaches to generating text from AMR have focused on training sequence-to-sequence or graph-to-sequence models on AMR annotated data only. In this paper, we propose an alternative approach that combines a strong pre-trained language model with cycle consistency-based re-scoring. Despite the simplicity of the approach, our experimental results show these models outperform all previous techniques on the English LDC2017T10 dataset, including the recent use of transformer architectures. In addition to the standard evaluation metrics, we provide human evaluation experiments that further substantiate the strength of our approach.</abstract>
<identifier type="citekey">mager-etal-2020-gpt</identifier>
<identifier type="doi">10.18653/v1/2020.acl-main.167</identifier>
<location>
<url>https://aclanthology.org/2020.acl-main.167</url>
</location>
<part>
<date>2020-07</date>
<extent unit="page">
<start>1846</start>
<end>1852</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T GPT-too: A Language-Model-First Approach for AMR-to-Text Generation
%A Mager, Manuel
%A Fernandez Astudillo, Ramón
%A Naseem, Tahira
%A Sultan, Md Arafat
%A Lee, Young-Suk
%A Florian, Radu
%A Roukos, Salim
%S Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics
%D 2020
%8 jul
%I Association for Computational Linguistics
%C Online
%F mager-etal-2020-gpt
%X Abstract Meaning Representations (AMRs) are broad-coverage sentence-level semantic graphs. Existing approaches to generating text from AMR have focused on training sequence-to-sequence or graph-to-sequence models on AMR annotated data only. In this paper, we propose an alternative approach that combines a strong pre-trained language model with cycle consistency-based re-scoring. Despite the simplicity of the approach, our experimental results show these models outperform all previous techniques on the English LDC2017T10 dataset, including the recent use of transformer architectures. In addition to the standard evaluation metrics, we provide human evaluation experiments that further substantiate the strength of our approach.
%R 10.18653/v1/2020.acl-main.167
%U https://aclanthology.org/2020.acl-main.167
%U https://doi.org/10.18653/v1/2020.acl-main.167
%P 1846-1852
Markdown (Informal)
[GPT-too: A Language-Model-First Approach for AMR-to-Text Generation](https://aclanthology.org/2020.acl-main.167) (Mager et al., ACL 2020)
ACL
- Manuel Mager, Ramón Fernandez Astudillo, Tahira Naseem, Md Arafat Sultan, Young-Suk Lee, Radu Florian, and Salim Roukos. 2020. GPT-too: A Language-Model-First Approach for AMR-to-Text Generation. In Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics, pages 1846–1852, Online. Association for Computational Linguistics.