@inproceedings{kang-etal-2025-reassessing,
    title     = {Reassessing Graph Linearization for Sequence-to-sequence {AMR} Parsing: On the Advantages and Limitations of Triple-Based},
    author    = {Kang, Jeongwoo and
                 Coavoux, Maximin and
                 Schwab, Didier and
                 Lopez, C{\'e}dric},
    editor    = {Drozd, Aleksandr and
                 Sedoc, Jo{\~a}o and
                 Tafreshi, Shabnam and
                 Akula, Arjun and
                 Shu, Raphael},
    booktitle = {The Sixth Workshop on Insights from Negative Results in NLP},
    month     = may,
    year      = {2025},
    address   = {Albuquerque, New Mexico},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.insights-1.3/},
    pages     = {15--23},
    isbn      = {979-8-89176-240-4},
    abstract  = {Sequence-to-sequence models are widely used to train Abstract Meaning Representation (Banarescu et al., 2013, AMR) parsers. To train such models, AMR graphs have to be linearized into a one-line text format. While Penman encoding is widely used for this purpose, we argue that it has limitations: 1) for deep graphs, some closely related nodes are located far apart in the linearized text 2) Penman{'}s tree-based encoding necessitates inverse roles to handle node re-entrancy, doubling the number of relation types to predict. To address these issues, we propose a triple-based linearization method and compare its efficiency by training an AMR parser with both approaches. Although triple is well suited to represent a graph, our results show that it does not yet improve performance on deeper or longer graphs. It suggests room for improvement in its design to better compete with Penman{'}s concise representation and explicit encoding of a nested graph structure.},
}
@comment{
Markdown (Informal):
[Reassessing Graph Linearization for Sequence-to-sequence AMR Parsing: On the Advantages and Limitations of Triple-Based](https://aclanthology.org/2025.insights-1.3/) (Kang et al., insights 2025)
ACL
}