@inproceedings{raspanti-etal-2025-grammar,
title = "Grammar-Constrained Decoding Makes Large Language Models Better Logical Parsers",
author = "Raspanti, Federico and
Ozcelebi, Tanir and
Holenderski, Mike",
editor = "Rehm, Georg and
Li, Yunyao",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 6: Industry Track)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.acl-industry.34/",
pages = "485--499",
ISBN = "979-8-89176-288-6",
abstract = "Large Language Models (LLMs) have shown capabilities in various natural language processing tasks, yet they often struggle with logical reasoning, particularly when dealing with complex natural language statements. To address this challenge, approaches that combine LLMs with symbolic reasoners have been proposed, where the LLM translates the natural language statements into symbolic representations, which are then verified by an external symbolic solver. However, ensuring syntactic correctness in these translations remains a significant challenge. To address this, we propose to constrain the outputs of the LLMs using Grammar-Constrained Decoding, showing that it consistently improves both syntactic correctness and semantic accuracy in logical parsing tasks. Our findings suggest that grammar constraints can serve as an effective substitute for in-context examples, especially beneficial for resource-constrained applications using smaller models."
}
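
The abstract above describes constraining an LLM's decoder with a grammar so that every emitted string is syntactically valid. The sketch below is a minimal, self-contained illustration of that general idea, not the authors' implementation: it uses a toy finite-state grammar (conjunctions of propositional literals), a random-logit stand-in for the LLM, and logit masking so that only grammar-legal tokens can ever be sampled.

```python
# Minimal sketch of grammar-constrained decoding (GCD).
# The "model" is a stand-in emitting random logits; in practice the logits
# come from an LLM. The toy grammar below is an assumption for illustration,
# not the logical-form grammar used in the paper.
import math
import random

TOKENS = ["p", "q", "r", "~", " & ", "<eos>"]

# Finite-state view of the toy grammar:
#   formula := literal (" & " literal)* ;  literal := "~"? ("p" | "q" | "r")
def allowed_tokens(state):
    if state == "EXPECT_LIT":   # a literal must start here
        return {"p", "q", "r", "~"}
    if state == "EXPECT_ATOM":  # after "~", only an atom may follow
        return {"p", "q", "r"}
    if state == "AFTER_ATOM":   # either extend with " & " or stop
        return {" & ", "<eos>"}
    raise ValueError(state)

def advance(state, token):
    if token in {"p", "q", "r"}:
        return "AFTER_ATOM"
    if token == "~":
        return "EXPECT_ATOM"
    if token == " & ":
        return "EXPECT_LIT"
    return state

def fake_llm_logits():
    # Stand-in for next-token logits from a language model.
    return [random.gauss(0.0, 1.0) for _ in TOKENS]

def sample(logits):
    probs = [math.exp(l) for l in logits]   # exp(-inf) == 0.0, so masked tokens get zero mass
    total = sum(probs)
    return random.choices(range(len(TOKENS)), [p / total for p in probs])[0]

def generate(max_steps=20):
    state, out = "EXPECT_LIT", []
    for _ in range(max_steps):
        logits = fake_llm_logits()
        legal = allowed_tokens(state)
        # Core of GCD: mask every token the grammar does not allow at this step.
        masked = [l if TOKENS[i] in legal else float("-inf")
                  for i, l in enumerate(logits)]
        token = TOKENS[sample(masked)]
        if token == "<eos>":
            break
        out.append(token)
        state = advance(state, token)
    return "".join(out)

print(generate())   # e.g. "~p & q" -- grammatical by construction
```

By construction, the sampler can never choose a token the grammar forbids, which is why the paper's setting needs no in-context examples just to teach the model the output syntax.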