@inproceedings{ginn-palmer-2025-llm,
  title     = {{LLM} Dependency Parsing with In-Context Rules},
  author    = {Ginn, Michael and
               Palmer, Alexis},
  editor    = {Fei, Hao and
               Tu, Kewei and
               Zhang, Yuhui and
               Hu, Xiang and
               Han, Wenjuan and
               Jia, Zixia and
               Zheng, Zilong and
               Cao, Yixin and
               Zhang, Meishan and
               Lu, Wei and
               Siddharth, N. and
               {\O}vrelid, Lilja and
               Xue, Nianwen and
               Zhang, Yue},
  booktitle = {Proceedings of the 1st Joint Workshop on Large Language Models and Structure Modeling (XLLM 2025)},
  month     = aug,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.xllm-1.17/},
  pages     = {186--196},
  isbn      = {979-8-89176-286-2},
  abstract  = {We study whether incorporating rules (in various formats) can aid large language models to perform dependency parsing. We consider a paradigm in which LLMs first produce symbolic rules given fully labeled examples, and the rules are then provided in a subsequent call that performs the actual parsing. In addition, we experiment with providing human-created annotation guidelines in-context to the LLMs. We test on eight low-resource languages from Universal Dependencies, finding that while both methods for rule incorporation improve zero-shot performance, the benefit disappears with a few labeled in-context examples.},
}
Markdown (Informal)
[LLM Dependency Parsing with In-Context Rules](https://aclanthology.org/2025.xllm-1.17/) (Ginn & Palmer, XLLM 2025)
ACL
- Michael Ginn and Alexis Palmer. 2025. LLM Dependency Parsing with In-Context Rules. In Proceedings of the 1st Joint Workshop on Large Language Models and Structure Modeling (XLLM 2025), pages 186–196, Vienna, Austria. Association for Computational Linguistics.