@inproceedings{popovic-etal-2025-docie,
title = "{D}oc{IE}@{XLLM}25: In-Context Learning for Information Extraction using Fully Synthetic Demonstrations",
author = {Popovic, Nicholas and
Kangen, Ashish and
Schopf, Tim and
F{\"a}rber, Michael},
editor = "Fei, Hao and
Tu, Kewei and
Zhang, Yuhui and
Hu, Xiang and
Han, Wenjuan and
Jia, Zixia and
Zheng, Zilong and
Cao, Yixin and
Zhang, Meishan and
Lu, Wei and
Siddharth, N. and
{\O}vrelid, Lilja and
Xue, Nianwen and
Zhang, Yue",
booktitle = "Proceedings of the 1st Joint Workshop on Large Language Models and Structure Modeling (XLLM 2025)",
month = aug,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.xllm-1.26/",
pages = "298--309",
ISBN = "979-8-89176-286-2",
    abstract = "Large, high-quality annotated corpora remain scarce in document-level entity and relation extraction in zero-shot or few-shot settings. In this paper, we present a fully automatic, LLM-based pipeline for synthetic data generation and in-context learning for document-level entity and relation extraction. In contrast to existing approaches that rely on manually annotated demonstrations or direct zero-shot inference, our method combines synthetic data generation with retrieval-based in-context learning, using a reasoning-optimized language model. This allows us to build a high-quality demonstration database without manual annotation and to dynamically retrieve relevant examples at inference time. Based on our approach, we produce a synthetic dataset of over $5k$ Wikipedia abstracts with approximately $59k$ entities and $30k$ relation triples. Finally, we evaluate in-context learning performance on the DocIE shared task, extracting entities and relations from long documents in a zero-shot setting. The code and synthetic dataset are made available for future research."
}