@inproceedings{li-etal-2025-generation-augmented,
title = "Generation-Augmented Retrieval: Rethinking the Role of Large Language Models in Zero-Shot Relation Extraction",
author = "Li, Zehan and
Zhang, Fu and
Peng, Tianyue and
Liu, He and
Cheng, Jingwei",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-luhme/2025.findings-emnlp.974/",
doi = "10.18653/v1/2025.findings-emnlp.974",
pages = "17928--17941",
ISBN = "979-8-89176-335-7",
abstract = "Recent advances in Relation Extraction (RE) emphasize Zero-Shot methodologies, aiming to recognize unseen relations between entities with no annotated data. Although Large Language Models (LLMs) have demonstrated outstanding performance in many NLP tasks, their performance in Zero-Shot RE (ZSRE) without entity type constraints still lags behind Small Language Models (SLMs). LLM-based ZSRE often involves manual interventions and significant computational overhead, especially when scaling to large-scale multi-choice data.To this end, we introduce RE-GAR-AD, which not only leverages the generative capability of LLMs but also utilizes their representational power without tuning LLMs. We redefine LLM-based ZSRE as a retrieval challenge, utilizing a Generation-Augmented Retrieval framework coupled with a retrieval Adjuster. Specifically, our approach guides LLMs through crafted prompts to distill sentence semantics and enrich relation labels. We encode sentences and relation labels using LLMs and match their embeddings in a triplet fashion. This retrieval technique significantly reduces token input requirements. Additionally, to further optimize embeddings, we propose a plug-in retrieval adjuster with only 2M parameters, which allows rapid fine-tuning without accessing LLMs' parameters. Our LLM-based model demonstrates comparable performance on multiple benchmarks."
}