@inproceedings{shachar-etal-2025-ner,
title = "{NER} Retriever: Zero-Shot Named Entity Retrieval with Type-Aware Embeddings",
author = "Shachar, Or and
Katz, Uri and
Goldberg, Yoav and
Glickman, Oren",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.597/",
doi = "10.18653/v1/2025.findings-emnlp.597",
pages = "11175--11186",
ISBN = "979-8-89176-335-7",
abstract = "We present NER Retriever, a zero-shot retrieval framework for ad-hoc Named Entity Recognition (NER), where a user-defined type description is used to retrieve documents mentioning entities of that type. Instead of relying on fixed schemas or fine-tuned models, our method builds on pretrained language models (LLMs) to embed both entity mentions and type descriptions into a shared semantic space. We show that internal representations{---}specifically, the value vectors from mid-layer transformer blocks{---}encode fine-grained type information more effectively than commonly used top-layer embeddings. To refine these representations, we train a lightweight contrastive projection network that aligns type-compatible entities while separating unrelated types. The resulting entity embeddings are compact, type-aware, and well-suited for nearest-neighbor search. Evaluated on three benchmarks, NER Retriever significantly outperforms both lexical (BM25) and dense (sentence-level) retrieval baselines, particularly in low-context settings. Our findings provide empirical support for representation selection within LLMs and demonstrate a practical solution for scalable, schema-free entity retrieval."
}