@inproceedings{wischounig-etal-2026-negative,
    title     = "Negative Sampling Techniques in Dense Retrieval: A Survey",
    author    = "Wischounig, Laurin and
                 Abdallah, Abdelrahman and
                 Jatowt, Adam",
    editor    = "Demberg, Vera and
                 Inui, Kentaro and
                 Marquez, Llu{\'i}s",
    booktitle = "Findings of the {A}ssociation for {C}omputational {L}inguistics: {EACL} 2026",
    month     = mar,
    year      = "2026",
    address   = "Rabat, Morocco",
    publisher = "Association for Computational Linguistics",
    url       = "https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.157/",
    pages     = "3003--3020",
    isbn      = "979-8-89176-386-9",
    abstract  = "Information Retrieval (IR) is fundamental to many modern NLP applications. The rise of dense retrieval (DR), using neural networks to learn semantic vector representations, has significantly advanced IR performance. Central to training effective dense retrievers through contrastive learning is the selection of informative negative samples. Synthesizing 35 seminal papers, this survey provides a comprehensive and up-to-date overview of negative sampling techniques in dense IR. Our unique contribution is the focus on modern NLP applications and the inclusion of recent Large Language Model (LLM)-driven methods, an area absent in prior reviews. We propose a taxonomy that categorizes techniques, including random, static/dynamically mined, and synthetic datasets. We then analyze these approaches with respect to trade-offs between effectiveness, computational cost, and implementation difficulty. The survey concludes by outlining current challenges and promising future directions for the use of LLM-generated synthetic data."
}

@comment{Copy-paste residue from the ACL Anthology page (its "Markdown (Informal)" citation block), kept here so BibTeX tooling ignores it. NOTE(review): the url field above is a preview/ingest link; confirm whether the canonical aclanthology.org URL is live and switch to it.
Markdown (Informal)
[Negative Sampling Techniques in Dense Retrieval: A Survey](https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.157/) (Wischounig et al., Findings 2026)
ACL
}