@inproceedings{wei-etal-2025-adapting,
    title     = {Adapting General-Purpose Embedding Models to Private Datasets Using Keyword-based Retrieval},
    author    = {Wei, Yubai and
                 Han, Jiale and
                 Yang, Yi},
    editor    = {Che, Wanxiang and
                 Nabende, Joyce and
                 Shutova, Ekaterina and
                 Pilehvar, Mohammad Taher},
    booktitle = {Findings of the Association for Computational Linguistics: ACL 2025},
    month     = jul,
    year      = {2025},
    address   = {Vienna, Austria},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.findings-acl.357/},
    pages     = {6856--6870},
    isbn      = {979-8-89176-256-5},
    abstract  = {Text embedding models play a cornerstone role in AI applications, such as retrieval-augmented generation (RAG). While general-purpose text embedding models demonstrate strong performance on generic retrieval benchmarks, their effectiveness diminishes when applied to private datasets (e.g., company-specific proprietary data), which often contain specialized terminology and lingo. In this work, we introduce BMEmbed, a novel method for adapting general-purpose text embedding models to private datasets. By leveraging the well-established keyword-based retrieval technique (BM25), we construct supervisory signals from the ranking of keyword-based retrieval results to facilitate model adaptation. We evaluate BMEmbed across a range of domains, datasets, and models, showing consistent improvements in retrieval performance. Moreover, we provide empirical insights into how BM25-based signals contribute to improving embeddings by fostering alignment and uniformity, highlighting the value of this approach in adapting models to domain-specific data. We release the source code for the research community.},
}
@comment{
Markdown (Informal)
[Adapting General-Purpose Embedding Models to Private Datasets Using Keyword-based Retrieval](https://aclanthology.org/2025.findings-acl.357/) (Wei et al., Findings 2025)
ACL
}