@inproceedings{dai-etal-2024-improve,
title = "Improve Dense Passage Retrieval with Entailment Tuning",
author = "Dai, Lu and
Liu, Hao and
Xiong, Hui",
editor = "Al-Onaizan, Yaser and
Bansal, Mohit and
Chen, Yun-Nung",
booktitle = "Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.emnlp-main.636/",
doi = "10.18653/v1/2024.emnlp-main.636",
pages = "11375--11387",
abstract = "Retrieval module can be plugged into many downstream NLP tasks to improve their performance, such as open-domain question answering and retrieval-augmented generation. The key to a retrieval system is to calculate relevance scores to query and passage pairs. However, the definition of relevance is often ambiguous. We observed that a major class of relevance aligns with the concept of entailment in NLI tasks. Based on this observation, we designed a method called entailment tuning to improve the embedding of dense retrievers. Specifically, we unify the form of retrieval data and NLI data using existence claim as a bridge. Then, we train retrievers to predict the claims entailed in a passage with a variant task of masked prediction. Our method can be efficiently plugged into current dense retrieval methods, and experiments show the effectiveness of our method."
}
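
The abstract describes two mechanisms worth making concrete. First, a dense retriever scores a query-passage pair by comparing their embeddings. Below is a minimal sketch of dual-encoder dot-product scoring, assuming a BERT-style encoder with [CLS] pooling; the model name and pooling choice are illustrative assumptions, not the paper's exact setup.

```python
import torch
from transformers import AutoModel, AutoTokenizer

MODEL_NAME = "bert-base-uncased"  # assumption: any BERT-style encoder

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
encoder = AutoModel.from_pretrained(MODEL_NAME)

def embed(texts):
    """Encode texts into single vectors via [CLS] pooling (a common choice)."""
    batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
    with torch.no_grad():
        out = encoder(**batch)
    return out.last_hidden_state[:, 0]  # [CLS] token as the text embedding

query = "who wrote on the origin of species"
passages = [
    "On the Origin of Species was written by Charles Darwin in 1859.",
    "The Eiffel Tower was completed in Paris in 1889.",
]

# Relevance score for each query-passage pair is the embedding dot product.
scores = embed([query]) @ embed(passages).T
print(scores)  # the Darwin passage should score higher
```

Second, the entailment-tuning objective itself: the authors train retrievers to predict the claims entailed by a passage via a variant of masked prediction. The sketch below is one plausible reading of that one-sentence description, not the authors' published recipe: pair the passage with an existence claim, mask the claim's tokens, and compute the MLM loss only on those masks. The claim wording, pair encoding, and full-claim masking are all assumptions.

```python
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

tok = AutoTokenizer.from_pretrained("bert-base-uncased")
mlm = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")

passage = "On the Origin of Species was written by Charles Darwin in 1859."
claim = "there exists a book on the origin of species by darwin"  # hypothetical existence claim

enc = tok(passage, claim, return_tensors="pt")  # BERT-style sentence pair

# Segment 1 (token_type_ids == 1) holds the claim; mask its word tokens,
# keep special tokens like [SEP] intact, and score loss only on the masks.
special = torch.tensor(
    tok.get_special_tokens_mask(enc.input_ids[0].tolist(), already_has_special_tokens=True)
).bool()
claim_tokens = enc.token_type_ids[0].bool() & ~special

input_ids = enc.input_ids.clone()
input_ids[0, claim_tokens] = tok.mask_token_id
labels = enc.input_ids.clone()
labels[0, ~claim_tokens] = -100  # ignore loss everywhere but the claim

out = mlm(input_ids=input_ids,
          attention_mask=enc.attention_mask,
          token_type_ids=enc.token_type_ids,
          labels=labels)
out.loss.backward()  # gradient for one step; optimizer and data loop omitted
```

Per the abstract, this step plugs into existing dense retrieval pipelines; presumably the entailment-tuned weights would then initialize a dual encoder like the one in the first sketch.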