@inproceedings{zhang-etal-2025-rpdr,
title = "{RPDR}: A Round-trip Prediction-Based Data Augmentation Framework for Long-Tail Question Answering",
author = "Zhang, Yiming and
Zhang, Siyue and
Zhao, Junbo and
Zhao, Chen",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.1119/",
pages = "22009--22023",
ISBN = "979-8-89176-332-6",
abstract = "Long-tail question answering presents significant challenges for large language models (LLMs) due to their limited ability to acquire and accurately recall less common knowledge. Retrieval-augmented generation (RAG) systems have shown great promise in mitigating this limitation by integrating external retrieval mechanisms. However, dense retrieval models often face the same difficulties when generalizing to rare or niche knowledge. In this study, we introduce RPDR, a novel data augmentation framework that selects high-quality easy-to-learn training data, to enhance dense retrievers. Our approach is built around three core components: synthetic data generation, data selection with Round-Trip prediction to identify easy-to-learn instances, and retriever training with these instances. We evaluate RPDR on two long-tail retrieval benchmarks, PopQA and EntityQuestion, demonstrating substantial improvements over existing retrievers like BM25 and Contriver, especially on extremely long-tail categories. We identify the strengths and limitations of RPDR through detailed human analysis and propose a dynamic routing mechanism to dynamically route queries to specialized retrieval modules to further improve retrieval performance."
}

Markdown (Informal)
[RPDR: A Round-trip Prediction-Based Data Augmentation Framework for Long-Tail Question Answering](https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.1119/) (Zhang et al., EMNLP 2025)
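For readers skimming this entry, the selection step the abstract describes can be pictured as a small filter: generate a synthetic query from each passage, have a reader model answer that query against the passage, and keep only the pairs whose round-trip answer matches the original. The sketch below is an illustrative approximation of that idea, not the authors' implementation; `generate_query`, `answer_from_passage`, and `em_match` are hypothetical helper functions standing in for a query generator, a reader model, and an exact-match check.

```python
# Illustrative sketch of round-trip-prediction data selection,
# as one plausible reading of the abstract. The three callables
# below are hypothetical stand-ins, not the paper's actual code.

def select_easy_to_learn(passages, generate_query, answer_from_passage, em_match):
    """Keep (query, passage) training pairs whose synthetic query
    can be answered correctly from its source passage (the round trip)."""
    selected = []
    for passage in passages:
        # Synthetic data generation: produce a QA pair from the passage.
        query, gold_answer = generate_query(passage)
        # Round-trip prediction: answer the synthetic query from the passage.
        predicted = answer_from_passage(query, passage)
        # Keep only instances the model can already learn from easily.
        if em_match(predicted, gold_answer):
            selected.append((query, passage))
    return selected
```

The selected pairs would then be used to train the dense retriever, the third component the abstract names.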