@inproceedings{zhu-etal-2025-mitigating,
title = "Mitigating Lost-in-Retrieval Problems in Retrieval Augmented Multi-Hop Question Answering",
author = "Zhu, Rongzhi and
Liu, Xiangyu and
Sun, Zequn and
Wang, Yiwei and
Hu, Wei",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.1089/",
pages = "22362--22375",
ISBN = "979-8-89176-251-0",
abstract = "In this paper, we identify a critical problem, ``lost-in-retrieval'', in retrieval-augmented multi-hop question answering (QA): the key entities are missed in LLMs' sub-question decomposition. ``Lost-in-retrieval'' significantly degrades the retrieval performance, which disrupts the reasoning chain and leads to the incorrect answers. To resolve this problem, we propose a progressive retrieval and rewriting method, namely ChainRAG, which sequentially handles each sub-question by completing missing key entities and retrieving relevant sentences from a sentence graph for answer generation. Each step in our retrieval and rewriting process builds upon the previous one, creating a seamless chain that leads to accurate retrieval and answers. Finally, all retrieved sentences and sub-question answers are integrated to generate a comprehensive answer to the original question. We evaluate ChainRAG on three multi-hop QA datasets{---}MuSiQue, 2Wiki, and HotpotQA{---}using three large language models: GPT4o-mini, Qwen2.5-72B, and GLM-4-Plus. Empirical results demonstrate that ChainRAG consistently outperforms baselines in both effectiveness and efficiency."
}
Markdown (Informal)
[Mitigating Lost-in-Retrieval Problems in Retrieval Augmented Multi-Hop Question Answering](https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.1089/) (Zhu et al., ACL 2025)