@inproceedings{naravajhula-ng-2026-rbr,
  title     = {{RBR}: {RAG}-Based Open-Domain Question Answering Using a Ranking Approach to Document Retrieval},
  author    = {Naravajhula, Priyatam Sai and
               Ng, Vincent},
  editor    = {Piperidis, Stelios and
               Bel, N{\'u}ria and
               van den Heuvel, Henk and
               Ide, Nancy and
               Krek, Simon and
               Toral, Antonio},
  booktitle = {International Conference on Language Resources and Evaluation},
  volume    = {main},
  month     = may,
  year      = {2026},
  address   = {Palma de Mallorca, Spain},
  publisher = {ELRA Language Resource Association},
  url       = {https://preview.aclanthology.org/ingest-lrec/2026.lrec-main.60/},
  pages     = {805--817},
  abstract  = {Retrieval-Augmented Generation (RAG) has emerged as a promising approach to ODQA. A RAG-based ODQA system is typically composed of two components: a retriever that retrieves the passages that are most relevant to a given query, and a generator that generates the answer to the query by combining the information from the retrieved passages. Existing retrievers typically identify the most relevant passages by computing the similarity between the query and each passage in a given collection. In other words, they do not compare which of two passages is more relevant to the given query. We hypothesize, however, that we can improve RAG-based ODQA systems by modeling the relationship among the passages to be retrieved, specifically by learning which passages are more relevant than the others to the given query. To do so, we propose a ranking-based approach to passage retrieval, where we first rank the candidate passages w.r.t. the query and subsequently refine the score associated with each of these passages using a Graph Attention Network. We evaluate our approach to ODQA, RBR (Ranking-Based Retrieval), on two commonly-used ODQA datasets, Natural Questions and TriviaQA. Experimental results show that RBR slightly outperforms PA-RAG, a state-of-the-art ODQA system, by 0.45 points and 1.01 points in Exact Match score on Natural Questions and TriviaQA, respectively.},
}
Markdown (Informal)
@comment{
[RBR: RAG-Based Open-Domain Question Answering Using a Ranking Approach to Document Retrieval](https://preview.aclanthology.org/ingest-lrec/2026.lrec-main.60/) (Naravajhula & Ng, LREC 2026)
ACL
}