@inproceedings{liu-etal-2025-beyond-function,
    title = "Beyond Function-Level Search: Repository-Aware Dual-Encoder Code Retrieval with Adversarial Verification",
    author = "Liu, Aofan and
      Song, Shiyuan and
      Li, Haoxuan and
      Yang, Cehao and
      Qi, Yiyan",
    editor = "Christodoulopoulos, Christos and
      Chakraborty, Tanmoy and
      Rose, Carolyn and
      Peng, Violet",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
    month = nov,
    year = "2025",
    address = "Suzhou, China",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-emnlp.1147/",
    doi = "10.18653/v1/2025.findings-emnlp.1147",
    pages = "21034--21049",
    isbn = "979-8-89176-335-7",
    abstract = "The escalating complexity of modern codebases has intensified the need for code retrieval systems capable of interpreting cross-component change intents{---}a capability fundamentally absent in conventional function-level search paradigms. While recent research has improved alignment between queries and code snippets, retrieving contextually relevant code for certain change request remains underexplored. To bridge this gap, we present RepoAlignBench, the first benchmark designed to evaluate repository-level code retrieval for change request-driven scenarios, encompassing 52k columns. The benchmark shifts the paradigm from function-centric retrieval to holistic repository analysis. In addition, we propose ReflectCode, an adversarial reflection-augmented dual-tower architecture featuring disentangled code{\_}encoder and doc{\_}encoder towers. Our framework dynamically integrates syntactic patterns, function dependency, and semantic expansion intent through LLM. Comprehensive evaluations demonstrate that ReflectCode achieves 12.2{\%} Top-5 Accuracy and 7.1{\%} Recall improvements over state-of-the-art baselines."
}
@comment{Informal citation (ACL Anthology "Markdown" format):
[Beyond Function-Level Search: Repository-Aware Dual-Encoder Code Retrieval with Adversarial Verification](https://aclanthology.org/2025.findings-emnlp.1147/) (Liu et al., Findings 2025)
ACL
}