@inproceedings{guo-etal-2025-lightrag,
title = "{L}ight{RAG}: Simple and Fast Retrieval-Augmented Generation",
author = "Guo, Zirui and
Xia, Lianghao and
Yu, Yanhua and
Ao, Tu and
Huang, Chao",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-emnlp.568/",
doi = "10.18653/v1/2025.findings-emnlp.568",
pages = "10746--10761",
ISBN = "979-8-89176-335-7",
abstract = "Retrieval-Augmented Generation (RAG) systems enhance large language models (LLMs) by integrating external knowledge sources, enabling more accurate and contextually relevant responses tailored to user needs. However, existing RAG systems have significant limitations, including reliance on flat data representations and inadequate contextual awareness, which can lead to fragmented answers that fail to capture complex interdependencies. To address these challenges, we propose LightRAG, a novel framework that incorporates graph structures into text indexing and retrieval processes. This innovative approach employs a dual-level retrieval system that enhances comprehensive information retrieval from both low- and high-level knowledge discovery. Additionally, the integration of graph structures with vector representations facilitates efficient retrieval of related entities and their relationships, significantly improving response times while maintaining contextual relevance. This capability is further enhanced by an incremental update algorithm that ensures the timely integration of new data, allowing the system to remain effective and responsive in rapidly changing data environments. Extensive experimental validation demonstrates considerable improvements in retrieval accuracy and efficiency compared to existing approaches. We have made our LightRAG framework open source and anonymously available at the link: https://anonymous.4open.science/r/LightRAG-2BEE."
}