@inproceedings{hou-etal-2025-synapticrag,
  title     = {{S}ynaptic{RAG}: Enhancing Temporal Memory Retrieval in Large Language Models through Synaptic Mechanisms},
  author    = {Hou, Yuki and
               Tamoto, Haruki and
               Zhao, Qinghua and
               Miyashita, Homei},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Findings of the Association for Computational Linguistics: ACL 2025},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-acl.1048/},
  pages     = {20422--20436},
  isbn      = {979-8-89176-256-5},
  internal-note = {NOTE(review): canonical Anthology DOI is presumably 10.18653/v1/2025.findings-acl.1048 -- confirm on the entry page before adding a doi field},
  abstract  = {Existing retrieval methods in Large Language Models show degradation in accuracy when handling temporally distributed conversations, primarily due to their reliance on simple similarity-based retrieval. Unlike existing memory retrieval methods that rely solely on semantic similarity, we propose SynapticRAG, which uniquely combines temporal association triggers with biologically-inspired synaptic propagation mechanisms. Our approach uses temporal association triggers and synaptic-like stimulus propagation to identify relevant dialogue histories. A dynamic leaky integrate-and-fire mechanism then selects the most contextually appropriate memories. Experiments on four datasets of English, Chinese and Japanese show that compared to state-of-the-art memory retrieval methods, SynapticRAG achieves consistent improvements across multiple metrics up to 14.66{\%} points. This work bridges the gap between cognitive science and language model development, providing a new framework for memory management in conversational systems.},
}
@comment{
  Markdown (Informal):
  [SynapticRAG: Enhancing Temporal Memory Retrieval in Large Language Models through Synaptic Mechanisms](https://aclanthology.org/2025.findings-acl.1048/) (Hou et al., Findings of ACL 2025)
}