@inproceedings{gao-etal-2025-efficient-context,
title = "An Efficient Context-Dependent Memory Framework for {LLM}-Centric Agents",
author = "Gao, Pengyu and
Zhao, Jinming and
Chen, Xinyue and
Yilin, Long",
editor = "Chen, Weizhu and
Yang, Yi and
Kachuee, Mohammad and
Fu, Xue-Yong",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 3: Industry Track)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.naacl-industry.80/",
pages = "1055--1069",
ISBN = "979-8-89176-194-0",
abstract = "In human cognitive memory psychology, the context-dependent effect helps retrieve key memory cues essential for recalling relevant knowledge in problem-solving. Inspired by this, we introduce the context-dependent memory framework (CDMem), an efficient architecture miming human memory processes through multistage encoding, context-aware storage, and retrieval strategies for LLM-centric agents. We propose multistage memory encoding strategies for acquiring high-quality multilevel knowledge: expert encoding compresses raw trajectories from a domain-expert perspective, short-term encoding consolidates experiences from current tasks, and long-term encoding reflects insights from past tasks. For memory storage and retrieval, we design a graph-structured, context-dependent indexing mechanism that allows agents to efficiently and accurately recall the most relevant multilevel knowledge tailored to the current task and environmental context. Furthermore, the proposed CDMem framework is an online learning architecture, enabling agents to efficiently learn and update memory while adapting to novel environments and tasks in real-world applications. We conducted extensive experiments on two interactive decision-making benchmarks in the navigation and manipulation domain, ALFWorld and ScienceWorld. Using GPT-4o-mini, our method surpasses state-of-the-art online LLM-centric approaches, achieving success rates of 85.8{\%} and 56.0{\%}, respectively. We hope this work will serve as a valuable reference for the academic and industrial communities in advancing agent-based applications."
}
Markdown (Informal)
[An Efficient Context-Dependent Memory Framework for LLM-Centric Agents](https://aclanthology.org/2025.naacl-industry.80/) (Gao et al., NAACL 2025)
ACL
Pengyu Gao, Jinming Zhao, Xinyue Chen, and Yilin Long. 2025. An Efficient Context-Dependent Memory Framework for LLM-Centric Agents. In Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 3: Industry Track), pages 1055–1069, Albuquerque, New Mexico. Association for Computational Linguistics.
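
To make the abstract's "graph-structured, context-dependent indexing" idea concrete, here is a minimal Python sketch of one plausible reading: memory entries filed under context nodes (e.g. an environment/task pair), with retrieval walking from the most specific context out to more general parent contexts. Everything in it is assumed for illustration: the `ContextDependentMemory` class, the context keys, the `short_term`/`long_term` level names, and the ancestor-walk retrieval are hypothetical stand-ins, not the paper's implementation.

```python
# Illustrative sketch only (NOT the CDMem authors' code): memory entries
# are filed under context keys, and parent links between contexts form a
# small graph. Retrieval starts at the current context and walks outward
# to more general ancestors, so the most context-specific knowledge
# surfaces first.
from collections import defaultdict
from dataclasses import dataclass


@dataclass
class MemoryEntry:
    level: str  # e.g. "expert", "short_term", "long_term" (names assumed)
    text: str


class ContextDependentMemory:
    def __init__(self):
        self._entries = defaultdict(list)  # context key -> [MemoryEntry]
        self._parents = defaultdict(set)   # context key -> parent context keys

    def add_context(self, key, parents=()):
        self._parents[key].update(parents)

    def write(self, key, entry):
        self._entries[key].append(entry)

    def recall(self, key, level=None, limit=5):
        """Collect entries from `key` and its ancestors, most specific first."""
        seen, order, frontier = set(), [], [key]
        while frontier:
            node = frontier.pop(0)
            if node in seen:
                continue
            seen.add(node)
            order.append(node)
            frontier.extend(self._parents[node])
        hits = [e for node in order for e in self._entries[node]
                if level is None or e.level == level]
        return hits[:limit]


if __name__ == "__main__":
    mem = ContextDependentMemory()
    # Hypothetical (environment, task) context keys; None = any task.
    mem.add_context(("kitchen", "heat-object"), parents=[("kitchen", None)])
    mem.write(("kitchen", None),
              MemoryEntry("long_term", "Microwaves and stoves can heat items."))
    mem.write(("kitchen", "heat-object"),
              MemoryEntry("short_term", "The microwave is left of the fridge."))
    for e in mem.recall(("kitchen", "heat-object")):
        print(e.level, "->", e.text)
```

Running the script prints the task-specific short-term note before the more general long-term insight, loosely matching the abstract's claim that agents "recall the most relevant multilevel knowledge tailored to the current task and environmental context."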