@inproceedings{verma-etal-2025-codessm,
title = "{C}ode{SSM}: Towards State Space Models for Code Understanding",
author = "Verma, Shweta and
Anand, Abhinav and
Mezini, Mira",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.emnlp-main.1735/",
doi = "10.18653/v1/2025.emnlp-main.1735",
pages = "34207--34223",
ISBN = "979-8-89176-332-6",
abstract = "Although transformers dominate many code-specific tasks, they have significant limitations. This paper explores State Space Models (SSMs) as a promising alternative for code understanding tasks such as retrieval, classification, and clone detection. We introduce CodeSSM, the first SSM-based model trained on code corpora to assess its effectiveness. Our results demonstrate that SSMs are more sample-efficient and can extrapolate to longer contexts beyond the pretraining length. Extensive experiments show that SSMs offer a viable alternative to transformers, addressing several their limitations. Additionally, CodeSSM reduces memory usage by up to 64{\%} compared to transformers at a context length of 2048, with greater savings as context length grows.The code is available [here](https://github.com/abx04/CodeSSM)."
}