@inproceedings{wang-etal-2025-sparsifying,
    title     = {Sparsifying {Mamba}},
    author    = {Wang, An and
                 Xie, Ruobing and
                 Li, Shuaipeng and
                 Sun, Xingwu and
                 Kang, Zhanhui},
    editor    = {Christodoulopoulos, Christos and
                 Chakraborty, Tanmoy and
                 Rose, Carolyn and
                 Peng, Violet},
    booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2025},
    month     = nov,
    year      = {2025},
    address   = {Suzhou, China},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.findings-emnlp.959/},
    doi       = {10.18653/v1/2025.findings-emnlp.959},
    pages     = {17661--17667},
    isbn      = {979-8-89176-335-7},
    abstract  = {The Transformer architecture has long dominated the development of large language models, but its quadratic complexity in sequence length presents scalability challenges. Recent advances in State Space Models, particularly Mamba series, offer a promising alternative with linear-time inference and competitive performance. While scaling model capacity via sparsification, exemplified by Mixture-of-Experts, has proven effective in reducing computation while expanding knowledge capacity, the integration of sparsification with Mamba remains largely unexplored. Existing attempts typically apply naive block-level stacking, failing to leverage Mamba{'}s internal structure for fine-grained sparsification. In this work, we mainly explore how to sparsify the parameters inside Mamba. We found that the effects of using sparsification strategies on parameters related to various mechanisms inside mamba are significantly different. Our proposed Mamba-MoZ framework introduces a flexible and effective sparsification mechanism inside Mamba, which can independently achieve parameter scalability and has stronger performance.},
}
Markdown (Informal)
[Sparsifying Mamba](https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.959/) (Wang et al., Findings 2025)
ACL
- An Wang, Ruobing Xie, Shuaipeng Li, Xingwu Sun, and Zhanhui Kang. 2025. Sparsifying Mamba. In Findings of the Association for Computational Linguistics: EMNLP 2025, pages 17661–17667, Suzhou, China. Association for Computational Linguistics.