@inproceedings{ghattas-etal-2025-pruning,
title = "On Pruning State-Space {LLM}s",
author = "Ghattas, Tamer and
Hassid, Michael and
Schwartz, Roy",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.950/",
pages = "18811--18825",
ISBN = "979-8-89176-332-6",
abstract = "Recent work proposed state-space models (SSMs) as an efficient alternative to transformer-based LLMs. Can these models be pruned to further reduce their computation costs? We adapt several pruning methods to the SSM structure, and apply them to four SSM-based LLMs across multiple tasks. We find that such models are quite robust to some pruning methods (e.g., WANDA), while using other methods lead to fast performance degradation."
}

Markdown (Informal)
[On Pruning State-Space LLMs](https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.950/) (Ghattas et al., EMNLP 2025)
ACL
Tamer Ghattas, Michael Hassid, and Roy Schwartz. 2025. On Pruning State-Space LLMs. In Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing, pages 18811–18825, Suzhou, China. Association for Computational Linguistics.