@inproceedings{choenni-titov-2025-wanda,
title = "{M}-{W}anda: Improving One-Shot Pruning for Multilingual {LLM}s",
author = "Choenni, Rochelle and
Titov, Ivan",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.1369/",
pages = "26939--26952",
ISBN = "979-8-89176-332-6",
abstract = "Multilingual LLM performance is often critically dependent on model size. With an eye on efficiency, this has led to a surge in interest in one-shot pruning methods that retain the benefits of large-scale pretraining while shrinking the model size. However, as pruning tends to come with performance loss, it is important to understand the trade-offs between multilinguality and sparsification. In this work, we study multilingual performance under different sparsity constraints and show that moderate ratios already substantially harm performance. To help bridge this gap, we propose M-Wanda, a pruning method that models cross-lingual variation by incorporating language-aware activation statistics into its pruning criterion and dynamically adjusts layerwise sparsity based on cross-lingual importance. We show that M-Wanda consistently improves performance at minimal additional costs. We are the first to explicitly optimize pruning to retain multilingual performance, and hope to inspire future advances in multilingual pruning."
}