@inproceedings{pitorro-treviso-2025-latim,
  title     = {{LaTIM}: Measuring Latent Token-to-Token Interactions in {Mamba} Models},
  author    = {Pitorro, Hugo and Treviso, Marcos Vinicius},
  editor    = {Che, Wanxiang and Nabende, Joyce and Shutova, Ekaterina and Pilehvar, Mohammad Taher},
  booktitle = {Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.acl-long.1194/},
  pages     = {24478--24493},
  isbn      = {979-8-89176-251-0},
  abstract  = {State space models (SSMs), such as Mamba, have emerged as an efficient alternative to transformers for long-context sequence modeling. However, despite their growing adoption, SSMs lack the interpretability tools that have been crucial for understanding and improving attention-based architectures. While recent efforts provide insights into Mamba{'}s internal mechanisms, they struggle to capture precise token-level interactions at the layer level, leaving gaps in understanding how Mamba selectively processes sequences across layers. In this work, we introduce LaTIM, a novel token-level decomposition method for both Mamba-1 and Mamba-2 that enables fine-grained interpretability. We extensively evaluate our method across diverse tasks, including machine translation, copying, and retrieval-based generation, demonstrating its effectiveness in revealing Mamba{'}s token-to-token interaction patterns.},
}
Markdown (Informal)
[LaTIM: Measuring Latent Token-to-Token Interactions in Mamba Models](https://aclanthology.org/2025.acl-long.1194/) (Pitorro & Treviso, ACL 2025)
ACL