@inproceedings{ruscio-etal-2025-beyond,
title = "Beyond Position: the emergence of wavelet-like properties in Transformers",
author = "Ruscio, Valeria and
Nanni, Umberto and
Silvestri, Fabrizio",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.303/",
pages = "6074--6088",
ISBN = "979-8-89176-251-0",
abstract = "This paper studies how Transformer models with Rotary Position Embeddings (RoPE) develop emergent, wavelet-like properties that compensate for the positional encoding{'}s theoretical limitations. Through an analysis spanning model scales, architectures, and training checkpoints, we show that attention heads evolve to implement multi-resolution processing analogous to wavelet transforms. We demonstrate that this scale-invariant behavior is unique to RoPE, emerges through distinct evolutionary phases during training, and statistically adheres to the fundamental uncertainty principle. Our findings suggest that the effectiveness of modern Transformers stems from their remarkable ability to spontaneously develop optimal, multi-resolution decompositions to address inherent architectural constraints."
}
[Beyond Position: the emergence of wavelet-like properties in Transformers](https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.303/) (Ruscio et al., ACL 2025)
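For readers unfamiliar with RoPE, the positional encoding the abstract centers on, here is a minimal NumPy sketch of the rotation mechanism and its relative-position property. It is a generic illustration assuming the standard formulation (base 10000, rotation of channel pairs), not code from the cited paper.

```python
# Illustrative sketch of Rotary Position Embeddings (RoPE); not the authors' code.
import numpy as np

def rope(x: np.ndarray, base: float = 10000.0) -> np.ndarray:
    """Apply RoPE to x of shape (seq_len, dim), dim even.

    Each channel pair (2i, 2i+1) at position m is rotated by the angle
    m * base**(-2i/dim), so different pairs vary at different frequencies,
    i.e. different positional "scales".
    """
    seq_len, dim = x.shape
    assert dim % 2 == 0, "RoPE expects an even embedding dimension"
    angles = np.arange(seq_len)[:, None] * base ** (-np.arange(0, dim, 2) / dim)
    cos, sin = np.cos(angles), np.sin(angles)
    out = np.empty_like(x, dtype=float)
    out[:, 0::2] = x[:, 0::2] * cos - x[:, 1::2] * sin
    out[:, 1::2] = x[:, 0::2] * sin + x[:, 1::2] * cos
    return out

# Relative-position property: the query/key dot product depends only on the
# positional offset, e.g. positions (5, 3) and (7, 5) give the same score.
rng = np.random.default_rng(0)
q, k = rng.normal(size=64), rng.normal(size=64)

def score(m: int, n: int) -> float:
    reps = max(m, n) + 1
    qm = rope(np.tile(q, (reps, 1)))[m]
    kn = rope(np.tile(k, (reps, 1)))[n]
    return float(qm @ kn)

print(np.isclose(score(5, 3), score(7, 5)))  # True
```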