@inproceedings{machina-mercer-2024-anisotropy,
    title     = {Anisotropy is Not Inherent to {Transformers}},
    author    = {Machina, Anemily and
                 Mercer, Robert},
    editor    = {Duh, Kevin and
                 Gomez, Helena and
                 Bethard, Steven},
    booktitle = {Proceedings of the 2024 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)},
    month     = jun,
    year      = {2024},
    address   = {Mexico City, Mexico},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2024.naacl-long.274/},
    doi       = {10.18653/v1/2024.naacl-long.274},
    pages     = {4892--4907},
    abstract  = {Isotropy is the property that embeddings are uniformly distributed around the origin. Previous work has shown that Transformer embedding spaces are anisotropic, which is called the representation degradation problem. This degradation has been assumed to be inherent to the standard language modeling tasks and to apply to all Transformer models regardless of their architecture. In this work we identify a set of Transformer models with isotropic embedding spaces, the large Pythia models. We examine the isotropy of Pythia models and explore how isotropy and anisotropy develop as a model is trained. We find that anisotropic models do not develop as previously theorized, using our own analysis to show that the large Pythia models optimize their final Layer Norm for isotropy, and provide reasoning why previous theoretical justifications for anisotropy were insufficient. The identification of a set of isotropic Transformer models calls previous assumptions into question, provides a set of models to contrast existing analysis, and should lead to deeper insight into isotropy.},
}
Markdown (Informal)
[Anisotropy is Not Inherent to Transformers](https://aclanthology.org/2024.naacl-long.274/) (Machina & Mercer, NAACL 2024)
ACL
- Anemily Machina and Robert Mercer. 2024. Anisotropy is Not Inherent to Transformers. In Proceedings of the 2024 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers), pages 4892–4907, Mexico City, Mexico. Association for Computational Linguistics.