@inproceedings{tsukagoshi-sasano-2025-redundancy,
title = "Redundancy, Isotropy, and Intrinsic Dimensionality of Prompt-based Text Embeddings",
author = "Tsukagoshi, Hayato and
Sasano, Ryohei",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-acl.1330/",
pages = "25915--25930",
ISBN = "979-8-89176-256-5",
    abstract = "Prompt-based text embedding models, which generate task-specific embeddings upon receiving tailored prompts, have recently demonstrated remarkable performance. However, their resulting embeddings often have thousands of dimensions, leading to high storage costs and increased computational costs of embedding-based operations. In this paper, we investigate how post-hoc dimensionality reduction applied to the embeddings affects the performance of various tasks that leverage these embeddings, specifically classification, clustering, retrieval, and semantic textual similarity (STS) tasks. Our experiments show that even a naive dimensionality reduction, which keeps only the first 25{\%} of the dimensions of the embeddings, results in a very slight performance degradation, indicating that these embeddings are highly redundant. Notably, for classification and clustering, even when embeddings are reduced to less than 0.5{\%} of the original dimensionality, the performance degradation is very small. To quantitatively analyze this redundancy, we perform an analysis based on the intrinsic dimensionality and isotropy of the embeddings. Our analysis reveals that embeddings for classification and clustering, which are considered to have very high dimensional redundancy, exhibit lower intrinsic dimensionality and less isotropy compared with those for retrieval and STS."
}
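
The abstract describes three operations: naive truncation (keeping only the first 25% of embedding dimensions), intrinsic dimensionality estimation, and isotropy measurement. Below is a minimal Python sketch of these ideas. It is illustrative only: the abstract does not name the paper's estimators, so `pca_intrinsic_dim` (components needed to explain 90% of variance) and `isotropy_proxy` (mean pairwise cosine similarity) are assumed stand-ins, and the random `emb` array is a hypothetical substitute for real prompt-based embeddings.

```python
# Illustrative sketch only; not the paper's exact estimators.
import numpy as np

def truncate(embeddings: np.ndarray, keep_frac: float) -> np.ndarray:
    """Naive dimensionality reduction: keep only the first k dimensions."""
    k = max(1, int(embeddings.shape[1] * keep_frac))
    return embeddings[:, :k]

def pca_intrinsic_dim(embeddings: np.ndarray, var_threshold: float = 0.9) -> int:
    """Number of principal components needed to explain `var_threshold`
    of the variance -- a simple proxy for intrinsic dimensionality."""
    centered = embeddings - embeddings.mean(axis=0, keepdims=True)
    # Singular values of the centered data give the PCA spectrum.
    s = np.linalg.svd(centered, compute_uv=False)
    var_ratio = (s ** 2) / np.sum(s ** 2)
    return int(np.searchsorted(np.cumsum(var_ratio), var_threshold) + 1)

def isotropy_proxy(embeddings: np.ndarray) -> float:
    """Mean pairwise cosine similarity; values closer to 0 suggest a more
    isotropic (directionally uniform) embedding space."""
    normed = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
    sims = normed @ normed.T
    n = len(embeddings)
    # Exclude the diagonal (self-similarity of 1).
    return float((sims.sum() - n) / (n * (n - 1)))

# Hypothetical usage with random stand-in data (real prompt-based
# embeddings would come from an embedding model, e.g. 4096-dim vectors).
rng = np.random.default_rng(0)
emb = rng.normal(size=(1000, 4096)).astype(np.float32)
emb_25 = truncate(emb, 0.25)  # keep the first 25% of dimensions
print(pca_intrinsic_dim(emb_25), isotropy_proxy(emb_25))
```

Under the paper's finding, task-specific embeddings for classification and clustering would show a lower `pca_intrinsic_dim`-style estimate and a less isotropic profile than those for retrieval and STS, which is why aggressive truncation costs them so little performance.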