@inproceedings{yoon-arik-2025-embedding,
title = "Embedding-Converter: A Unified Framework for Cross-Model Embedding Transformation",
author = "Yoon, Jinsung and
Arik, Sercan O",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.1237/",
pages = "25464--25482",
ISBN = "979-8-89176-251-0",
abstract = "Embedding models play a crucial role in machine learning. However, the continuous development of new models presents a major challenge: migrating to a potentially superior model often requires the computationally expensive process of re-embedding entire datasets{---}without any guarantee of performance improvement. This paper presents Embedding-Converter, a novel framework for efficiently transforming embeddings between different models, thus avoiding costly `re-embedding'. The proposed approach achieves 100 times faster and cheaper computations in real-world applications. Experiments show that Embedding-Converter not only streamlines transitions to new models, but can also improve upon the source model{'}s performance, approaching that of the target model. This facilitates efficient evaluation and broader adoption of new embedding models by significantly reducing the overhead of model switching. Furthermore, Embedding-Converter addresses latency limitations by enabling the use of smaller models for online tasks while still benefiting from the performance of larger models offline. By promoting the release of converters alongside new embedding models, Embedding-Converter fosters a more dynamic and accessible ecosystem for embedding model development and deployment."
}