@inproceedings{bhat-sen-2025-xtr,
    title = "{XTR} meets {C}ol{BERT}v2: Adding {C}ol{BERT}v2 Optimizations to {XTR}",
    author = "Bhat, Riyaz Ahmad and
      Sen, Jaydeep",
    editor = "Rambow, Owen and
      Wanner, Leo and
      Apidianaki, Marianna and
      Al-Khalifa, Hend and
      Eugenio, Barbara Di and
      Schockaert, Steven and
      Darwish, Kareem and
      Agarwal, Apoorv",
    booktitle = "Proceedings of the 31st International Conference on Computational Linguistics: Industry Track",
    month = jan,
    year = "2025",
    address = "Abu Dhabi, UAE",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.coling-industry.30/",
    pages = "358--365",
    internal-note = "NOTE(review): url normalized from the temporary preview.aclanthology.org/fix-sig-urls mirror to the canonical Anthology URL; abstract repaired where inline citation parentheses were truncated. `address' holds the conference venue per ACL Anthology convention, not the publisher city.",
    abstract = "XTR (Lee et al., 2023) introduced an efficient multi-vector retrieval method that addresses the limitations of the ColBERT (Khattab and Zaharia, 2020) model by simplifying retrieval into a single stage through a modified learning objective. While XTR eliminates the need for multistage retrieval, it doesn{'}t incorporate the efficiency optimizations from ColBERTv2 (Santhanam et al., 2022), which improve indexing and retrieval speed. In this work, we enhance XTR by integrating ColBERTv2{'}s optimizations, showing that the combined approach preserves the strengths of both models. This results in a more efficient and scalable solution for multi-vector retrieval, while maintaining XTR{'}s streamlined retrieval process."
}
Markdown (Informal)
[XTR meets ColBERTv2: Adding ColBERTv2 Optimizations to XTR](https://preview.aclanthology.org/fix-sig-urls/2025.coling-industry.30/) (Bhat & Sen, COLING 2025)
ACL