@inproceedings{alomari-etal-2025-maximal,
title = "Maximal Matching Matters: Preventing Representation Collapse for Robust Cross-Modal Retrieval",
author = "Alomari, Hani and
Sivakumar, Anushka and
Zhang, Andrew and
Thomas, Chris",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.1533/",
pages = "31769--31785",
ISBN = "979-8-89176-251-0",
    abstract = "Cross-modal image-text retrieval is challenging because of the diverse possible associations between content from different modalities. Traditional methods learn a single-vector embedding to represent the semantics of each sample, but struggle to capture the nuanced and diverse relationships that can exist across modalities. Set-based approaches, which represent each sample with multiple embeddings, offer a promising alternative, as they can capture richer and more diverse relationships. In this paper, we show that, despite their promise, these set-based representations continue to face issues, including sparse supervision and set collapse, which limit their effectiveness. To address these challenges, we propose Maximal Pair Assignment Similarity, which optimizes a one-to-one matching between embedding sets while preserving semantic diversity within each set. We also introduce two loss functions to further enhance the representations: a Global Discriminative Loss to enhance distinction among embeddings, and an Intra-Set Divergence Loss to prevent collapse within each set. Our method achieves state-of-the-art performance on MS-COCO and Flickr30k without relying on external data."
}
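The core idea named in the abstract, scoring two embedding sets by their best one-to-one element matching, can be sketched roughly as below. This is a minimal illustration under stated assumptions, not the authors' released implementation: the set size `k`, embedding dimension `d`, cosine scoring, mean aggregation, and the use of the Hungarian algorithm via `scipy.optimize.linear_sum_assignment` are all choices made here for clarity.

```python
# Hypothetical sketch of one-to-one set matching in the spirit of the
# paper's Maximal Pair Assignment Similarity. Shapes, names, and the
# Hungarian-algorithm solver are assumptions, not the authors' code.
import numpy as np
from scipy.optimize import linear_sum_assignment


def maximal_pair_assignment_similarity(img_set: np.ndarray,
                                        txt_set: np.ndarray) -> float:
    """Score two embedding sets (each k x d, rows L2-normalized) by the
    one-to-one pairing of their elements with maximal total similarity."""
    # Pairwise cosine similarities between all cross-modal element pairs.
    sim = img_set @ txt_set.T  # shape (k, k)
    # The Hungarian algorithm finds the assignment maximizing total
    # similarity under the one-to-one constraint, so distinct elements
    # of each set stay responsible for distinct matches.
    rows, cols = linear_sum_assignment(sim, maximize=True)
    return float(sim[rows, cols].mean())


# Toy usage: two sets of k=4 embeddings with d=8 dimensions.
rng = np.random.default_rng(0)
a = rng.normal(size=(4, 8))
a /= np.linalg.norm(a, axis=1, keepdims=True)
b = rng.normal(size=(4, 8))
b /= np.linalg.norm(b, axis=1, keepdims=True)
print(maximal_pair_assignment_similarity(a, b))
```

The one-to-one constraint is what distinguishes this from max-pooling over all pairs: no single text embedding can absorb every image embedding's match, which is one plausible reading of how the matching discourages set collapse.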