@inproceedings{ren-etal-2022-empowering,
title = "Empowering Dual-Encoder with Query Generator for Cross-Lingual Dense Retrieval",
author = "Ren, Houxing and
Shou, Linjun and
Wu, Ning and
Gong, Ming and
Jiang, Daxin",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2022.emnlp-main.203/",
doi = "10.18653/v1/2022.emnlp-main.203",
pages = "3107--3121",
abstract = "In monolingual dense retrieval, lots of works focus on how to distill knowledge from cross-encoder re-ranker to dual-encoder retriever and these methods achieve better performance due to the effectiveness of cross-encoder re-ranker. However, we find that the performance of the cross-encoder re-ranker is heavily influenced by the number of training samples and the quality of negative samples, which is hard to obtain in the cross-lingual setting. In this paper, we propose to use a query generator as the teacher in the cross-lingual setting, which is less dependent on enough training samples and high-quality negative samples. In addition to traditional knowledge distillation, we further propose a novel enhancement method, which uses the query generator to help the dual-encoder align queries from different languages, but does not need any additional parallel sentences. The experimental results show that our method outperforms the state-of-the-art methods on two benchmark datasets."
}