@inproceedings{wang-etal-2022-learning-decoupled,
title = "Learning Decoupled Retrieval Representation for Nearest Neighbour Neural Machine Translation",
author = "Wang, Qiang and
Weng, Rongxiang and
Chen, Ming",
editor = "Calzolari, Nicoletta and
Huang, Chu-Ren and
Kim, Hansaem and
Pustejovsky, James and
Wanner, Leo and
Choi, Key-Sun and
Ryu, Pum-Mo and
Chen, Hsin-Hsi and
Donatelli, Lucia and
Ji, Heng and
Kurohashi, Sadao and
Paggio, Patrizia and
Xue, Nianwen and
Kim, Seokhwan and
Hahm, Younggyun and
He, Zhong and
Lee, Tony Kyungil and
Santus, Enrico and
Bond, Francis and
Na, Seung-Hoon",
booktitle = "Proceedings of the 29th International Conference on Computational Linguistics",
month = oct,
year = "2022",
address = "Gyeongju, Republic of Korea",
publisher = "International Committee on Computational Linguistics",
url = "https://aclanthology.org/2022.coling-1.456/",
pages = "5142--5147",
abstract = "K-Nearest Neighbor Neural Machine Translation (kNNMT) successfully incorporates an external corpus by retrieving word-level representations at test time. Generally, kNNMT borrows the off-the-shelf context representation from the translation task, e.g., the output of the last decoder layer, as the query vector for the retrieval task. In this work, we highlight that coupling the representations of these two tasks is sub-optimal for fine-grained retrieval. To alleviate this, we leverage supervised contrastive learning to learn a distinctive retrieval representation derived from the original context representation. We also propose a fast and effective approach to constructing hard negative samples. Experimental results on five domains show that our approach improves retrieval accuracy and BLEU scores compared to vanilla kNNMT."
}
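The abstract describes decoupling the retrieval representation from the decoder's context representation and training it with supervised contrastive learning against hard negatives. Below is a minimal, hypothetical PyTorch sketch of that idea; it is not the authors' code, and every name here (RetrievalHead, supervised_contrastive_loss, the temperature value) is an illustrative assumption.

```python
# A minimal sketch (not the authors' implementation) of learning a
# decoupled retrieval representation with a supervised contrastive,
# InfoNCE-style loss, as outlined in the abstract.
import torch
import torch.nn as nn
import torch.nn.functional as F

class RetrievalHead(nn.Module):
    """Maps the decoder context vector to a separate retrieval vector."""
    def __init__(self, d_model: int, d_retrieval: int):
        super().__init__()
        self.proj = nn.Sequential(
            nn.Linear(d_model, d_model),
            nn.ReLU(),
            nn.Linear(d_model, d_retrieval),
        )

    def forward(self, context: torch.Tensor) -> torch.Tensor:
        # L2-normalise so dot products act as cosine similarities.
        return F.normalize(self.proj(context), dim=-1)

def supervised_contrastive_loss(query, positives, negatives, temperature=0.1):
    """Pull the query toward datastore entries that share the gold target
    token (positives) and push it away from hard negatives, e.g. nearest
    neighbours carrying a different target token.

    query:     (d,)    retrieval vector for the current decoding step
    positives: (P, d)  entries with the same target token
    negatives: (N, d)  hard negative entries
    """
    pos_logits = positives @ query / temperature  # (P,)
    neg_logits = negatives @ query / temperature  # (N,)
    logits = torch.cat([pos_logits, neg_logits])  # (P+N,)
    # Multi-positive InfoNCE: average over the positive terms.
    log_prob = logits - torch.logsumexp(logits, dim=0)
    return -log_prob[: positives.size(0)].mean()
```

The sketch assumes the kNN-MT datastore keys can be re-encoded with the same head, so retrieval at test time compares query and keys in the learned space rather than in the raw decoder-output space.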