@inproceedings{zhu-etal-2022-enhanced,
title = "Enhanced Representation with Contrastive Loss for Long-Tail Query Classification in e-commerce",
author = "Zhu, Lvxing and
Chen, Hao and
Wei, Chao and
Zhang, Weiru",
editor = "Malmasi, Shervin and
Rokhlenko, Oleg and
Ueffing, Nicola and
Guy, Ido and
Agichtein, Eugene and
Kallumadi, Surya",
booktitle = "Proceedings of the Fifth Workshop on e-Commerce and NLP (ECNLP 5)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.ecnlp-1.17/",
doi = "10.18653/v1/2022.ecnlp-1.17",
pages = "141--150",
abstract = "Query classification is a fundamental task in an e-commerce search engine, which assigns one or multiple predefined product categories in response to each search query. Taking click-through logs as training data in deep learning methods is a common and effective approach for query classification. However, the frequency distribution of queries typically has long-tail property, which means that there are few logs for most of the queries. The lack of reliable user feedback information results in worse performance of long-tail queries compared with frequent queries. To solve the above problem, we propose a novel method that leverages an auxiliary module to enhance the representations of long-tail queries by taking advantage of reliable supervised information of variant frequent queries. The long-tail queries are guided by the contrastive loss to obtain category-aligned representations in the auxiliary module, where the variant frequent queries serve as anchors in the representation space. We train our model with real-world click data from AliExpress and conduct evaluation on both offline labeled data and online AB test. The results and further analysis demonstrate the effectiveness of our proposed method."
}
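
The abstract describes a contrastive objective in which frequent "anchor" queries pull same-category long-tail queries together in representation space. Below is a minimal, illustrative sketch of such an anchored contrastive (InfoNCE-style) loss, not the authors' implementation; the function name, temperature value, and tensor shapes are assumptions for illustration only.

```python
# Hypothetical sketch of a contrastive loss where frequent-query embeddings
# act as category anchors for long-tail query embeddings (not the paper's code).
import torch
import torch.nn.functional as F

def anchored_contrastive_loss(tail_emb, anchor_emb, tail_labels, anchor_labels,
                              temperature=0.1):
    """For each long-tail query, same-category frequent anchors are positives;
    anchors from other categories are negatives."""
    tail = F.normalize(tail_emb, dim=-1)        # (B, d) long-tail query embeddings
    anchors = F.normalize(anchor_emb, dim=-1)   # (M, d) frequent-query anchors
    logits = tail @ anchors.t() / temperature   # (B, M) scaled cosine similarities
    pos_mask = (tail_labels.unsqueeze(1) == anchor_labels.unsqueeze(0)).float()
    log_prob = logits - torch.logsumexp(logits, dim=1, keepdim=True)
    # average log-likelihood over each query's positive anchors
    loss = -(pos_mask * log_prob).sum(dim=1) / pos_mask.sum(dim=1).clamp(min=1)
    return loss.mean()

# Toy usage: 4 long-tail queries, 6 frequent anchors, 3 categories, 32-dim embeddings
tail_emb = torch.randn(4, 32)
anchor_emb = torch.randn(6, 32)
tail_labels = torch.tensor([0, 1, 2, 0])
anchor_labels = torch.tensor([0, 0, 1, 1, 2, 2])
print(anchored_contrastive_loss(tail_emb, anchor_emb, tail_labels, anchor_labels))
```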