@inproceedings{wang-etal-2025-svd,
title = "{SVD}-{GCL}: A Noise-Augmented Hybrid Graph Contrastive Learning Framework for Recommendation",
author = "Wang, Liping and
Li, Shichao and
Wang, Hui and
Gao, Yuyan and
Wei, Mingyao",
editor = "Rambow, Owen and
Wanner, Leo and
Apidianaki, Marianna and
Al-Khalifa, Hend and
Di Eugenio, Barbara and
Schockaert, Steven",
booktitle = "Proceedings of the 31st International Conference on Computational Linguistics",
month = jan,
year = "2025",
address = "Abu Dhabi, UAE",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2025.coling-main.35/",
pages = "529--539",
abstract = "Recently, deep graph neural networks (GNNs) have emerged as the predominant architecture for recommender systems based on collaborative filtering. Nevertheless, numerous GNN-based approaches confront challenges such as complex computations and skewed feature distributions, especially with high-dimensional, sparse, and noisy data, making it difficult to accurately capture user preferences. To tackle these issues, we introduce SVD-GCL, a streamlined graph contrastive learning recommendation model based on noise augmentation that integrates truncated singular value decomposition in the feature engineering stage. This hybrid optimization approach reduces the dimensionality and denoises the original data. Through extracting self-supervised signals and gradually adding noise to embeddings in the training phase to enrich data samples, the data sparsity is effectively alleviated. Experimental outcomes on three large public benchmark datasets illustrate that SVD-GCL effectively manages high-dimensional sparse data, remains stable in the presence of noise, and provides significant advantages in computational efficiency, recommendation performance, and robustness."
}
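The abstract combines two ingredients: truncated SVD to denoise and reduce the dimensionality of the sparse interaction data, and noise-augmented embedding views trained with a contrastive objective. The sketch below illustrates those two ideas on toy data; it is not the authors' implementation, and all names and hyperparameters (`k`, `eps`, `tau`, `noisy_view`, `info_nce`) are illustrative assumptions.

```python
# Hypothetical sketch of the two components named in the abstract:
# (1) truncated SVD as a low-rank denoiser of a sparse user-item matrix,
# (2) noise-perturbed embedding views scored with an InfoNCE-style loss.
import numpy as np
from scipy.sparse import random as sparse_random
from scipy.sparse.linalg import svds

# Toy sparse interaction matrix (users x items), stand-in for real data.
R = sparse_random(1000, 500, density=0.01, format="csr", random_state=0)

# Truncated SVD keeps only the top-k singular components, which both
# reduces dimensionality and filters out low-energy (noisy) directions.
k = 32  # illustrative rank, not a value from the paper
U, s, Vt = svds(R, k=k)
user_emb = U * s       # (1000, k) user embeddings
item_emb = Vt.T * s    # (500, k) item embeddings

def noisy_view(emb, eps=0.1, seed=0):
    """Return a noise-augmented view of the embeddings (one common
    noise-augmentation recipe; the paper's exact scheme may differ)."""
    rng = np.random.default_rng(seed)
    noise = rng.normal(size=emb.shape)
    noise = eps * noise / np.linalg.norm(noise, axis=1, keepdims=True)
    return emb + noise

def info_nce(a, b, tau=0.2):
    """InfoNCE-style contrastive loss between two views of the same nodes."""
    a = a / np.linalg.norm(a, axis=1, keepdims=True)
    b = b / np.linalg.norm(b, axis=1, keepdims=True)
    logits = a @ b.T / tau          # pairwise similarities
    pos = np.diag(logits)           # matching rows are the positives
    return float(np.mean(np.log(np.exp(logits).sum(axis=1)) - pos))

loss = info_nce(noisy_view(user_emb, seed=1), noisy_view(user_emb, seed=2))
print(f"contrastive loss on toy data: {loss:.3f}")
```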