@inproceedings{yang-etal-2023-improved,
  title     = {Improved Training of Deep Text Clustering},
  author    = {Yang, Zonghao and
               Hu, Wenpeng and
               Tan, Yushan and
               Luo, Zhunchen},
  editor    = {Bouamor, Houda and
               Pino, Juan and
               Bali, Kalika},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2023},
  month     = dec,
  year      = {2023},
  address   = {Singapore},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.findings-emnlp.163/},
  doi       = {10.18653/v1/2023.findings-emnlp.163},
  pages     = {2490--2499},
  abstract  = {The classical deep clustering optimization methods basically leverage information such as clustering centers, mutual information, and distance metrics to construct implicit generalized labels to establish information feedback (weak supervision) and thus optimize the deep model. However, the resulting generalized labels have different degrees of errors in the whole clustering process due to the limitation of clustering accuracy, which greatly interferes with the clustering process. To this end, this paper proposes a general deep clustering optimization method from the perspective of empirical risk minimization, using the correlation relationship between the samples. Experiments on two classical deep clustering methods demonstrate the necessity and effectiveness of the method. Code is available at https://github.com/yangzonghao1024/DCGLU.},
}
Markdown (Informal)
[Improved Training of Deep Text Clustering](https://aclanthology.org/2023.findings-emnlp.163/) (Yang et al., Findings 2023)
ACL
- Zonghao Yang, Wenpeng Hu, Yushan Tan, and Zhunchen Luo. 2023. Improved Training of Deep Text Clustering. In Findings of the Association for Computational Linguistics: EMNLP 2023, pages 2490–2499, Singapore. Association for Computational Linguistics.