@inproceedings{li-etal-2023-towards-better,
title = "Towards Better Representations for Multi-Label Text Classification with Multi-granularity Information",
author = "Li, Fangfang and
Su, Puzhen and
Duan, Junwen and
Xiao, Weidong",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.findings-emnlp.635/",
doi = "10.18653/v1/2023.findings-emnlp.635",
pages = "9470--9480",
abstract = "Multi-label text classification (MLTC) aims to assign multiple labels to a given text. Previous works have focused on text representation learning and label correlations modeling using pre-trained language models (PLMs). However, studies have shown that PLMs generate word frequency-oriented text representations, causing texts with different labels to be closely distributed in a narrow region, which is difficult to classify. To address this, we present a novel framework $\textbf{CL}$($\underline{C}$ontrastive $\underline{L}$earning)-$\textbf{MIL}$ ($\underline{M}$ulti-granularity $\underline{I}$nformation $\underline{L}$earning) to refine the text representation for MLTC task. We first use contrastive learning to generate uniform initial text representation and incorporate label frequency implicitly. Then, we design a multi-task learning module to integrate multi-granularity (diverse text-labels correlations, label-label relations and label frequency) information into text representations, enhancing their discriminative ability. Experimental results demonstrate the complementarity of the modules in CL-MIL, improving the quality of text representations and yielding stable and competitive improvements for MLTC."
}
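
As a rough illustration of the contrastive-learning step described in the abstract (using contrastive learning to produce uniform initial text representations), the sketch below shows a generic in-batch contrastive (NT-Xent) loss over two views of PLM sentence embeddings. The loss form, temperature, and view construction are assumptions for illustration only and are not taken from the paper's CL-MIL implementation.

```python
# Minimal, generic sketch of an in-batch contrastive (NT-Xent) loss over two
# views of PLM sentence embeddings. This is NOT the authors' CL-MIL code;
# the temperature and view construction (e.g. two dropout masks) are assumed.
import torch
import torch.nn.functional as F


def nt_xent_loss(z1: torch.Tensor, z2: torch.Tensor, temperature: float = 0.05) -> torch.Tensor:
    """Contrastive loss that pulls two views of the same text together and
    pushes apart other texts in the batch, spreading out the embeddings.

    z1, z2: (batch, dim) sentence embeddings, e.g. [CLS] vectors from a PLM
    under two different dropout masks or augmentations (assumed setup).
    """
    z1 = F.normalize(z1, dim=-1)
    z2 = F.normalize(z2, dim=-1)
    # Cosine similarity between every view-1 and view-2 embedding in the batch.
    sim = z1 @ z2.t() / temperature              # (batch, batch)
    # Each text's positive is its own second view; all other texts are negatives.
    targets = torch.arange(z1.size(0), device=z1.device)
    return F.cross_entropy(sim, targets)


if __name__ == "__main__":
    z1, z2 = torch.randn(8, 768), torch.randn(8, 768)
    print(nt_xent_loss(z1, z2).item())
```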