@inproceedings{mekala-etal-2021-coarse2fine,
title = "{C}oarse2{F}ine: Fine-grained Text Classification on Coarsely-grained Annotated Data",
author = "Mekala, Dheeraj and
Gangal, Varun and
Shang, Jingbo",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2021.emnlp-main.46/",
doi = "10.18653/v1/2021.emnlp-main.46",
pages = "583--594",
abstract = "Existing text classification methods mainly focus on a fixed label set, whereas many real-world applications require extending to new fine-grained classes as the number of samples per label increases. To accommodate such requirements, we introduce a new problem called coarse-to-fine grained classification, which aims to perform fine-grained classification on coarsely annotated data. Instead of asking for new fine-grained human annotations, we opt to leverage label surface names as the only human guidance and weave in rich pre-trained generative language models into the iterative weak supervision strategy. Specifically, we first propose a label-conditioned fine-tuning formulation to attune these generators for our task. Furthermore, we devise a regularization objective based on the coarse-fine label constraints derived from our problem setting, giving us even further improvements over the prior formulation. Our framework uses the fine-tuned generative models to sample pseudo-training data for training the classifier, and bootstraps on real unlabeled data for model refinement. Extensive experiments and case studies on two real-world datasets demonstrate superior performance over SOTA zero-shot classification baselines."
}
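The abstract's core mechanism can be illustrated in a few lines. Below is a minimal sketch, not the authors' released code: each document is prefixed with its label surface name for label-conditioned fine-tuning of a generative LM, and after fine-tuning the model is prompted with a fine-grained label name to sample pseudo-training data. The model choice (gpt2 via Hugging Face transformers), the `<label>` prefix format, and the sampling parameters are illustrative assumptions; the fine-tuning loop and the coarse-fine regularization objective are omitted.

```python
# Illustrative sketch of label-conditioned generation (assumptions noted above).
from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")

def make_training_text(label_name: str, document: str) -> str:
    # Label-conditioned formulation: the label surface name is the only
    # human guidance, prepended as a conditioning prefix before fine-tuning.
    return f"<{label_name}> {document}"

def sample_pseudo_documents(label_name: str, n: int = 5, max_new_tokens: int = 60):
    # After fine-tuning, condition generation on a fine-grained label name
    # to draw pseudo-training documents for that class.
    inputs = tokenizer(f"<{label_name}>", return_tensors="pt")
    outputs = model.generate(
        **inputs,
        do_sample=True,
        top_p=0.95,
        max_new_tokens=max_new_tokens,
        num_return_sequences=n,
        pad_token_id=tokenizer.eos_token_id,
    )
    return [tokenizer.decode(o, skip_special_tokens=True) for o in outputs]
```

In the paper's pipeline, pseudo-documents sampled this way train a fine-grained classifier, which is then refined by bootstrapping on real unlabeled data.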
Markdown (Informal)
[Coarse2Fine: Fine-grained Text Classification on Coarsely-grained Annotated Data](https://aclanthology.org/2021.emnlp-main.46/) (Mekala et al., EMNLP 2021)
ACL
Dheeraj Mekala, Varun Gangal, and Jingbo Shang. 2021. Coarse2Fine: Fine-grained Text Classification on Coarsely-grained Annotated Data. In Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing, pages 583–594, Online and Punta Cana, Dominican Republic. Association for Computational Linguistics.