@inproceedings{zhang-etal-2025-hierprompt,
title = "{H}ier{P}rompt: Zero-Shot Hierarchical Text Classification with {LLM}-Enhanced Prototypes",
author = "Zhang, Qian and
Su, Qinliang and
Zhu, Wei and
  Pang, Yachun",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-emnlp.207/",
doi = "10.18653/v1/2025.findings-emnlp.207",
pages = "3846--3859",
ISBN = "979-8-89176-335-7",
    abstract = "Hierarchical Text Classification is a challenging task that classifies texts into categories arranged in a hierarchy. Zero-Shot Hierarchical Text Classification (ZS-HTC) further assumes that only the hierarchical taxonomy is available, without any training data. Existing ZS-HTC works are typically built on the prototype-based framework by embedding category names into prototypes; however, these works do not perform very well due to the ambiguity and imprecision of category names. In this paper, we propose HierPrompt, a method that leverages hierarchy-aware prompts to instruct an LLM to produce more representative and informative prototypes. Specifically, we first introduce the Example Text Prototype (ETP), in conjunction with the Category Name Prototype (CNP), to enrich the information contained in hierarchical prototypes. A Maximum Similarity Propagation (MSP) technique is also proposed to account for the hierarchy in similarity calculation. Then, a hierarchical prototype refinement module is used to (i) contextualize the category names for more accurate CNPs and (ii) produce detailed example texts for each leaf category to form ETPs. Experiments on three benchmark datasets demonstrate that HierPrompt substantially outperforms existing ZS-HTC methods."
}