@inproceedings{karl-scherp-2025-hydra,
title = "{HYDRA}: A Multi-Head Encoder-only Architecture for Hierarchical Text Classification",
author = "Karl, Fabian and
Scherp, Ansgar",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.472/",
pages = "9303--9314",
ISBN = "979-8-89176-332-6",
abstract = "We introduce HYDRA, a simple yet effective multi-head encoder-only architecture for hierarchical text classification that treats each level in the hierarchy as a separate classification task with its own label space. State-of-the-art approaches rely on complex components like graph encoders, label semantics, and autoregressive decoders. We demonstrate that such complexity is often unnecessary. Through parameter sharing and level-specific parameterization, HYDRA enables flat models to incorporate hierarchical awareness without architectural complexity. Experiments on four benchmarks (NYT, RCV1-V2, BGC, and WOS) demonstrate that HYDRA always increases the performance over flat models and matches or exceeds the performance of complex state-of-the-art methods."
}

Markdown (Informal)
[HYDRA: A Multi-Head Encoder-only Architecture for Hierarchical Text Classification](https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-main.472/) (Karl & Scherp, EMNLP 2025)
ACL
Fabian Karl and Ansgar Scherp. 2025. HYDRA: A Multi-Head Encoder-only Architecture for Hierarchical Text Classification. In Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing, pages 9303–9314, Suzhou, China. Association for Computational Linguistics.
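
For intuition only, a minimal sketch of the idea summarized in the abstract: a shared encoder with one classification head per hierarchy level, each head having its own label space, and the per-level losses summed. This is based solely on the abstract above, not on the authors' implementation; the class and function names, the Hugging Face-style encoder interface, and the multi-label loss choice are all assumptions.

```python
import torch
import torch.nn as nn


class MultiHeadHierarchicalClassifier(nn.Module):
    """Sketch: shared encoder (parameter sharing) + one linear head per hierarchy level
    (level-specific parameterization), as described in the abstract."""

    def __init__(self, encoder: nn.Module, hidden_size: int, labels_per_level: list[int]):
        super().__init__()
        self.encoder = encoder  # assumed to return an object with .last_hidden_state (HF-style)
        # One head per level, each over that level's own label space.
        self.heads = nn.ModuleList(
            nn.Linear(hidden_size, num_labels) for num_labels in labels_per_level
        )

    def forward(self, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> list[torch.Tensor]:
        # Shared encoding used by all levels.
        hidden = self.encoder(input_ids=input_ids, attention_mask=attention_mask).last_hidden_state
        pooled = hidden[:, 0]  # [CLS]-token representation
        # One set of logits per hierarchy level.
        return [head(pooled) for head in self.heads]


def multi_level_loss(logits_per_level: list[torch.Tensor],
                     targets_per_level: list[torch.Tensor]) -> torch.Tensor:
    # Each level is treated as its own classification task; losses are summed.
    loss_fn = nn.BCEWithLogitsLoss()  # multi-label assumption; use CrossEntropyLoss if single-label
    return sum(loss_fn(logits, targets)
               for logits, targets in zip(logits_per_level, targets_per_level))
```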