@inproceedings{wang-liu-2025-dianchi,
title = "Dianchi at {S}em{E}val-2025 Task 11: Multilabel Emotion Recognition via Orthogonal Knowledge Distillation",
author = "Wang, Zhenlan and
Liu, Jiaxuan",
editor = "Rosenthal, Sara and
Ros{\'a}, Aiala and
Ghosh, Debanjan and
Zampieri, Marcos",
booktitle = "Proceedings of the 19th International Workshop on Semantic Evaluation (SemEval-2025)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/transition-to-people-yaml/2025.semeval-1.146/",
pages = "1108--1112",
ISBN = "979-8-89176-273-2",
abstract = "This paper presents KDBERT-MLDistill, a novel framework for multi-label emotion recognition developed for SemEval-2025 Task 11. To address fine-grained emotion misdetection and overfitting on small datasets, the method combines BERT-based text encoding with orthogonal knowledge distillation. Key innovations include: (1) Orthogonal regularization on classifier weights to minimize redundant feature correlations, coupled with dynamic pseudo-labeling for periodic data augmentation; (2) A hierarchical distillation mechanism in which dual teacher-student models iteratively exchange parameters to balance knowledge retention and exploration."
}
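To make the abstract's two innovations concrete, below is a minimal, hypothetical PyTorch sketch of (1) an orthogonality penalty on classifier weights and (2) an EMA-style teacher-student parameter exchange. This is not the authors' KDBERT-MLDistill code; the names `ortho_penalty` and `exchange_parameters`, the coefficient `lam`, and the momentum value are all illustrative assumptions.

```python
# Hypothetical sketch of the two mechanisms named in the abstract;
# not the authors' implementation.
import torch
import torch.nn as nn


def ortho_penalty(weight: torch.Tensor) -> torch.Tensor:
    """Frobenius penalty ||W W^T - I||_F^2 that pushes the rows of the
    classifier weight matrix toward mutual orthogonality, discouraging
    redundant correlations between per-emotion features."""
    gram = weight @ weight.t()                       # (num_labels, num_labels)
    identity = torch.eye(gram.size(0), device=weight.device)
    return ((gram - identity) ** 2).sum()


@torch.no_grad()
def exchange_parameters(teacher: nn.Module, student: nn.Module,
                        momentum: float = 0.99) -> None:
    """EMA-style update sketching the 'iterative parameter exchange'
    between dual teacher-student models described in the abstract."""
    for t_param, s_param in zip(teacher.parameters(), student.parameters()):
        t_param.mul_(momentum).add_(s_param, alpha=1.0 - momentum)


# Toy setup: a linear head over pooled encoder features, trained with a
# multi-label objective plus the orthogonality penalty.
hidden_size, num_labels, lam = 768, 5, 1e-3
classifier = nn.Linear(hidden_size, num_labels)
criterion = nn.BCEWithLogitsLoss()

features = torch.randn(8, hidden_size)               # stand-in for BERT pooled output
targets = torch.randint(0, 2, (8, num_labels)).float()

logits = classifier(features)
loss = criterion(logits, targets) + lam * ortho_penalty(classifier.weight)
loss.backward()
```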