@inproceedings{v-etal-2025-rssn,
  title     = {{RSSN} at {SemEval}-2025 Task 11: Optimizing Multi-Label Emotion Detection with Transformer-Based Models and Threshold Tuning},
  author    = {V, Ravindran and
               Sivanaiah, Rajalakshmi and
               S, Angel Deborah},
  editor    = {Rosenthal, Sara and
               Ros{\'a}, Aiala and
               Ghosh, Debanjan and
               Zampieri, Marcos},
  booktitle = {Proceedings of the 19th International Workshop on Semantic Evaluation (SemEval-2025)},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.semeval-1.105/},
  pages     = {773--779},
  isbn      = {979-8-89176-273-2},
  abstract  = {Our study explores multi-label emotion classification using fine-tuned BERT models, achieving superior performance over traditional methods such as logistic regression. The intricate nature of overlapping emotional expressions in text necessitates a robust classification framework. Fine-tuning BERT with weighted binary cross-entropy loss enhances predictive accuracy, particularly for underrepresented emotions like anger and joy. Moreover, threshold optimization plays a pivotal role in refining decision boundaries, boosting recall, and increasing the macro F1-score. Comparative analysis against RoBERTa and XGBoost further underscores the effectiveness of contextual embeddings in capturing subtle emotional nuances. Despite these improvements, challenges such as class imbalance and inter-class confusion persist, highlighting the need for future advancements in ensemble learning, contrastive pretraining, and domain-adaptive fine-tuning.},
}
@comment{
Markdown (Informal)
[RSSN at SemEval-2025 Task 11: Optimizing Multi-Label Emotion Detection with Transformer-Based Models and Threshold Tuning](https://aclanthology.org/2025.semeval-1.105/) (V et al., SemEval 2025)
ACL
}