@inproceedings{saleh-biltawi-2025-htu,
title = "{HTU} at {S}em{E}val-2025 Task 11: Divide and Conquer - Multi-Label emotion classification using 6 {D}ziri{BERT}s submodels with Label-fused Iterative Mask Filling technique for low-resource data augmentation.",
author = "Saleh, Abdallah and
Biltawi, Mariam",
editor = "Rosenthal, Sara and
Ros{\'a}, Aiala and
Ghosh, Debanjan and
Zampieri, Marcos",
booktitle = "Proceedings of the 19th International Workshop on Semantic Evaluation (SemEval-2025)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/corrections-2025-08/2025.semeval-1.94/",
pages = "675--683",
ISBN = "979-8-89176-273-2",
    abstract = "In this paper, the authors address the challenges of multi-label emotion detection in the Algerian dialect by proposing a novel Label-fused Iterative Mask Filling (L-IMF) data augmentation technique combined with a multi-model architecture. The approach leverages DziriBERT, a BERT variant pre-trained on Algerian text, to generate contextually and label-sensitive augmented data, mitigating class imbalance while preserving label consistency. The proposed method uses six independent classifiers, each trained on a balanced dataset for a dedicated label, to improve performance. The results show a significant improvement on the multi-label classification task using deep learning, with a macro F1 score of 0.536 on the validation dataset and 0.486 on the test dataset; the system ranked 28/41 on the Algerian dialect scoreboard, which is more than 7{\%} higher than the task baseline using RemBERT."
}