@inproceedings{rifat-dewan-2026-reignite,
  title     = "{REIGNITE} at {AbjadMed}: Imbalance-Aware Fine-Tuning of Pretrained {Arabic} Transformers for {Arabic} Medical Text Classification Task",
  author    = "Rifat, Nahid Montasir and
               Dewan, Foyez Ahmed",
  booktitle = "Proceedings of the 2nd Workshop on {NLP} for Languages Using {Arabic} Script",
  month     = mar,
  year      = "2026",
  address   = "Rabat, Morocco",
  publisher = "Association for Computational Linguistics",
  url       = "https://aclanthology.org/2026.abjadnlp-1.19/",
  pages     = "132--136",
  abstract  = "This paper presents our system developed for the AbjadNLP Shared Task 4 on Medical Text Classification in Arabic, which aims to assign Arabic medical question-answer pairs to a predefined set of medical categories. The task poses significant challenges due to severe class imbalance across 82 categories and the linguistic complexity of domain-specific Arabic medical text. To address these challenges, we propose an imbalance-aware training framework that combines targeted data augmentation for minority classes with class-weighted focal loss during fine-tuning. We evaluate multiple Arabic pretrained transformer models under a unified training configuration and further improve robustness through a majority-voting ensemble of the best-performing models. Our approach achieves competitive performance, ranking 15th on the private leaderboard with a macro F1 score of 0.4052, demonstrating the effectiveness of combining different data augmentation techniques, imbalance-aware training objectives, and ensemble learning for large-scale, highly imbalanced Arabic medical text classification. The code is available on GitHub.",
}
[REIGNITE at AbjadMed: Imbalance-Aware Fine-Tuning of Pretrained Arabic Transformers for Arabic Medical Text Classification Task](https://aclanthology.org/2026.abjadnlp-1.19/) (Rifat & Dewan, AbjadNLP 2026)
ACL