@inproceedings{riyadh-nabhani-2024-mela,
  title     = {{Mela} at {ArAIEval} Shared Task: Propagandistic Techniques Detection in {Arabic} with a Multilingual Approach},
  author    = {Riyadh, Md Abdur Razzaq and
               Nabhani, Sara},
  editor    = {Habash, Nizar and
               Bouamor, Houda and
               Eskander, Ramy and
               Tomeh, Nadi and
               Abu Farha, Ibrahim and
               Abdelali, Ahmed and
               Touileb, Samia and
               Hamed, Injy and
               Onaizan, Yaser and
               Alhafni, Bashar and
               Antoun, Wissam and
               Khalifa, Salam and
               Haddad, Hatem and
               Zitouni, Imed and
               AlKhamissi, Badr and
               Almatham, Rawan and
               Mrini, Khalil},
  booktitle = {Proceedings of the Second Arabic Natural Language Processing Conference},
  month     = aug,
  year      = {2024},
  address   = {Bangkok, Thailand},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.arabicnlp-1.47/},
  doi       = {10.18653/v1/2024.arabicnlp-1.47},
  pages     = {478--482},
  abstract  = {This paper presents our system submitted for Task 1 of the ArAIEval Shared Task on Unimodal (Text) Propagandistic Technique Detection in Arabic. Task 1 involves identifying all employed propaganda techniques in a given text from a set of possible techniques or detecting that no propaganda technique is present. Additionally, the task requires identifying the specific spans of text where these techniques occur. We explored the capabilities of a multilingual BERT model for this task, focusing on the effectiveness of using outputs from different hidden layers within the model. By fine-tuning the multilingual BERT, we aimed to improve the model{'}s ability to recognize and locate various propaganda techniques. Our experiments showed that leveraging the hidden layers of the BERT model enhanced detection performance. Our system achieved competitive results, ranking second in the shared task, demonstrating that multilingual BERT models, combined with outputs from hidden layers, can effectively detect and identify spans of propaganda techniques in Arabic text.},
}
@comment{
Markdown (Informal):
[Mela at ArAIEval Shared Task: Propagandistic Techniques Detection in Arabic with a Multilingual Approach](https://aclanthology.org/2024.arabicnlp-1.47/) (Riyadh & Nabhani, ArabicNLP 2024)
ACL
}