@inproceedings{nedilko-2023-generative,
title = "Generative Pretrained Transformers for Emotion Detection in a Code-Switching Setting",
author = "Nedilko, Andrew",
editor = "Barnes, Jeremy and
De Clercq, Orph{\'e}e and
Klinger, Roman",
booktitle = "Proceedings of the 13th Workshop on Computational Approaches to Subjectivity, Sentiment, {\&} Social Media Analysis",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.wassa-1.61/",
doi = "10.18653/v1/2023.wassa-1.61",
pages = "616--620",
abstract = "This paper describes the approach that we utilized to participate in the shared task for multi-label and multi-class emotion classification organized as part of WASSA 2023 at ACL 2023. The objective was to build mod- els that can predict 11 classes of emotions, or the lack thereof (neutral class) based on code- mixed Roman Urdu and English SMS text messages. We participated in Track 2 of this task - multi-class emotion classification (MCEC). We used generative pretrained transformers, namely ChatGPT because it has a commercially available full-scale API, for the emotion detec- tion task by leveraging the prompt engineer- ing and zero-shot / few-shot learning method- ologies based on multiple experiments on the dev set. Although this was the first time we used a GPT model for the purpose, this ap- proach allowed us to beat our own baseline character-based XGBClassifier, as well as the baseline model trained by the organizers (bert- base-multilingual-cased). We ranked 4th and achieved the macro F1 score of 0.7038 and the accuracy of 0.7313 on the blind test set."
}
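
For readers curious what the zero-shot setup described in the abstract could look like in practice, here is a minimal sketch (not the authors' code) of prompting an OpenAI chat model to assign a single emotion label to a code-mixed Roman Urdu / English SMS. The label names, prompt wording, model name, and the `classify_sms` helper are illustrative assumptions; the paper only states that ChatGPT was used via its API with prompt engineering and zero-shot / few-shot learning.

```python
# Hedged sketch of zero-shot emotion classification for a code-mixed SMS.
# The label list (11 emotions + "neutral"), prompt text, and model name are
# illustrative assumptions, not the setup reported in the paper.
from openai import OpenAI

LABELS = [
    "anger", "anticipation", "disgust", "fear", "joy", "love",
    "optimism", "pessimism", "sadness", "surprise", "trust", "neutral",
]

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def classify_sms(text: str) -> str:
    """Ask the chat model to pick exactly one emotion label for an SMS."""
    prompt = (
        "Classify the emotion of the following code-mixed Roman Urdu / English "
        f"SMS message. Answer with exactly one label from this list: {', '.join(LABELS)}.\n\n"
        f"Message: {text}\nLabel:"
    )
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # assumed model; the paper only says "ChatGPT"
        messages=[{"role": "user", "content": prompt}],
        temperature=0,  # deterministic output for classification
    )
    answer = response.choices[0].message.content.strip().lower()
    # Fall back to "neutral" if the model returns something outside the label set.
    return answer if answer in LABELS else "neutral"


if __name__ == "__main__":
    print(classify_sms("Yaar exam bohat mushkil tha, I am so stressed"))
```

A few-shot variant of the same sketch would simply prepend a handful of labeled example messages to the prompt before the target SMS.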