@inproceedings{rahman-uzuner-2023-m1437,
title = "M1437 at {BLP}-2023 Task 2: Harnessing {B}angla Text for Sentiment Analysis: A Transformer-based Approach",
author = "Rahman, Majidur and
Uzuner, Ozlem",
editor = "Alam, Firoj and
Kar, Sudipta and
Chowdhury, Shammur Absar and
Sadeque, Farig and
Amin, Ruhul",
booktitle = "Proceedings of the First Workshop on Bangla Language Processing (BLP-2023)",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.banglalp-1.36/",
doi = "10.18653/v1/2023.banglalp-1.36",
pages = "279--285",
abstract = "Analyzing public sentiment on social media is helpful in understanding the public{'}s emotions about any given topic. While numerous studies have been conducted in this field, there has been limited research on Bangla social media data. Team M1437 from George Mason University participated in the Sentiment Analysis shared task of the Bangla Language Processing (BLP) Workshop at EMNLP-2023. The team fine-tuned various BERT-based Transformer architectures to solve the task. This article shows that $BanglaBERT_{large}$, a language model pre-trained on Bangla text, outperformed other BERT-based models. This model achieved an F1 score of 73.15{\%} and top position in the development phase, was further tuned with external training data, and achieved an F1 score of 70.36{\%} in the evaluation phase, securing the fourteenth place on the leaderboard. The F1 score on the test set, when $BanglaBERT_{large}$ was trained without external training data, was 71.54{\%}."
}
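
For readers who want a concrete picture of the approach the abstract summarizes, here is a minimal sketch of fine-tuning a BanglaBERT-style checkpoint for the three-way sentiment task with Hugging Face Transformers. This is not the authors' released code: the checkpoint ID `csebuetnlp/banglabert_large`, the CSV file names and columns, the F1 averaging, and all hyperparameters are assumptions for illustration only.

```python
# Sketch only: fine-tuning a BanglaBERT-style model for sentiment classification.
# Checkpoint ID, data files, hyperparameters, and metric averaging are assumptions,
# not values taken from the paper.
import numpy as np
from datasets import load_dataset
from sklearn.metrics import f1_score
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

model_name = "csebuetnlp/banglabert_large"  # assumed Hub checkpoint ID
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=3)

# Placeholder data loading: the BLP-2023 Task 2 splits would go here.
# Assumes CSV files with "text" and "label" columns.
dataset = load_dataset("csv", data_files={"train": "train.csv", "validation": "dev.csv"})

def tokenize(batch):
    # Truncate long posts; padding is handled per-batch by the default collator.
    return tokenizer(batch["text"], truncation=True, max_length=128)

tokenized = dataset.map(tokenize, batched=True)

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    # Micro-averaged F1 is an assumption; the task's exact metric is not given here.
    return {"micro_f1": f1_score(labels, preds, average="micro")}

args = TrainingArguments(
    output_dir="banglabert-sentiment",
    learning_rate=2e-5,             # illustrative hyperparameters
    per_device_train_batch_size=16,
    num_train_epochs=3,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized["train"],
    eval_dataset=tokenized["validation"],
    tokenizer=tokenizer,
    compute_metrics=compute_metrics,
)

trainer.train()
print(trainer.evaluate())
```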