@inproceedings{gupta-etal-2021-task,
  title     = {Task-Specific Pre-Training and Cross Lingual Transfer for Sentiment Analysis in {Dravidian} Code-Switched Languages},
  author    = {Gupta, Akshat and
               Rallabandi, Sai Krishna and
               Black, Alan W},
  editor    = {Chakravarthi, Bharathi Raja and
               Priyadharshini, Ruba and
               Kumar M, Anand and
               Krishnamurthy, Parameswari and
               Sherly, Elizabeth},
  booktitle = {Proceedings of the First Workshop on Speech and Language Technologies for Dravidian Languages},
  month     = apr,
  year      = {2021},
  address   = {Kyiv},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.dravidianlangtech-1.9/},
  pages     = {73--79},
  abstract  = {Sentiment analysis in Code-Mixed languages has garnered a lot of attention in recent years. It is an important task for social media monitoring and has many applications, as a large chunk of social media data is Code-Mixed. In this paper, we work on the problem of sentiment analysis for Dravidian Code-Switched languages - Tamil-English and Malayalam-English, using three different BERT based models. We leverage task-specific pre-training and cross-lingual transfer to improve on previously reported results, with significant improvement for the Tamil-English dataset. We also present a multilingual sentiment classification model that has competitive performance on both Tamil-English and Malayalam-English datasets.},
}
Markdown (Informal)
[Task-Specific Pre-Training and Cross Lingual Transfer for Sentiment Analysis in Dravidian Code-Switched Languages](https://aclanthology.org/2021.dravidianlangtech-1.9/) (Gupta et al., DravidianLangTech 2021)
ACL