@inproceedings{smid-etal-2024-llama,
title = "{LL}a{MA}-Based Models for Aspect-Based Sentiment Analysis",
author = "{\v{S}}m{\'i}d, Jakub and
Priban, Pavel and
Kral, Pavel",
editor = "De Clercq, Orph{\'e}e and
Barriere, Valentin and
Barnes, Jeremy and
Klinger, Roman and
Sedoc, Jo{\~a}o and
Tafreshi, Shabnam",
booktitle = "Proceedings of the 14th Workshop on Computational Approaches to Subjectivity, Sentiment, {\&} Social Media Analysis",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2024.wassa-1.6/",
doi = "10.18653/v1/2024.wassa-1.6",
pages = "63--70",
abstract = "While large language models (LLMs) show promise for various tasks, their performance in compound aspect-based sentiment analysis (ABSA) tasks lags behind fine-tuned models. However, the potential of LLMs fine-tuned for ABSA remains unexplored. This paper examines the capabilities of open-source LLMs fine-tuned for ABSA, focusing on LLaMA-based models. We evaluate the performance across four tasks and eight English datasets, finding that the fine-tuned Orca 2 model surpasses state-of-the-art results in all tasks. However, all models struggle in zero-shot and few-shot scenarios compared to fully fine-tuned ones. Additionally, we conduct error analysis to identify challenges faced by fine-tuned models."
}
Markdown (Informal)
[LLaMA-Based Models for Aspect-Based Sentiment Analysis](https://aclanthology.org/2024.wassa-1.6/) (Šmíd et al., WASSA 2024)
ACL
- Jakub Šmíd, Pavel Přibáň, and Pavel Král. 2024. LLaMA-Based Models for Aspect-Based Sentiment Analysis. In Proceedings of the 14th Workshop on Computational Approaches to Subjectivity, Sentiment, & Social Media Analysis, pages 63–70, Bangkok, Thailand. Association for Computational Linguistics.