@inproceedings{munker-etal-2025-zero,
title = "Zero-shot prompt-based classification: topic labeling in times of foundation models in {G}erman Tweets",
author = {M{\"u}nker, Simon and
Kugler, Kai and
Rettinger, Achim},
editor = "Zhao, Jin and
Wang, Mingyang and
Liu, Zhu",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 4: Student Research Workshop)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.acl-srw.4/",
pages = "53--63",
ISBN = "979-8-89176-254-1",
abstract = "Filtering and annotating textual data are routine tasks in many areas, like social media or news analytics. Automating these tasks allows to scale the analyses wrt. speed and breadth of content covered and decreases the manual effort required. Due to technical advancements in Natural Language Processing, specifically the success of large foundation models, a new tool for automating such annotation processes by using a text-to-text interface given written guidelines without providing training samples has become available. In this work, we assess these advancements \textit{in-the-wild} by empirically testing them in an annotation task on German Twitter data about social and political European crises. We compare the prompt-based results with our human annotation and preceding classification approaches, including Naive Bayes and a BERT-based fine-tuning/domain adaptation pipeline. Our results show that the prompt-based approach {--} despite being limited by local computation resources during the model selection {--} is comparable with the fine-tuned BERT but without any annotated training data. Our findings emphasize the ongoing paradigm shift in the NLP landscape, i.e., the unification of downstream tasks and elimination of the need for pre-labeled training data."
}
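
A minimal sketch of the zero-shot prompt-based labeling the abstract describes, assuming a locally hosted instruction-tuned model served through the Hugging Face transformers text-generation pipeline; the model name, prompt wording, and label set below are illustrative assumptions, not the authors' actual annotation guidelines or setup.

```python
# Illustrative only: zero-shot topic labeling of German tweets from a
# written-guideline prompt, with no training samples. The model, labels,
# and prompt text are assumptions, not the paper's exact configuration.
from transformers import pipeline

# Hypothetical topic inventory for European social/political crises.
LABELS = ["migration", "climate", "covid-19", "war in ukraine", "other"]

generator = pipeline(
    "text-generation",
    model="mistralai/Mistral-7B-Instruct-v0.2",  # any local instruction-tuned model
)

def label_tweet(tweet: str) -> str:
    """Prompt the model with short guidelines and map its reply onto one label."""
    prompt = (
        "You are annotating German tweets about European social and political crises.\n"
        f"Assign exactly one topic label from: {', '.join(LABELS)}.\n"
        f"Tweet: {tweet}\n"
        "Topic:"
    )
    reply = generator(
        prompt,
        max_new_tokens=10,
        do_sample=False,
        return_full_text=False,  # return only the generated continuation
    )[0]["generated_text"].strip().lower()
    # Fall back to "other" if the reply does not name a known label.
    return next((label for label in LABELS if label in reply), "other")

print(label_tweet("Die Energiepreise steigen wegen des Krieges weiter."))
```

Greedy decoding with a small `max_new_tokens` budget keeps the reply short enough to match directly against the label set.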
Markdown (Informal)
[Zero-shot prompt-based classification: topic labeling in times of foundation models in German Tweets](https://preview.aclanthology.org/landing_page/2025.acl-srw.4/) (Münker et al., ACL 2025)