@article{bayer-etal-2026-activellm,
  title     = {{ActiveLLM}: Large Language Model-Based Active Learning for Textual Few-Shot Scenarios},
  author    = {Bayer, Markus and
               Lutz, Justin and
               Reuter, Christian},
  journal   = {Transactions of the Association for Computational Linguistics},
  volume    = {14},
  year      = {2026},
  address   = {Cambridge, MA},
  publisher = {MIT Press},
  url       = {https://preview.aclanthology.org/ingest-eacl/2026.tacl-1.1/},
  doi       = {10.1162/tacl.a.63},
  pages     = {1--22},
  abstract  = {Active learning is designed to minimize annotation efforts by prioritizing instances that most enhance learning. However, many active learning strategies struggle with a `cold-start' problem, needing substantial initial data to be effective. This limitation reduces their utility in the increasingly relevant few-shot scenarios, where the instance selection has a substantial impact. To address this, we introduce ActiveLLM, a novel active learning approach that leverages Large Language Models such as GPT-4, o1, Llama 3, or Mistral Large for selecting instances. We demonstrate that ActiveLLM significantly enhances the classification performance of BERT classifiers in few-shot scenarios, outperforming traditional active learning methods as well as improving the few-shot learning methods ADAPET, PERFECT, and SetFit. Additionally, ActiveLLM can be extended to non-few-shot scenarios, allowing for iterative selections. In this way, ActiveLLM can even help other active learning strategies to overcome their cold-start problem. Our results suggest that ActiveLLM offers a promising solution for improving model performance across various learning setups.},
}

@comment{Non-BibTeX residue scraped from the ACL Anthology page, preserved for reference:
Markdown (Informal)
[ActiveLLM: Large Language Model-Based Active Learning for Textual Few-Shot Scenarios](https://preview.aclanthology.org/ingest-eacl/2026.tacl-1.1/) (Bayer et al., TACL 2026)
ACL}