@inproceedings{liu-etal-2025-take,
title = "Take the essence and discard the dross: A Rethinking on Data Selection for Fine-Tuning Large Language Models",
author = "Liu, Ziche and
Ke, Rui and
Liu, Yajiao and
Jiang, Feng and
Li, Haizhou",
editor = "Chiruzzo, Luis and
Ritter, Alan and
Wang, Lu",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.naacl-long.336/",
pages = "6595--6611",
ISBN = "979-8-89176-189-6",
abstract = "Data selection for fine-tuning large language models (LLMs) aims to choose a high-quality subset from existing datasets, allowing the trained model to outperform baselines trained on the full dataset. However, the expanding body of research lacks a clear, unified framework, and the variability in experimental settings complicates systematic comparisons.While existing surveys comprehensively overview the stages and methods of data selection, they often overlook an in-depth exploration of the fine-tuning phase. In this paper, we conduct a focused review of recent data selection techniques for fine-tuning LLMs, analyzing a dozen key studies. We introduce a novel three-stage scheme{---}comprising feature extraction, criteria design, and selector evaluation{---}to systematically categorize and evaluate these methods. Additionally, we propose a unified comparison approach that incorporates ratio-based efficiency and ranking-based feasibility metrics to address inconsistencies across experiments. Our findings reveal that methods emphasizing more targeted quality measurement achieve higher efficiency but at the cost of feasibility. Finally, we discuss trends and highlight four key challenges in fine-tuning data selection, offering potential directions for future research."
}