@inproceedings{gu-etal-2025-semantic,
title = "A Semantic-Aware Layer-Freezing Approach to Computation-Efficient Fine-Tuning of Language Models",
author = "Gu, Jian and
Aleti, Aldeida and
Chen, Chunyang and
Zhang, Hongyu",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/mtsummit-25-ingestion/2025.findings-acl.420/",
doi = "10.18653/v1/2025.findings-acl.420",
pages = "8019--8033",
ISBN = "979-8-89176-256-5",
abstract = "Finetuning language models (LMs) is crucial for adapting the models to downstream data and tasks. However, full finetuning is usually costly. Existing work, such as parameter-efficient finetuning (PEFT), often focuses on \textit{how to finetune} but neglects the issue of \textit{where to finetune}. As a pioneering work on reducing the cost of backpropagation (at the layer level) by answering where to finetune, we conduct a semantic analysis of the LM inference process. We first propose using transition traces of the latent representation to compute deviations (or loss). Then, using a derived formula of scaling law, we estimate the gain of each layer in reducing deviation (or loss). Further, we narrow down the scope for finetuning, and also, study the cost-benefit balance of LM finetuning. We perform extensive experiments across well-known LMs and datasets. The results show that our approach is effective and efficient, and outperforms the existing baselines. Our approach is orthogonal to other techniques for improving finetuning efficiency, such as PEFT methods, offering practical values on LM finetuning."
}
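
The abstract describes estimating each layer's gain and then finetuning only the selected layers. As a rough illustration of that layer-freezing idea (not the authors' implementation), the minimal PyTorch sketch below assumes per-layer gain scores have already been computed elsewhere, e.g. from deviations of latent-representation transition traces; the names `estimated_gains`, `select_layers_to_finetune`, and `apply_layer_freezing` are hypothetical.

```python
# A minimal sketch of semantic-aware layer freezing ("where to finetune"),
# assuming per-layer gain scores are already available. Not the paper's code.
import torch
import torch.nn as nn

def select_layers_to_finetune(estimated_gains, k):
    """Pick the k layer indices with the largest estimated gain
    (hypothetical scores, e.g. from a deviation / scaling-law analysis)."""
    ranked = sorted(range(len(estimated_gains)),
                    key=lambda i: estimated_gains[i], reverse=True)
    return set(ranked[:k])

def apply_layer_freezing(blocks, trainable_layers):
    """Freeze every block except those selected for finetuning."""
    for idx, block in enumerate(blocks):
        requires_grad = idx in trainable_layers
        for param in block.parameters():
            param.requires_grad = requires_grad

# Toy stand-in for an LM: a stack of transformer encoder blocks.
blocks = nn.ModuleList(
    nn.TransformerEncoderLayer(d_model=64, nhead=4, batch_first=True)
    for _ in range(6)
)
estimated_gains = [0.02, 0.05, 0.31, 0.27, 0.11, 0.04]  # hypothetical values
trainable = select_layers_to_finetune(estimated_gains, k=2)
apply_layer_freezing(blocks, trainable)

# Only parameters of the selected layers reach the optimizer; autograd skips
# gradient computation for frozen parameters below the lowest trainable layer,
# which is where the backpropagation savings come from.
optimizer = torch.optim.AdamW(
    (p for p in blocks.parameters() if p.requires_grad), lr=1e-4
)
```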