@inproceedings{wen-etal-2025-knowledge,
title = "Knowledge Distillation for Language Models",
author = "Wen, Yuqiao and
Shi, Freda and
Mou, Lili",
editor = "Lomeli, Maria and
Swayamdipta, Swabha and
Zhang, Rui",
booktitle = "Proceedings of the 2025 Annual Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 5: Tutorial Abstracts)",
month = may,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/moar-dois/2025.naacl-tutorial.4/",
doi = "10.18653/v1/2025.naacl-tutorial.4",
pages = "25--29",
ISBN = "979-8-89176-193-3",
abstract = "Knowledge distillation (KD) aims to transfer the knowledge of a teacher (usually a large model) to a student (usually a small one). In this tutorial, our goal is to provide participants with a comprehensive understanding of the techniques and applications of KD for language models. After introducing the basic concepts including intermediate-layer matching and prediction matching, we will present advanced techniques such as reinforcement learning-based KD and multi-teacher distillation. For applications, we will focus on KD for large language models (LLMs), covering topics ranging from LLM sequence compression to LLM self-distillation. The target audience is expected to know the basics of machine learning and NLP, but do not have to be familiar with the details of math derivation and neural models"
}
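
As context for the prediction matching mentioned in the abstract, here is a minimal sketch of the standard KD objective: a KL divergence between temperature-softened teacher and student output distributions. It assumes PyTorch; the function name, signature, and default temperature are illustrative, not taken from the tutorial.

```python
# Minimal sketch of prediction-matching KD, assuming PyTorch.
# All names and defaults here are illustrative.
import torch
import torch.nn.functional as F

def kd_loss(student_logits: torch.Tensor,
            teacher_logits: torch.Tensor,
            temperature: float = 2.0) -> torch.Tensor:
    """KL divergence between temperature-softened teacher and student
    distributions, scaled by T^2 so gradient magnitudes stay comparable
    across temperatures."""
    log_p_student = F.log_softmax(student_logits / temperature, dim=-1)
    p_teacher = F.softmax(teacher_logits / temperature, dim=-1)
    return F.kl_div(log_p_student, p_teacher,
                    reduction="batchmean") * temperature ** 2
```

In practice this term is typically combined with the ordinary cross-entropy loss on the training labels, weighted by a mixing coefficient.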