@inproceedings{wang-etal-2025-balancing,
    title     = {Balancing Forget Quality and Model Utility: A Reverse {KL}-Divergence Knowledge Distillation Approach for Better Unlearning in {LLM}s},
    author    = {Wang, Bichen and Zi, Yuzhe and Sun, Yixin and Zhao, Yanyan and Qin, Bing},
    editor    = {Chiruzzo, Luis and Ritter, Alan and Wang, Lu},
    booktitle = {Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)},
    month     = apr,
    year      = {2025},
    address   = {Albuquerque, New Mexico},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.naacl-long.60/},
    pages     = {1306--1321},
    isbn      = {979-8-89176-189-6},
}