@inproceedings{liang-etal-2025-thinkswitcher,
title = "{T}hink{S}witcher: When to Think Hard, When to Think Fast",
author = "Liang, Guosheng and
Zhong, Longguang and
Yang, Ziyi and
Quan, Xiaojun",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.278/",
doi = "10.18653/v1/2025.findings-emnlp.278",
pages = "5185--5201",
ISBN = "979-8-89176-335-7",
abstract = "Large reasoning models (LRMs) excel at solving complex tasks by leveraging long chain-of-thought (CoT) reasoning. However, this often leads to overthinking on simple tasks, resulting in unnecessary computational overhead. We observe that LRMs inherently possess the capability for efficient short CoT reasoning, which can be reliably elicited through prompt design. To leverage this capability, we propose ThinkSwitcher, a framework that enables a single LRM to dynamically switch between short and long CoT modes based on task complexity. ThinkSwitcher introduces a lightweight switching module trained with supervision signals derived from the relative performance of each reasoning mode across tasks. Experiments on multiple reasoning benchmarks show that ThinkSwitcher reduces computational cost by 20-30{\%} while maintaining high accuracy on complex tasks. This demonstrates the effectiveness of ThinkSwitcher as a scalable and efficient solution for unified LRM deployment."
}