@inproceedings{chen-etal-2025-tagrouter,
title = "{T}ag{R}outer: Learning Route to {LLM}s through Tags for Open-Domain Text Generation Tasks",
author = "Chen, Zhou and
Wei, Zhiqiang and
Bai, Yuqi and
Xiong, Xue and
Wu, Jianmin",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/mtsummit-25-ingestion/2025.findings-acl.1110/",
doi = "10.18653/v1/2025.findings-acl.1110",
pages = "21539--21564",
ISBN = "979-8-89176-256-5",
abstract = "Model routing allocates queries to the suitable model, improving system performance while reducing costs. However, existing routing methods face practical limitations that hinder scalability in large-scale applications and struggle to keep up with the rapid growth of the large language model (LLM) ecosystem. To tackle these challenges, we propose TagRouter, a training-free model routing method designed to optimize the synergy among multiple LLMs for open-domain text generation tasks. Experimental results demonstrate that TagRouter outperforms 13 baseline methods, increasing the accept rate of system by 6.15{\%} and reducing costs by 17.20{\%}, achieving optimal cost-efficiency. Our findings provides the LLM community with an efficient and scalable solution for model ensembling, offering users an evolvable ``super model.''"
}