@inproceedings{honda-etal-2025-distilling,
title = "Distilling Many-Shot In-Context Learning into a Cheat Sheet",
author = "Honda, Ukyo and
Murakami, Soichiro and
Zhang, Peinan",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.930/",
doi = "10.18653/v1/2025.findings-emnlp.930",
pages = "17158--17178",
ISBN = "979-8-89176-335-7",
abstract = "Recent advances in large language models (LLMs) enable effective in-context learning (ICL) with many-shot examples, but at the cost of high computational demand due to longer input tokens. To address this, we propose cheat-sheet ICL, which distills the information from many-shot ICL into a concise textual summary (cheat sheet) used as the context at inference time. Experiments on challenging reasoning tasks show that cheat-sheet ICL achieves comparable or better performance than many-shot ICL with far fewer tokens, and matches retrieval-based ICL without requiring test-time retrieval. These findings demonstrate that cheat-sheet ICL is a practical alternative for leveraging LLMs in downstream tasks."
}