@inproceedings{chen-etal-2024-induct,
title = "Induct-Learn: Short Phrase Prompting with Instruction Induction",
author = "Chen, Po-Chun and
Wei, Sheng-Lun and
Huang, Hen-Hsen and
Chen, Hsin-Hsi",
editor = "Al-Onaizan, Yaser and
Bansal, Mohit and
Chen, Yun-Nung",
booktitle = "Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.emnlp-main.297/",
doi = "10.18653/v1/2024.emnlp-main.297",
pages = "5204--5231",
abstract = "Large Language Models (LLMs) have demonstrated capability in {\textquotedblleft}instruction induction,{\textquotedblright} generating instructions from demonstrations (input-output pairs). However, existing methods often rely on large datasets or numerous examples, which is impractical and costly in real-world scenarios. In this work, we propose a low-cost, task-level framework called Induct-Learn. It induces pseudo instructions from a few demonstrations and a short phrase, adding a CoT process into existing demonstrations. When encountering new problems, the learned pseudo instructions and demonstrations with the pseudo CoT process can be combined into a prompt to guide the LLM`s problem-solving process. We validate our approach on the BBH-Induct and Evals-Induct datasets, and the results show that the Induct-Learn framework outperforms state-of-the-art methods. We also exhibit cross-model adaptability and achieve superior performance at a lower cost compared to existing methods."
}
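
As a rough illustration of the prompting scheme the abstract describes, the sketch below assembles a prompt from an induced pseudo instruction, demonstrations augmented with a pseudo CoT rationale, and a new problem. All identifiers (Demonstration, build_prompt) and the exact prompt layout are hypothetical assumptions, not the authors' implementation.

    # Minimal sketch of Induct-Learn-style prompt assembly.
    # Names and prompt layout are hypothetical, not the paper's code.
    from dataclasses import dataclass

    @dataclass
    class Demonstration:
        input_text: str    # task input
        cot: str           # pseudo chain-of-thought added by the framework
        output_text: str   # expected answer

    def build_prompt(pseudo_instruction: str,
                     demonstrations: list[Demonstration],
                     new_input: str) -> str:
        """Combine the induced pseudo instruction with CoT-augmented
        demonstrations and a new problem into one prompt for the LLM."""
        parts = [pseudo_instruction, ""]
        for demo in demonstrations:
            parts += [
                f"Input: {demo.input_text}",
                f"Reasoning: {demo.cot}",
                f"Output: {demo.output_text}",
                "",
            ]
        # Leave "Reasoning:" open so the model continues the CoT pattern.
        parts += [f"Input: {new_input}", "Reasoning:"]
        return "\n".join(parts)

    # Example use with a made-up task:
    prompt = build_prompt(
        "Sort the words alphabetically.",
        [Demonstration("cat apple", "Compare first letters: a < c.", "apple cat")],
        "dog bird",
    )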