@inproceedings{hu-etal-2025-fine-tuning,
title = "Fine-Tuning Large Language Models with Sequential Instructions",
author = "Hu, Hanxu and
Yu, Simon and
Chen, Pinzhen and
Ponti, Edoardo",
editor = "Chiruzzo, Luis and
Ritter, Alan and
Wang, Lu",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.naacl-long.288/",
pages = "5589--5610",
ISBN = "979-8-89176-189-6",
abstract = "We find that existing instruction-tuned models usually struggle to adhere to a query with multiple intentions, which impairs their performance when the completion of several tasks is demanded by a single command. Hence, this paper teaches models to respond to sequential instructions. Our first attempt stems from a task-driven perspective, manually creating additional intermediate tasks to train multilingual and visual question answering. Next, we develop an automatic and generic process that turns instructions in existing data into diverse and complex task chains. Models that underwent sequential instruction tuning follow a list of instructions better and deliver higher results in coding, maths, and open-ended generation. Moreover, we put forward a new benchmark named SeqEval to evaluate a model{'}s ability to follow all the instructions in a sequence, which further corroborates the benefits of our sequential instruction tuning method."
}
Markdown (Informal):
[Fine-Tuning Large Language Models with Sequential Instructions](https://aclanthology.org/2025.naacl-long.288/) (Hu et al., NAACL 2025)

ACL:
Hanxu Hu, Simon Yu, Pinzhen Chen, and Edoardo Ponti. 2025. Fine-Tuning Large Language Models with Sequential Instructions. In Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers), pages 5589–5610, Albuquerque, New Mexico. Association for Computational Linguistics.