@inproceedings{kodama-oda-2026-comprehensive,
title = "Comprehensive Study of Bilingual and Multi-category Instruction Pre-training",
author = "Kodama, Takashi and
Oda, Yusuke",
editor = "Demberg, Vera and
Inui, Kentaro and
Marquez, Llu{\'i}s",
booktitle = "Findings of the {A}ssociation for {C}omputational {L}inguistics: {EACL} 2026",
month = mar,
year = "2026",
address = "Rabat, Morocco",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.68/",
pages = "1323--1340",
ISBN = "979-8-89176-386-9",
abstract = "Instruction pre-training (IPT) has recently emerged as an effective intermediate stage between vanilla pre-training and post-training for large language models (LLMs). However, the optimal design of IPT corpora{---}such as the balance between raw and instruction-response data, languages, and task categories{---}remains unclear. We systematically study IPT corpus composition using a bilingual (English and Japanese) and multi-category (coding, general, math, and reasoning) instruction-response dataset. Through extensive IPT experiments across four base models, including both English-centric and bilingual LLMs, we find that: (1) more instruction-response data generally enhances model performance, particularly for models with large VPT budgets; (2) Japanese instruction data can improve English performance through cross-lingual transfer; and (3) the effectiveness of post-training varies across categories: coding performance is largely determined during IPT, while math and reasoning continue to improve during post-training."
}