@inproceedings{lee-etal-2026-mapcoder,
    title     = "{MapCoder}-Lite: Distilling Multi-Agent Coding into a Single Small {LLM}",
    author    = "Lee, Woongkyu and
                 Cho, Junhee and
                 Choi, Jungwook",
    editor    = "Demberg, Vera and
                 Inui, Kentaro and
                 Marquez, Llu{\'i}s",
    booktitle = "Findings of the Association for Computational Linguistics: {EACL} 2026",
    month     = mar,
    year      = "2026",
    address   = "Rabat, Morocco",
    publisher = "Association for Computational Linguistics",
    url       = "https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.346/",
    pages     = "6569--6596",
    isbn      = "979-8-89176-386-9",
    abstract  = "Large language models (LLMs) have advanced code generation from single-function tasks to competitive-programming problems, but existing multi-agent solutions either rely on costly large-scale ($> 30 B$) models or collapse when downsized to small open-source models. We present MapCoder-Lite, a framework for distilling the complex reasoning of large, multi-agent coding systems into a single 7B model. Our contribution is a novel, three-pillar methodology that synergistically generates, refines, and encodes multi-agent knowledge: (i) pass-based trajectory distillation from strong LLMs fixes format fragility in retrieval and reduces failures in debugging, (ii) supervisor-guided correction with global feedback strengthens planning and coding agents, and (iii) agent-wise LoRA fine-tuning delivers memory-efficient specialisation. Comprehensive evaluation on xCodeEval, APPS, and CodeContests shows that MapCoder-Lite more than doubles xCodeEval accuracy (13.2{\%} {\textrightarrow} 28.3{\%}), eliminates all format failures, while reducing GPU memory and token-generation time by $4\times$ compared to a 32B model. It also achieves over 10{\%} gains on simpler coding benchmarks, demonstrating broad improvements beyond competitive programming. These results demonstrate that careful agent-wise fine-tuning unleashes high-quality multi-agent coding on a small language model. Our code is publicly available at https://github.com/aiha-lab/MapCoder-Lite."
}
Markdown (Informal)
[MapCoder-Lite: Distilling Multi-Agent Coding into a Single Small LLM](https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.346/) (Lee et al., Findings 2026)
ACL