@inproceedings{guo-etal-2025-nested,
title = "Nested-Refinement Metamorphosis: Reflective Evolution for Efficient Optimization of Networking Problems",
author = "Guo, Shuhan and
Yin, Nan and
Kwok, James and
Yao, Quanming",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/display_plenaries/2025.findings-acl.895/",
pages = "17398--17429",
ISBN = "979-8-89176-256-5",
abstract = "Large Language Models (LLMs) excel in network algorithm design but suffer from inefficient iterative coding and high computational costs. Drawing inspiration from butterfly metamorphosis{---}where structured developmental phases (Phase I: larval nutrient accumulation {\textrightarrow} Phase II: pupal transformation) enable adaptive evolution{---}we propose Nested-Refinement Metamorphosis (NeRM). Building on this principle, we introduce Metamorphosis on Prompts (MoP) to iteratively refine task descriptions (e.g. latency / bandwidth constraints) and Metamorphosis on Algorithms (MoA) to generate more effective solutions (e.g. appropriate network processing architecture). Their nested refinement ensures task-algorithm alignment, systematically improving both task descriptions and algorithmic solutions for more efficient algorithm design. To further enhance efficiency, we incorporate predictor-assisted code evaluation, mimicking natural selection by filtering out weak candidates early and reducing computational costs. Experimental results on TSP (routing), MKP (resource allocation), and CVRP (service-network coordination) demonstrate that NeRM consistently outperforms state-of-the-art approaches in both performance and efficiency."
}