@inproceedings{trirat-lee-2025-monaq,
title = "{MONAQ}: Multi-Objective Neural Architecture Querying for Time-Series Analysis on Resource-Constrained Devices",
author = "Trirat, Patara and
Lee, Jae-Gil",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.918/",
doi = "10.18653/v1/2025.findings-emnlp.918",
pages = "16922--16950",
ISBN = "979-8-89176-335-7",
abstract = "The growing use of smartphones and IoT devices necessitates efficient time-series analysis on resource-constrained hardware, which is critical for sensing applications such as human activity recognition and air quality prediction. Recent efforts in hardware-aware neural architecture search (NAS) automate architecture discovery for specific platforms; however, none focus on general time-series analysis with edge deployment. Leveraging the problem-solving and reasoning capabilities of large language models (LLM), we propose ***MONAQ***, a novel framework that reformulates NAS into ***M***ulti-***O***bjective ***N***eural ***A***rchitecture ***Q***uerying tasks. *MONAQ* is equipped with *multimodal query generation* for processing multimodal time-series inputs and hardware constraints, alongside an *LLM agent-based multi-objective search* to achieve deployment-ready models via code generation. By integrating numerical data, time-series images, and textual descriptions, *MONAQ* improves an LLM{'}s understanding of time-series data. Experiments on fifteen datasets demonstrate that *MONAQ*-discovered models outperform both handcrafted models and NAS baselines while being more efficient."
}
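A minimal LaTeX usage sketch for citing this entry; the file name references.bib and the plain bibliography style are assumptions for illustration, not part of the original record:

% Assumes the BibTeX entry above is saved in references.bib (hypothetical file name).
\documentclass{article}
\begin{document}
On-device time-series NAS with LLMs is explored in MONAQ \cite{trirat-lee-2025-monaq}.
\bibliographystyle{plain}  % any installed style works; plain is an assumption
\bibliography{references}
\end{document}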