@inproceedings{liu-etal-2025-picture,
title = "A Picture is Worth A Thousand Numbers: Enabling {LLM}s Reason about Time Series via Visualization",
author = "Liu, Haoxin and
Liu, Chenghao and
Prakash, B. Aditya",
editor = "Chiruzzo, Luis and
Ritter, Alan and
Wang, Lu",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.naacl-long.383/",
pages = "7486--7518",
ISBN = "979-8-89176-189-6",
abstract = "Large language models (LLMs), with demonstrated reasoning abilities across multiple domains, have been largely underexplored fortime-series reasoning (TsR), which is ubiquitous in the real world. In this work, wepropose TimerBed, the first comprehensivetestbed for evaluating LLMs' TsR performance.Specifically, TimerBed includes stratified reasoning patterns with real-world tasks, diversecombinations of LLMs and reasoning strategies, and various supervised models as comparison anchors. We perform extensive experiments with TimerBed, test multiple current beliefs, and observe the initial failuresof LLMs in TsR, as evidenced by the ineffectiveness of zero shot (ZST) and performancedegradation of few shot in-context learning(ICL). Further, we identify one possible rootcause: the numerical modeling of data. Toaddress this, we propose a prompt-based solution VL-Time, with visualization-modeled dataand language-guided reasoning. Experimental results demonstrate that VL-Time enablesmultimodal LLMs to be non-trivial ZST andpowerful ICL reasoners for time series, achieving about 140{\%} average performance improvement and 99{\%} average token costs reduction.TimerBed and VL-Time are available at https://github.com/AdityaLab/DeepTime/."
}