@inproceedings{kaur-etal-2025-lets,
  title     = {{LETS}-{C}: Leveraging Text Embedding for Time Series Classification},
  author    = {Kaur, Rachneet and
               Zeng, Zhen and
               Balch, Tucker and
               Veloso, Manuela},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.acl-long.1557/},
  pages     = {32365--32399},
  isbn      = {979-8-89176-251-0},
  abstract  = {Recent advancements in language modeling have shown promising results when applied to time series data. In particular, fine-tuning pre-trained large language models (LLMs) for time series classification tasks has achieved state-of-the-art (SOTA) performance on standard benchmarks. However, these LLM-based models have a significant drawback due to the large model size, with the number of trainable parameters in the millions. In this paper, we propose an alternative approach to leveraging the success of language modeling in the time series domain. Instead of fine-tuning LLMs, we utilize a text embedding model to embed time series and then pair the embeddings with a simple classification head composed of convolutional neural networks (CNN) and multilayer perceptron (MLP). We conducted extensive experiments on a well-established time series classification benchmark. We demonstrated LETS-C not only outperforms the current SOTA in classification accuracy but also offers a lightweight solution, using only 14.5{\%} of the trainable parameters on average compared to the SOTA model. Our findings suggest that leveraging text embedding models to encode time series data, combined with a simple yet effective classification head, offers a promising direction for achieving high-performance time series classification while maintaining a lightweight model architecture.},
}
Markdown (Informal)
[LETS-C: Leveraging Text Embedding for Time Series Classification](https://aclanthology.org/2025.acl-long.1557/) (Kaur et al., ACL 2025)
ACL
- Rachneet Kaur, Zhen Zeng, Tucker Balch, and Manuela Veloso. 2025. LETS-C: Leveraging Text Embedding for Time Series Classification. In Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers), pages 32365–32399, Vienna, Austria. Association for Computational Linguistics.