@inproceedings{zhu-etal-2025-turbocharging,
  title     = {Turbocharging Web Automation: The Impact of Compressed History States},
  author    = {Zhu, Xiyue and
               Tang, Peng and
               Liao, Haofu and
               Appalaraju, Srikar},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Findings of the Association for Computational Linguistics: {ACL} 2025},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/mtsummit-25-ingestion/2025.findings-acl.187/},
  doi       = {10.18653/v1/2025.findings-acl.187},
  pages     = {3644--3651},
  isbn      = {979-8-89176-256-5},
  abstract  = {Language models have led to leap forward in web automation. The current web automation approaches take the current web state, history actions, and language instruction as inputs to predict the next action, overlooking the importance of history states. However, the highly verbose nature of web page states can result in long input sequence and sparse information, hampering the effective utilization of history states. In this paper, we propose a novel web history compressor approach to turbocharge web automation using history states. Our approach employs a history compressor module that distills the most task-relevant information from each history state into a fixed-length short representation, mitigating the challenges posed by the highly verbose history states. Experiments are conducted on the Mind2Web and WebLINX datasets to evaluate the effectiveness of our approach. Results show that our approach obtains 1.2-5.4{\%} absolute accuracy improvements compared to the baseline approach without history inputs.},
}
Markdown (Informal)
[Turbocharging Web Automation: The Impact of Compressed History States](https://preview.aclanthology.org/mtsummit-25-ingestion/2025.findings-acl.187/) (Zhu et al., Findings of ACL 2025)
ACL