@inproceedings{liu-yu-2025-hsgm,
  title     = {{HSGM}: Hierarchical Segment-Graph Memory for Scalable Long-Text Semantics},
  author    = {Liu, Dong and
               Yu, Yanxuan},
  editor    = {Frermann, Lea and
               Stevenson, Mark},
  booktitle = {Proceedings of the 14th Joint Conference on Lexical and Computational Semantics (*{SEM} 2025)},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.starsem-1.26/},
  pages     = {328--337},
  isbn      = {979-8-89176-340-1},
  abstract  = {Semantic parsing of long documents remains challenging due to quadratic growth in pairwise composition and memory requirements. We introduce \textbf{Hierarchical Segment-Graph Memory (HSGM)}, a novel framework that decomposes an input of length $N$ into $M$ meaningful segments, constructs \textit{Local Semantic Graphs} on each segment, and extracts compact \textit{summary nodes} to form a \textit{Global Graph Memory}. HSGM supports \textit{incremental updates}{---}only newly arrived segments incur local graph construction and summary-node integration{---}while \textit{Hierarchical Query Processing} locates relevant segments via top-$K$ retrieval over summary nodes and then performs fine-grained reasoning within their local graphs. Theoretically, HSGM reduces worst-case complexity from $O(N^2)$ to $O\bigl(N\,k + (N/k)^2\bigr)$, with segment size $k \ll N$, and we derive Frobenius-norm bounds on the approximation error introduced by node summarization and sparsification thresholds. Empirically, on three benchmarks{---}long-document AMR parsing, segment-level semantic role labeling (OntoNotes), and legal event extraction{---}HSGM achieves \textit{2{--}4{\texttimes} inference speedup}, \textit{ $>$60{\%} reduction} in peak memory, and \textit{ $\ge95\%$} of baseline accuracy. Our approach unlocks scalable, accurate semantic modeling for ultra-long texts, enabling real-time and resource-constrained NLP applications.},
}
@comment{Markdown (Informal):
[HSGM: Hierarchical Segment-Graph Memory for Scalable Long-Text Semantics](https://aclanthology.org/2025.starsem-1.26/) (Liu & Yu, *SEM 2025)
ACL
}