@inproceedings{yoshida-etal-2025-investigating,
title = "Investigating Psychometric Predictive Power of Syntactic Attention",
author = "Yoshida, Ryo and
Sugimoto, Yushi and
Oseki, Yohei",
editor = "Boleda, Gemma and
Roth, Michael",
booktitle = "Proceedings of the 29th Conference on Computational Natural Language Learning",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.conll-1.27/",
doi = "10.18653/v1/2025.conll-1.27",
pages = "407--418",
    isbn = "979-8-89176-271-8",
abstract = "In computational psycholinguistics, Merkx and Frank (2021) demonstrated that surprisal values from Transformers exhibit a closer fit to measures of human reading effort than those from Recurrent Neural Networks (RNNs), suggesting that Transformers' attention mechanisms may capture cue-based retrieval-like operations in human sentence processing. Meanwhile, explicit integration of syntactic structures has been shown to improve language models' ability to model human sentence processing{---}for example, Hale et al. (2018) demonstrated that Recurrent Neural Network Grammars (RNNGs), which integrate RNNs with explicit syntactic structures, account for human brain activities that vanilla RNNs cannot capture. In this paper, we investigate the psychometric predictive power of Composition Attention Grammars (CAGs), which integrate Transformers with explicit syntactic structures, to test whether they provide a better fit to human reading times than both vanilla Transformers and RNNGs. We hypothesized that CAGs' syntactic attention mechanisms capture cue-based retrieval-like operations over syntactic memory representations{---}operations that may be involved in human sentence processing. The results of our strictly controlled experiments demonstrate that CAGs outperformed vanilla Transformers and RNNGs, suggesting that the syntactic attention mechanisms of CAGs may serve as a mechanistic implementation of cue-based retrieval from syntactic memory."
}