@comment{Canonical ACL Anthology record for 2025.iwcs-1.27. NOTE(review): url
normalized from the temporary "preview.aclanthology.org/iwcs-25-ingestion"
ingestion host to the permanent anthology URL -- verify it resolves. Per ACL
Anthology convention, address holds the conference location, not the
publisher's city.}
@inproceedings{kozachenko-etal-2025-role,
  title     = {On the Role of Linguistic Features in {LLM} Performance on {Theory of Mind} Tasks},
  author    = {Kozachenko, Ekaterina and
               Guiomar, Gon{\c{c}}alo and
               Stanczak, Karolina},
  editor    = {Evang, Kilian and
               Kallmeyer, Laura and
               Pogodalla, Sylvain},
  booktitle = {Proceedings of the 16th International Conference on Computational Semantics},
  month     = sep,
  year      = {2025},
  address   = {D{\"u}sseldorf, Germany},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.iwcs-1.27/},
  pages     = {308--316},
  isbn      = {979-8-89176-316-6},
  abstract  = {Theory of Mind presents a fundamental challenge for Large Language Models (LLMs), revealing gaps in processing intensional contexts where beliefs diverge from reality. We analyze six LLMs across 2,860 annotated stories, measuring factors such as idea density, mental state verb distribution, and perspectival complexity markers. Notably, and in contrast to humans, we find that LLMs show positive correlations with linguistic complexity. In fact, they achieve high accuracy (74-95{\%}) on high complexity stories with explicit mental state scaffolding, yet struggle with low complexity tasks requiring implicit reasoning (51-77{\%}). Furthermore, we find that linguistic markers systematically influence performance, with contrast markers decreasing accuracy by 5-9{\%} and knowledge verbs increasing it by 4-10{\%}. This inverse relationship between linguistic complexity and performance, contrary to human cognition, may suggest that current LLMs rely on surface-level linguistic cues rather than genuine mental state reasoning.},
}
@comment{
Markdown (Informal)
[On the Role of Linguistic Features in LLM Performance on Theory of Mind Tasks](https://preview.aclanthology.org/iwcs-25-ingestion/2025.iwcs-1.27/) (Kozachenko et al., IWCS 2025)
ACL
}