@inproceedings{kendrick-etal-2025-investigating,
title = "Investigating noun-noun compound relation representations in autoregressive large language models",
author = "Kendrick, Saffron and
Ormerod, Mark and
Wang, Hui and
Devereux, Barry",
editor = "Kuribayashi, Tatsuki and
Rambelli, Giulia and
Takmaz, Ece and
Wicke, Philipp and
Li, Jixing and
Oh, Byung-Doh",
booktitle = "Proceedings of the Workshop on Cognitive Modeling and Computational Linguistics",
month = may,
year = "2025",
address = "Albuquerque, New Mexico, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.cmcl-1.30/",
pages = "253--263",
ISBN = "979-8-89176-227-5",
abstract = "This paper uses autoregressive large language models to explore at which points in a given input sentence the semantic information is decodable. Using representational similarity analysis and probing, the results show that autoregressive models are capable of extracting the semantic relation information from a dataset of noun-noun compounds. When considering the effect of processing the head and modifier nouns in context, the extracted representations show greater correlation after processing both constituent nouns in the same sentence. The linguistic properties of the head nouns may influence the ability of LLMs to extract relation information when the head and modifier words are processed separately. Probing suggests that Phi-1 and LLaMA-3.2 are exposed to relation information during training, as they are able to predict the relation vectors for compounds from separate word representations to a similar degree as using compositional compound representations. However, the difference in processing condition for GPT-2 and DeepSeek-R1 indicates that these models are actively processing the contextual semantic relation information of the compound."
}
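
For readers unfamiliar with the two analysis methods named in the abstract, the sketch below illustrates them in miniature: representational similarity analysis (RSA) rank-correlates a model-derived representational dissimilarity matrix (RDM) with an RDM built from relation labels, and probing fits a linear model on the same representations. The compounds, relation labels, layer index, and use of GPT-2 via Hugging Face `transformers` are illustrative assumptions only, not the paper's actual dataset, models, or configuration.

```python
# Hedged sketch of RSA and probing over autoregressive LM hidden states.
# Everything below (compounds, relation labels, layer choice) is a
# placeholder, not the paper's setup.
import numpy as np
import torch
from scipy.spatial.distance import pdist
from scipy.stats import spearmanr
from sklearn.linear_model import LogisticRegression
from transformers import AutoModel, AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")
model = AutoModel.from_pretrained("gpt2", output_hidden_states=True)
model.eval()

# Hypothetical noun-noun compounds paired with made-up relation labels.
compounds = [("olive oil", 0), ("honey bee", 1), ("kitchen table", 2), ("apple pie", 0)]

reps = []
for phrase, _ in compounds:
    ids = tok(phrase, return_tensors="pt")
    with torch.no_grad():
        out = model(**ids)
    # Compound representation: hidden state at the final token of a
    # mid-depth layer (layer 6 is an arbitrary choice here).
    reps.append(out.hidden_states[6][0, -1].numpy())
reps = np.stack(reps)

# Model RDM: pairwise correlation distances between compound representations.
model_rdm = pdist(reps, metric="correlation")

# Relation RDM: 0 if two compounds share a relation label, 1 otherwise
# (Hamming distance on a single label column gives exactly that).
labels = np.array([lab for _, lab in compounds])
relation_rdm = pdist(labels[:, None], metric="hamming")

# RSA score: Spearman rank correlation between the two RDMs.
rho, p = spearmanr(model_rdm, relation_rdm)
print(f"RSA Spearman rho = {rho:.3f} (p = {p:.3f})")

# Probing, sketched as a linear classifier from representations to
# relation labels (the paper predicts relation vectors; a classifier
# is used here purely as a stand-in).
probe = LogisticRegression(max_iter=1000).fit(reps, labels)
print("probe training accuracy:", probe.score(reps, labels))
```

In practice one would hold out compounds when evaluating the probe and compare processing conditions (constituents in context vs. in isolation), as the abstract describes; this toy version only shows the mechanics of the two analyses.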