@inproceedings{sun-etal-2025-introducing,
    title     = {Introducing Graph Context into Language Models through Parameter-Efficient Fine-Tuning for Lexical Relation Mining},
    author    = {Sun, Jingwen and
                 Tian, Zhiyi and
                 He, Yu and
                 Sun, Jingwei and
                 Sun, Guangzhong},
    editor    = {Che, Wanxiang and
                 Nabende, Joyce and
                 Shutova, Ekaterina and
                 Pilehvar, Mohammad Taher},
    booktitle = {Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
    month     = jul,
    year      = {2025},
    address   = {Vienna, Austria},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.acl-long.511/},
    pages     = {10359--10374},
    isbn      = {979-8-89176-251-0},
    abstract  = {Lexical relation refers to the way words are related within a language. Prior work has demonstrated that pretrained language models (PLMs) can effectively mine lexical relations between word pairs. However, they overlook the potential of graph structures composed of lexical relations, which can be integrated with the semantic knowledge of PLMs. In this work, we propose a parameter-efficient fine-tuning method through graph context, which integrates graph features and semantic representations for lexical relation classification (LRC) and lexical entailment (LE) tasks. Our experiments show that graph features can help PLMs better understand more complex lexical relations, establishing a new state-of-the-art for LRC and LE. Finally, we perform an error analysis, identifying the bottlenecks of language models in lexical relation mining tasks and providing insights for future improvements.},
}
@comment{Markdown (Informal):
[Introducing Graph Context into Language Models through Parameter-Efficient Fine-Tuning for Lexical Relation Mining](https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.511/) (Sun et al., ACL 2025)
ACL
}