@inproceedings{wang-etal-2025-path,
title = "Path-enhanced Pre-trained Language Model for Knowledge Graph Completion",
author = "Wang, Hao and
Song, Dandan and
Wu, Zhijing and
Tian, Yuhang and
Yang, Pan",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.243/",
doi = "10.18653/v1/2025.findings-emnlp.243",
pages = "4528--4540",
ISBN = "979-8-89176-335-7",
abstract = "Pre-trained language models (PLMs) have achieved remarkable knowledge graph completion(KGC) success. However, most methods derive KGC results mainly from triple-level and text-described learning, which lack the capability to capture long-term relational and structural information. Moreover, the absence of a visible reasoning process leads to poor interpretability and credibility of the completions. In this paper, we propose a path-enhanced pre-trained language model-based knowledge graph completion method (PEKGC), which employs multi-view generation to infer missing facts in triple-level and path-level simultaneously to address lacking long-term relational information and interpretability issues. Furthermore, a neighbor selector module is proposed to filter neighbor triples to provide the adjacent structural information. Besides, we propose a fact-level re-evaluation and a heuristic fusion ranking strategy for candidate answers to fuse multi-view predictions. Extensive experiments on the benchmark datasets demonstrate that our model significantly improves the performance of the KGC task."
}

Markdown (Informal)

[Path-enhanced Pre-trained Language Model for Knowledge Graph Completion](https://aclanthology.org/2025.findings-emnlp.243/) (Wang et al., Findings 2025)
ACL
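
The abstract mentions a "heuristic fusion ranking strategy" for combining triple-level and path-level candidate answers, but does not specify the heuristic. The sketch below is a minimal illustration only, assuming a reciprocal-rank-fusion-style combination of the two views; the entity names, the function `fuse_rankings`, and the constant `k` are all hypothetical and not taken from the paper.

```python
# Illustrative sketch only: the paper's exact fusion heuristic is not given
# in the abstract, so this stand-in uses reciprocal rank fusion (RRF) to
# merge triple-level and path-level candidate rankings into one list.
from collections import defaultdict

def fuse_rankings(triple_view, path_view, k=60):
    """Fuse two ranked candidate-entity lists into a single ranking.

    triple_view, path_view: lists of entity IDs, best candidate first.
    k: RRF smoothing constant (60 is a common default).
    """
    scores = defaultdict(float)
    for ranking in (triple_view, path_view):
        for rank, entity in enumerate(ranking, start=1):
            # Each view contributes 1 / (k + rank); agreement across
            # views accumulates, so shared candidates rise to the top.
            scores[entity] += 1.0 / (k + rank)
    return sorted(scores, key=scores.get, reverse=True)

# Hypothetical candidates for a tail-prediction query (head, relation, ?).
triple_view = ["e_paris", "e_lyon", "e_nice"]      # triple-level generation
path_view = ["e_paris", "e_toulouse", "e_lyon"]    # path-level generation
print(fuse_rankings(triple_view, path_view))
# ['e_paris', 'e_lyon', 'e_toulouse', 'e_nice']
```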