@inproceedings{luo-etal-2025-hdiff,
  title     = {{HDiff}: Confidence-Guided Denoising Diffusion for Robust Hyper-relational Link Prediction},
  author    = {Luo, Xiangfeng and
               Zheng, Ruoxin and
               Huang, Jianqiang and
               Yu, Hang},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2025},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-emnlp.391/},
  doi       = {10.18653/v1/2025.findings-emnlp.391},
  pages     = {7417--7434},
  isbn      = {979-8-89176-335-7},
  abstract  = {Although Hyper-relational Knowledge Graphs (HKGs) can model complex facts better than traditional KGs, the Hyper-relational Knowledge Graph Completion (HKGC) is more sensitive to inherent noise, particularly struggling with two prevalent HKG-specific noise types: Intra-fact Inconsistency and Cross-fact Association Noise. To address these challenges, we propose HDiff, a novel conditional denoising diffusion framework for robust HKGC that learns to reverse structured noise corruption. HDiff integrates a Consistency-Enhanced Global Encoder (CGE) using contrastive learning to enforce intra-fact consistency and a Context-Guided Denoiser (CGD) performing iterative refinement. The CGD features dual conditioning leveraging CGE{'}s global context and local confidence estimates, effectively combatting both noise types. Extensive experiments demonstrate that HDiff substantially outperforms state-of-the-art HKGC methods, highlighting its effectiveness and significant robustness, particularly under noisy conditions.},
}
Markdown (Informal)
[HDiff: Confidence-Guided Denoising Diffusion for Robust Hyper-relational Link Prediction](https://aclanthology.org/2025.findings-emnlp.391/) (Luo et al., Findings of EMNLP 2025)
ACL