@inproceedings{wu-yin-2025-meta,
  title     = {Meta-Semantics Augmented Few-Shot Relational Learning},
  author    = {Wu, Han and
               Yin, Jie},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.emnlp-main.1569/},
  doi       = {10.18653/v1/2025.emnlp-main.1569},
  pages     = {30811--30823},
  isbn      = {979-8-89176-332-6},
  abstract  = {Few-shot relational learning on knowledge graph (KGs) aims to perform reasoning over relations with only a few training examples. While current methods have focused primarily on leveraging specific relational information, rich semantics inherent in KGs have been largely overlooked. To bridge this gap, we propose PromptMeta, a novel prompted meta-learning framework that seamlessly integrates meta-semantics with relational information for few-shot relational learning. PromptMeta introduces two core innovations: (1) a Meta-Semantic Prompt (MSP) pool that learns and consolidates high-level meta-semantics shared across tasks, enabling effective knowledge transfer and adaptation to newly emerging relations; and (2) a learnable fusion mechanism that dynamically combines meta-semantics with task-specific relational information tailored to different few-shot tasks. Both components are optimized jointly with model parameters within a meta-learning framework. Extensive experiments and analyses on two real-world KG benchmarks validate the effectiveness of PromptMeta in adapting to new relations with limited supervision.},
}
@comment{Informal Markdown citation (copied from the ACL Anthology page, kept for reference):
[Meta-Semantics Augmented Few-Shot Relational Learning](https://aclanthology.org/2025.emnlp-main.1569/) (Wu & Yin, EMNLP 2025)
ACL
}