@inproceedings{tang-etal-2023-multilingual,
title = "Multilingual Knowledge Graph Completion with Language-Sensitive Multi-Graph Attention",
author = "Tang, Rongchuan and
Zhao, Yang and
Zong, Chengqing and
Zhou, Yu",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.acl-long.586/",
doi = "10.18653/v1/2023.acl-long.586",
pages = "10508--10519",
abstract = "Multilingual Knowledge Graph Completion (KGC) aims to predict missing links with multilingual knowledge graphs. However, existing approaches suffer from two main drawbacks: (a) alignment dependency: the multilingual KGC is always realized with joint entity or relation alignment, which introduces additional alignment models and increases the complexity of the whole framework; (b) training inefficiency: the trained model will only be used for the completion of one target KG, although the data from all KGs are used simultaneously. To address these drawbacks, we propose a novel multilingual KGC framework with language-sensitive multi-graph attention such that the missing links on all given KGs can be inferred by a universal knowledge completion model. Specifically, we first build a relational graph neural network by sharing the embeddings of aligned nodes to transfer language-independent knowledge. Meanwhile, a language-sensitive multi-graph attention (LSMGA) is proposed to deal with the information inconsistency among different KGs. Experimental results show that our model achieves significant improvements on the DBP-5L and E-PKG datasets."
}
Markdown (Informal)
[Multilingual Knowledge Graph Completion with Language-Sensitive Multi-Graph Attention](https://preview.aclanthology.org/fix-sig-urls/2023.acl-long.586/) (Tang et al., ACL 2023)
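The abstract describes two mechanisms: aligned entities sharing one embedding across KGs (language-independent transfer) and a language-sensitive multi-graph attention (LSMGA) that fuses per-KG information. Below is a minimal, hedged sketch of those two ideas only; it is not the authors' implementation, and every module name, dimension, and simplification (e.g. mean-pooled neighbour messages in place of the paper's relational graph neural network) is an assumption made purely for illustration.

```python
# Illustrative sketch, NOT the paper's code: shared entity embeddings for
# aligned entities + a language-sensitive attention over per-KG messages.
import torch
import torch.nn as nn
import torch.nn.functional as F

class LanguageSensitiveMultiGraphAttention(nn.Module):
    def __init__(self, num_entities, num_languages, dim=64):
        super().__init__()
        # One shared table: aligned entities map to the same id, so their
        # embedding is shared across all KGs (language-independent knowledge).
        self.entity_emb = nn.Embedding(num_entities, dim)
        # A learned vector per language makes the attention "language-sensitive".
        self.lang_emb = nn.Embedding(num_languages, dim)
        self.query_proj = nn.Linear(dim, dim)
        self.key_proj = nn.Linear(2 * dim, dim)

    def forward(self, entity_ids, neighbor_ids_per_kg):
        """entity_ids: (B,) target entities.
        neighbor_ids_per_kg: list of length L; element l is a (B, N_l) tensor
        of neighbour ids of each target entity in KG l."""
        q = self.query_proj(self.entity_emb(entity_ids))            # (B, D)
        messages, scores = [], []
        for lang, neigh in enumerate(neighbor_ids_per_kg):
            # Simplified per-KG message: mean over neighbour embeddings.
            msg = self.entity_emb(neigh).mean(dim=1)                # (B, D)
            lang_vec = self.lang_emb.weight[lang].expand_as(msg)    # (B, D)
            key = self.key_proj(torch.cat([msg, lang_vec], dim=-1))
            scores.append((q * key).sum(-1, keepdim=True))          # (B, 1)
            messages.append(msg)
        attn = F.softmax(torch.cat(scores, dim=-1), dim=-1)         # (B, L)
        stacked = torch.stack(messages, dim=1)                      # (B, L, D)
        return (attn.unsqueeze(-1) * stacked).sum(dim=1)            # (B, D)

# Toy usage: 100 (aligned) entities shared by 3 language-specific KGs.
model = LanguageSensitiveMultiGraphAttention(num_entities=100, num_languages=3)
targets = torch.tensor([0, 1])
neighbors = [torch.randint(0, 100, (2, 4)) for _ in range(3)]
fused = model(targets, neighbors)   # (2, 64) fused entity representations
```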