@inproceedings{zhu-shimodaira-2024-block,
  title     = {Block-Diagonal Orthogonal Relation and Matrix Entity for Knowledge Graph Embedding},
  author    = {Zhu, Yihua and
               Shimodaira, Hidetoshi},
  editor    = {Al-Onaizan, Yaser and
               Bansal, Mohit and
               Chen, Yun-Nung},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2024},
  month     = nov,
  year      = {2024},
  address   = {Miami, Florida, USA},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.findings-emnlp.987/},
  doi       = {10.18653/v1/2024.findings-emnlp.987},
  pages     = {16956--16972},
  abstract  = {The primary aim of Knowledge Graph Embeddings (KGE) is to learn low-dimensional representations of entities and relations for predicting missing facts. While rotation-based methods like RotatE and QuatE perform well in KGE, they face two challenges: limited model flexibility requiring proportional increases in relation size with entity dimension, and difficulties in generalizing the model for higher-dimensional rotations. To address these issues, we introduce OrthogonalE, a novel KGE model employing matrices for entities and block-diagonal orthogonal matrices with Riemannian optimization for relations. This approach not only enhances the generality and flexibility of KGE models but also captures several relation patterns that rotation-based methods can identify. Experimental results indicate that our new KGE model, OrthogonalE, offers generality and flexibility, captures several relation patterns, and significantly outperforms state-of-the-art KGE models while substantially reducing the number of relation parameters.},
}
Markdown (Informal)
[Block-Diagonal Orthogonal Relation and Matrix Entity for Knowledge Graph Embedding](https://aclanthology.org/2024.findings-emnlp.987/) (Zhu & Shimodaira, Findings of EMNLP 2024)
ACL