@inproceedings{xu-etal-2021-temporal,
title = "Temporal Knowledge Graph Completion using a Linear Temporal Regularizer and Multivector Embeddings",
author = "Xu, Chengjin and
Chen, Yung-Yu and
Nayyeri, Mojtaba and
Lehmann, Jens",
editor = "Toutanova, Kristina and
Rumshisky, Anna and
Zettlemoyer, Luke and
Hakkani-Tur, Dilek and
Beltagy, Iz and
Bethard, Steven and
Cotterell, Ryan and
Chakraborty, Tanmoy and
Zhou, Yichao",
booktitle = "Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.naacl-main.202/",
doi = "10.18653/v1/2021.naacl-main.202",
pages = "2569--2578",
abstract = "Representation learning approaches for knowledge graphs have been mostly designed for static data. However, many knowledge graphs involve evolving data, e.g., the fact (The President of the United States is Barack Obama) is valid only from 2009 to 2017. This introduces important challenges for knowledge representation learning since the knowledge graphs change over time. In this paper, we present a novel time-aware knowledge graph embebdding approach, TeLM, which performs 4th-order tensor factorization of a Temporal knowledge graph using a Linear temporal regularizer and Multivector embeddings. Moreover, we investigate the effect of the temporal dataset`s time granularity on temporal knowledge graph completion. Experimental results demonstrate that our proposed models trained with the linear temporal regularizer achieve the state-of-the-art performances on link prediction over four well-established temporal knowledge graph completion benchmarks."
}