@inproceedings{yao-etal-2020-zero,
title = "Zero-shot Entity Linking with Efficient Long Range Sequence Modeling",
author = "Yao, Zonghai and
Cao, Liangliang and
Pan, Huapu",
editor = "Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2020",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2020.findings-emnlp.228/",
doi = "10.18653/v1/2020.findings-emnlp.228",
pages = "2517--2522",
abstract = "This paper considers the problem of zero-shot entity linking, in which a link in the test time may not present in training. Following the prevailing BERT-based research efforts, we find a simple yet effective way is to expand the long-range sequence modeling. Unlike many previous methods, our method does not require expensive pre-training of BERT with long position embeddings. Instead, we propose an efficient position embeddings initialization method called Embedding-repeat, which initializes larger position embeddings based on BERT-Base. On the zero-shot entity linking dataset, our method improves the STOA from 76.06{\%} to 79.08{\%}, and for its long data, the corresponding improvement is from 74.57{\%} to 82.14{\%}. Our experiments suggest the effectiveness of long-range sequence modeling without retraining the BERT model."
}
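
The Embedding-repeat idea described in the abstract, tiling BERT-Base's pretrained 512-slot position-embedding table until it covers a longer maximum sequence length, can be sketched in a few lines of PyTorch. This is a minimal illustration of the simplest reading of "initializes larger position embeddings based on BERT-Base"; the function name, the 2048-position target, and the random stand-in for the pretrained table are hypothetical, not the authors' released code.

import torch

def embedding_repeat(base_pos_emb: torch.Tensor, target_len: int) -> torch.Tensor:
    # Tile the pretrained position-embedding table (e.g. BERT-Base's
    # 512 x 768 matrix) until it covers target_len positions, then
    # truncate. Hypothetical sketch, not the authors' implementation.
    base_len, _ = base_pos_emb.shape
    repeats = -(-target_len // base_len)  # ceiling division
    return base_pos_emb.repeat(repeats, 1)[:target_len].clone()

# Usage: extend a 512-position table to 2048 positions.
base = torch.randn(512, 768)  # stands in for BERT-Base's pretrained table
extended = embedding_repeat(base, 2048)
assert extended.shape == (2048, 768)

The new table can then replace the model's position embeddings before fine-tuning, avoiding the expensive pre-training with long position embeddings that the abstract contrasts against.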