@inproceedings{fu-etal-2020-design,
title = "Design Challenges in Low-resource Cross-lingual Entity Linking",
author = "Fu, Xingyu and
Shi, Weijia and
Yu, Xiaodong and
Zhao, Zian and
Roth, Dan",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.emnlp-main.521/",
doi = "10.18653/v1/2020.emnlp-main.521",
pages = "6418--6432",
    abstract = "Cross-lingual Entity Linking (XEL), the problem of grounding mentions of entities in a foreign language text into an English knowledge base such as Wikipedia, has seen a lot of research in recent years, with a range of promising techniques. However, current techniques do not rise to the challenges introduced by text in low-resource languages (LRL) and, surprisingly, fail to generalize to text not taken from Wikipedia, on which they are usually trained. This paper provides a thorough analysis of low-resource XEL techniques, focusing on the key step of identifying candidate English Wikipedia titles that correspond to a given foreign language mention. Our analysis indicates that current methods are limited by their reliance on Wikipedia's interlanguage links and thus suffer when the foreign language's Wikipedia is small. We conclude that the LRL setting requires the use of outside-Wikipedia cross-lingual resources and present a simple yet effective zero-shot XEL system, QuEL, that utilizes search engine query logs. With experiments on 25 languages, QuEL shows an average increase of 25{\%} in gold candidate recall and of 13{\%} in end-to-end linking accuracy over state-of-the-art baselines."
}
Markdown (Informal)
[Design Challenges in Low-resource Cross-lingual Entity Linking](https://aclanthology.org/2020.emnlp-main.521/) (Fu et al., EMNLP 2020)