@inproceedings{bevilacqua-navigli-2019-quasi,
title = "Quasi Bidirectional Encoder Representations from Transformers for Word Sense Disambiguation",
author = "Bevilacqua, Michele and
Navigli, Roberto",
editor = "Mitkov, Ruslan and
Angelova, Galia",
booktitle = "Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)",
month = sep,
year = "2019",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd.",
url = "https://preview.aclanthology.org/fix-sig-urls/R19-1015/",
doi = "10.26615/978-954-452-056-4_015",
pages = "122--131",
abstract = "While contextualized embeddings have produced performance breakthroughs in many Natural Language Processing (NLP) tasks, Word Sense Disambiguation (WSD) has not benefited from them yet. In this paper, we introduce QBERT, a Transformer-based architecture for contextualized embeddings which makes use of a co-attentive layer to produce more deeply bidirectional representations, better-fitting for the WSD task. As a result, we are able to train a WSD system that beats the state of the art on the concatenation of all evaluation datasets by over 3 points, also outperforming a comparable model using ELMo."
}