@inproceedings{yap-etal-2020-adapting,
  title     = {Adapting {BERT} for {Word Sense Disambiguation} with Gloss Selection Objective and Example Sentences},
  author    = {Yap, Boon Peng and
               Koh, Andrew and
               Chng, Eng Siong},
  editor    = {Cohn, Trevor and
               He, Yulan and
               Liu, Yang},
  booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2020},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.findings-emnlp.4/},
  doi       = {10.18653/v1/2020.findings-emnlp.4},
  pages     = {41--46},
  abstract  = {Domain adaptation or transfer learning using pre-trained language models such as BERT has proven to be an effective approach for many natural language processing tasks. In this work, we propose to formulate word sense disambiguation as a relevance ranking task, and fine-tune BERT on sequence-pair ranking task to select the most probable sense definition given a context sentence and a list of candidate sense definitions. We also introduce a data augmentation technique for WSD using existing example sentences from WordNet. Using the proposed training objective and data augmentation technique, our models are able to achieve state-of-the-art results on the English all-words benchmark datasets.},
}
Markdown (Informal)
[Adapting BERT for Word Sense Disambiguation with Gloss Selection Objective and Example Sentences](https://aclanthology.org/2020.findings-emnlp.4/) (Yap et al., Findings 2020)
ACL