@inproceedings{sainz-rigau-2021-ask2transformers,
    % Whole-word brace protection for the system name; mid-word single-letter
    % braces ({A}sk2{T}ransformers) can break kerning and hyphenation.
    title     = "{Ask2Transformers}: Zero-Shot Domain labelling with Pretrained Language Models",
    author    = "Sainz, Oscar and
      Rigau, German",
    editor    = "Vossen, Piek and
      Fellbaum, Christiane",
    booktitle = "Proceedings of the 11th Global Wordnet Conference",
    month     = jan,
    year      = "2021",
    % NOTE(review): per ACL Anthology convention this is the conference venue,
    % not the publisher's city (classic-BibTeX `address` semantics differ).
    address   = "University of South Africa (UNISA)",
    publisher = "Global Wordnet Association",
    % Canonical anthology URL; the original pointed at a preview/staging
    % mirror (preview.aclanthology.org/fix-sig-urls/) that is not stable.
    url       = "https://aclanthology.org/2021.gwc-1.6/",
    pages     = "44--52",
    abstract  = "In this paper we present a system that exploits different pre-trained Language Models for assigning domain labels to WordNet synsets without any kind of supervision. Furthermore, the system is not restricted to use a particular set of domain labels. We exploit the knowledge encoded within different off-the-shelf pre-trained Language Models and task formulations to infer the domain label of a particular WordNet definition. The proposed zero-shot system achieves a new state-of-the-art on the English dataset used in the evaluation."
}
Markdown (Informal)
[Ask2Transformers: Zero-Shot Domain labelling with Pretrained Language Models](https://aclanthology.org/2021.gwc-1.6/) (Sainz & Rigau, GWC 2021)
ACL