@inproceedings{renner-etal-2023-wordnet,
  title     = {{WordNet} Is All You Need: A Surprisingly Effective Unsupervised Method for Graded Lexical Entailment},
  author    = {Renner, Joseph and
               Denis, Pascal and
               Gilleron, R{\'e}mi},
  editor    = {Bouamor, Houda and
               Pino, Juan and
               Bali, Kalika},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2023},
  month     = dec,
  year      = {2023},
  address   = {Singapore},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.findings-emnlp.615/},
  doi       = {10.18653/v1/2023.findings-emnlp.615},
  pages     = {9176--9182},
  abstract  = {We propose a simple unsupervised approach which exclusively relies on WordNet (Miller,1995) for predicting graded lexical entailment (GLE) in English. Inspired by the seminal work of Resnik (1995), our method models GLE as the sum of two information-theoretic scores: a symmetric semantic similarity score and an asymmetric specificity loss score, both exploiting the hierarchical synset structure of WordNet. Our approach also includes a simple disambiguation mechanism to handle polysemy in a given word pair. Despite its simplicity, our method achieves performance above the state of the art (Spearman $\rho$ = 0.75) on HyperLex (Vulic et al., 2017), the largest GLE dataset, outperforming all previous methods, including specialized word embeddings approaches that use WordNet as weak supervision.},
}
@comment{
Markdown (Informal):
[WordNet Is All You Need: A Surprisingly Effective Unsupervised Method for Graded Lexical Entailment](https://aclanthology.org/2023.findings-emnlp.615/) (Renner et al., Findings 2023)
ACL
}