@inproceedings{tufa-etal-2023-wordnet,
    title     = {A {WordNet} View on Crosslingual Transformers},
    author    = {Tufa, Wondimagegnhue and
                 Beinborn, Lisa and
                 Vossen, Piek},
    editor    = {Rigau, German and
                 Bond, Francis and
                 Rademaker, Alexandre},
    booktitle = {Proceedings of the 12th Global Wordnet Conference},
    month     = jan,
    year      = {2023},
    address   = {University of the Basque Country, Donostia - San Sebastian, Basque Country},
    publisher = {Global Wordnet Association},
    url       = {https://aclanthology.org/2023.gwc-1.2},
    pages     = {14--24},
    abstract  = {WordNet is a database that represents relations between words and concepts as an abstraction of the contexts in which words are used. Contextualized language models represent words in contexts but leave the underlying concepts implicit. In this paper, we investigate how different layers of a pre-trained language model shape the abstract lexical relationship toward the actual contextual concept. Can we define the amount of contextualized concept forming needed given the abstracted representation of a word? Specifically, we consider samples of words with different polysemy profiles shared across three languages, assuming that words with a different polysemy profile require a different degree of concept shaping by context. We conduct probing experiments to investigate the impact of prior polysemy profiles on the representation in different layers. We analyze how contextualized models can approximate meaning through context and examine crosslingual interference effects.},
}
Markdown (Informal)
[A WordNet View on Crosslingual Transformers](https://aclanthology.org/2023.gwc-1.2) (Tufa et al., GWC 2023)
ACL
- Wondimagegnhue Tufa, Lisa Beinborn, and Piek Vossen. 2023. A WordNet View on Crosslingual Transformers. In Proceedings of the 12th Global Wordnet Conference, pages 14–24, University of the Basque Country, Donostia - San Sebastian, Basque Country. Global Wordnet Association.