@inproceedings{michael-etal-2020-asking,
title = "Asking without Telling: Exploring Latent Ontologies in Contextual Representations",
author = "Michael, Julian and
Botha, Jan A. and
Tenney, Ian",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2020.emnlp-main.552/",
doi = "10.18653/v1/2020.emnlp-main.552",
pages = "6792--6812",
abstract = "The success of pretrained contextual encoders, such as ELMo and BERT, has brought a great deal of interest in what these models learn: do they, without explicit supervision, learn to encode meaningful notions of linguistic structure? If so, how is this structure encoded? To investigate this, we introduce latent subclass learning (LSL): a modification to classifier-based probing that induces a latent categorization (or ontology) of the probe`s inputs. Without access to fine-grained gold labels, LSL extracts emergent structure from input representations in an interpretable and quantifiable form. In experiments, we find strong evidence of familiar categories, such as a notion of personhood in ELMo, as well as novel ontological distinctions, such as a preference for fine-grained semantic roles on core arguments. Our results provide unique new evidence of emergent structure in pretrained encoders, including departures from existing annotations which are inaccessible to earlier methods."
}
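
For context on the method the abstract describes, the following is a minimal, hypothetical sketch (in PyTorch) of what a latent subclass learning (LSL) probe could look like: a binary probing classifier whose positive class is split into K latent subclasses, with the positive-class probability obtained by marginalizing over them, so that subclass assignments emerge without fine-grained gold labels. This is a sketch under stated assumptions, not the paper's actual implementation; all names and hyperparameters here (LSLProbe, num_subclasses=8, dim=1024) are illustrative.

import torch
import torch.nn as nn

class LSLProbe(nn.Module):
    """Hypothetical LSL-style probe over frozen contextual representations.

    The positive class of a binary probing task is split into K latent
    subclasses; training only ever sees binary labels, and the induced
    subclass categorization is read off at inference time.
    """

    def __init__(self, dim: int, num_subclasses: int):
        super().__init__()
        # One logit per latent subclass, plus one logit for the negative class.
        self.scorer = nn.Linear(dim, num_subclasses + 1)

    def forward(self, reps: torch.Tensor):
        probs = self.scorer(reps).softmax(dim=-1)
        # Marginalize: P(positive) is the sum of the latent subclass probabilities.
        p_pos = probs[..., :-1].sum(dim=-1)
        # The argmax subclass is the induced latent category for each input.
        subclass = probs[..., :-1].argmax(dim=-1)
        return p_pos, subclass

# Illustrative usage: the training signal is only the binary gold label,
# yet the probe partitions positives into latent subclasses.
probe = LSLProbe(dim=1024, num_subclasses=8)   # dim chosen to suggest ELMo-sized vectors
reps = torch.randn(32, 1024)                   # stand-in for frozen encoder outputs
labels = torch.randint(0, 2, (32,)).float()
p_pos, subclass = probe(reps)
loss = nn.functional.binary_cross_entropy(
    p_pos.clamp(1e-7, 1 - 1e-7), labels        # clamp for numerical stability
)
loss.backward()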