@inproceedings{lin-ng-2022-bert,
    % Brace whole protected words ({BERT}, {IS-A}) so style-applied sentence
    % casing cannot downcase them; partial braces like {IS}-A are fragile.
    title     = "Does {BERT} Know that the {IS-A} Relation Is Transitive?",
    author    = "Lin, Ruixi and
                 Ng, Hwee Tou",
    editor    = "Muresan, Smaranda and
                 Nakov, Preslav and
                 Villavicencio, Aline",
    booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
    month     = may,
    year      = "2022",
    address   = "Dublin, Ireland",
    publisher = "Association for Computational Linguistics",
    % Canonical Anthology URL; the previous value pointed at a temporary
    % preview/ingestion mirror (preview.aclanthology.org/jlcl-multiple-ingestion/)
    % that is not a stable permalink.
    url       = "https://aclanthology.org/2022.acl-short.11/",
    doi       = "10.18653/v1/2022.acl-short.11",
    pages     = "94--99",
    % Fixed "BERT`s" -> "BERT's": a backtick typesets as an opening quote in LaTeX.
    abstract  = "The success of a natural language processing (NLP) system on a task does not amount to fully understanding the complexity of the task, typified by many deep learning models. One such question is: can a black-box model make logically consistent predictions for transitive relations? Recent studies suggest that pre-trained BERT can capture lexico-semantic clues from words in the context. However, to what extent BERT captures the transitive nature of some lexical relations is unclear. From a probing perspective, we examine WordNet word senses and the IS-A relation, which is a transitive relation. That is, for senses A, B, and C, A is-a B and B is-a C entail A is-a C. We aim to quantify how much BERT agrees with the transitive property of IS-A relations, via a minimalist probing setting. Our investigation reveals that BERT's predictions do not fully obey the transitivity property of the IS-A relation."
}
Markdown (Informal)
[Does BERT Know that the IS-A Relation Is Transitive?](https://aclanthology.org/2022.acl-short.11/) (Lin & Ng, ACL 2022)
ACL
- Ruixi Lin and Hwee Tou Ng. 2022. Does BERT Know that the IS-A Relation Is Transitive?. In Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers), pages 94–99, Dublin, Ireland. Association for Computational Linguistics.