@inproceedings{ren-zhu-2022-specializing,
title = "Specializing Pre-trained Language Models for Better Relational Reasoning via Network Pruning",
author = "Ren, Siyu and
Zhu, Kenny",
editor = "Carpuat, Marine and
de Marneffe, Marie-Catherine and
Meza Ruiz, Ivan Vladimir",
booktitle = "Findings of the Association for Computational Linguistics: NAACL 2022",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.findings-naacl.169/",
doi = "10.18653/v1/2022.findings-naacl.169",
pages = "2195--2207",
abstract = "Pretrained masked language models (PLMs) were shown to be inheriting a considerable amount of relational knowledge from the source corpora. In this paper, we present an in-depth and comprehensive study concerning specializing PLMs into relational models from the perspective of network pruning. We show that it is possible to find subnetworks capable of representing grounded commonsense relations at non-trivial sparsity while being more generalizable than original PLMs in scenarios requiring knowledge of single or multiple commonsense relations."
}