@inproceedings{rahimi-surdeanu-2022-transformer,
title = "Do Transformer Networks Improve the Discovery of Rules from Text?",
author = "Rahimi, Mahdi and
Surdeanu, Mihai",
editor = "Calzolari, Nicoletta and
B{\'e}chet, Fr{\'e}d{\'e}ric and
Blache, Philippe and
Choukri, Khalid and
Cieri, Christopher and
Declerck, Thierry and
Goggi, Sara and
Isahara, Hitoshi and
Maegaard, Bente and
Mariani, Joseph and
Mazo, H{\'e}l{\`e}ne and
Odijk, Jan and
Piperidis, Stelios",
booktitle = "Proceedings of the Thirteenth Language Resources and Evaluation Conference",
month = jun,
year = "2022",
address = "Marseille, France",
publisher = "European Language Resources Association",
    url = "https://aclanthology.org/2022.lrec-1.395/",
pages = "3706--3714",
    abstract = "With their Discovery of Inference Rules from Text (DIRT) algorithm, Lin and Pantel (2001) made a seminal contribution to the field of rule acquisition from text, by adapting the distributional hypothesis of Harris (1954) to rules that model binary relations such as X treat Y. DIRT's relevance is renewed in today's neural era given the recent focus on interpretability in the field of natural language processing. We propose a novel take on the DIRT algorithm, where we implement the distributional hypothesis using the contextualized embeddings provided by BERT, a transformer-network-based language model (Vaswani et al. 2017; Devlin et al. 2018). In particular, we change the similarity measure between pairs of slots (i.e., the set of words matched by a rule) from the original formula that relies on lexical items to a formula computed using contextualized embeddings. We empirically demonstrate that this new similarity method yields a better implementation of the distributional hypothesis, and this, in turn, yields rules that outperform the original algorithm in the question answering-based evaluation proposed by Lin and Pantel (2001)."
}
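
A minimal sketch of the idea summarized in the abstract, assuming the Hugging Face `transformers` API: score the similarity of two rule slots by comparing BERT contextualized embeddings of their fillers rather than lexical overlap. The helper names (`filler_embedding`, `slot_similarity`) and the cosine-over-mean-embeddings score are illustrative stand-ins, not the formula used in the paper.

```python
# Sketch: slot similarity from BERT contextualized embeddings (illustrative only).
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModel.from_pretrained("bert-base-uncased")
model.eval()

def filler_embedding(sentence: str, filler: str) -> torch.Tensor:
    """Mean of the last-layer BERT vectors for the subword tokens of `filler`
    as it occurs in `sentence` (hypothetical helper for illustration)."""
    enc = tokenizer(sentence, return_tensors="pt")
    with torch.no_grad():
        hidden = model(**enc).last_hidden_state[0]           # (seq_len, hidden_dim)
    filler_ids = tokenizer(filler, add_special_tokens=False)["input_ids"]
    ids = enc["input_ids"][0].tolist()
    # Locate the filler's subword span inside the encoded sentence.
    for i in range(len(ids) - len(filler_ids) + 1):
        if ids[i:i + len(filler_ids)] == filler_ids:
            return hidden[i:i + len(filler_ids)].mean(dim=0)
    return hidden.mean(dim=0)                                 # fallback: sentence average

def slot_similarity(slot_a, slot_b) -> float:
    """Cosine similarity between the averaged contextualized embeddings of two
    slots, each given as a list of (sentence, filler) pairs."""
    emb_a = torch.stack([filler_embedding(s, f) for s, f in slot_a]).mean(dim=0)
    emb_b = torch.stack([filler_embedding(s, f) for s, f in slot_b]).mean(dim=0)
    return torch.nn.functional.cosine_similarity(emb_a, emb_b, dim=0).item()

# Example: compare the X slots of "X treat Y" and "X cure Y".
slot_x1 = [("Aspirin treats headaches.", "Aspirin"), ("Doctors treat patients.", "Doctors")]
slot_x2 = [("Aspirin cures headaches.", "Aspirin"), ("Antibiotics cure infections.", "Antibiotics")]
print(slot_similarity(slot_x1, slot_x2))
```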