@inproceedings{paul-frank-2020-social,
  title     = {Social Commonsense Reasoning with Multi-Head Knowledge Attention},
  author    = {Paul, Debjit and
               Frank, Anette},
  editor    = {Cohn, Trevor and
               He, Yulan and
               Liu, Yang},
  % Brace-protect the acronym so styles that recase booktitle keep "EMNLP".
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2020},
  month     = nov,
  year      = {2020},
  % "Online" is the venue designation used by ACL for 2020 events.
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  % Canonical ACL Anthology URL (the original pointed at a preview/staging
  % ingestion mirror, which is not a stable link).
  url       = {https://aclanthology.org/2020.findings-emnlp.267/},
  % Bare DOI, no resolver prefix; styles add https://doi.org/ themselves.
  doi       = {10.18653/v1/2020.findings-emnlp.267},
  pages     = {2969--2980},
  % Apostrophes fixed: the scraped text used backticks (model`s), which
  % typeset as left-quote glyphs in LaTeX.
  abstract  = {Social Commonsense Reasoning requires understanding of text, knowledge about social events and their pragmatic implications, as well as commonsense reasoning skills. In this work we propose a novel multi-head knowledge attention model that encodes semi-structured commonsense inference rules and learns to incorporate them in a transformer-based reasoning cell. We assess the model's performance on two tasks that require different reasoning skills: Abductive Natural Language Inference and Counterfactual Invariance Prediction as a new task. We show that our proposed model improves performance over strong state-of-the-art models (i.e., RoBERTa) across both reasoning tasks. Notably we are, to the best of our knowledge, the first to demonstrate that a model that learns to perform counterfactual reasoning helps predicting the best explanation in an abductive reasoning task. We validate the robustness of the model's reasoning capabilities by perturbing the knowledge and provide qualitative analysis on the model's knowledge incorporation capabilities.},
}
Markdown (Informal)
[Social Commonsense Reasoning with Multi-Head Knowledge Attention](https://aclanthology.org/2020.findings-emnlp.267/) (Paul & Frank, Findings 2020)
ACL