@inproceedings{yang-etal-2023-end,
title = "End-to-end Case-Based Reasoning for Commonsense Knowledge Base Completion",
author = "Yang, Zonglin and
Du, Xinya and
Cambria, Erik and
Cardie, Claire",
editor = "Vlachos, Andreas and
Augenstein, Isabelle",
booktitle = "Proceedings of the 17th Conference of the European Chapter of the Association for Computational Linguistics",
month = may,
year = "2023",
address = "Dubrovnik, Croatia",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.eacl-main.255/",
doi = "10.18653/v1/2023.eacl-main.255",
pages = "3509--3522",
abstract = "Pretrained language models have been shown to store knowledge in their parameters and have achieved reasonable performance in commonsense knowledge base completion (CKBC) tasks. However, CKBC is knowledge-intensive and it is reported that pretrained language models' performance in knowledge-intensive tasks are limited because of their incapability of accessing and manipulating knowledge. As a result, we hypothesize that providing retrieved passages that contain relevant knowledge as additional input to the CKBC task will improve performance. In particular, we draw insights from Case-Based Reasoning (CBR) {--} which aims to solve a new problem by reasoning with retrieved relevant cases, and investigate the direct application of it to CKBC. On two benchmark datasets, we demonstrate through automatic and human evaluations that our End-to-end Case-Based Reasoning Framework (ECBRF) generates more valid, informative, and novel knowledge than the state-of-the-art COMET model for CKBC in both the fully supervised and few-shot settings. We provide insights on why previous retrieval-based methods only achieve merely the same performance with COMET. From the perspective of CBR, our framework addresses a fundamental question on whether CBR methodology can be utilized to improve deep learning models."
}