@inproceedings{sen-etal-2023-knowledge,
  title     = {Knowledge Graph-augmented Language Models for Complex Question Answering},
  author    = {Sen, Priyanka and
               Mavadia, Sandeep and
               Saffari, Amir},
  editor    = {Dalvi Mishra, Bhavana and
               Durrett, Greg and
               Jansen, Peter and
               Neves Ribeiro, Danilo and
               Wei, Jason},
  booktitle = {Proceedings of the 1st Workshop on Natural Language Reasoning and Structured Explanations ({NLRSE})},
  month     = jun,
  year      = {2023},
  address   = {Toronto, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.nlrse-1.1/},
  doi       = {10.18653/v1/2023.nlrse-1.1},
  pages     = {1--8},
  abstract  = {Large language models have shown impressive abilities to reason over input text, however, they are prone to hallucinations. On the other hand, end-to-end knowledge graph question answering (KGQA) models output responses grounded in facts, but they still struggle with complex reasoning, such as comparison or ordinal questions. In this paper, we propose a new method for complex question answering where we combine a knowledge graph retriever based on an end-to-end KGQA model with a language model that reasons over the retrieved facts to return an answer. We observe that augmenting language model prompts with retrieved KG facts improves performance over using a language model alone by an average of 83{\%}. In particular, we see improvements on complex questions requiring count, intersection, or multi-hop reasoning operations.},
}
@comment{
Markdown (Informal)
[Knowledge Graph-augmented Language Models for Complex Question Answering](https://aclanthology.org/2023.nlrse-1.1/) (Sen et al., NLRSE 2023)
ACL
}