@inproceedings{gao-etal-2025-beyond,
    title     = "Beyond Seen Data: Improving {KBQA} Generalization Through Schema-Guided Logical Form Generation",
    author    = "Gao, Shengxiang and
                 Lau, Jey Han and
                 Qi, Jianzhong",
    editor    = "Christodoulopoulos, Christos and
                 Chakraborty, Tanmoy and
                 Rose, Carolyn and
                 Peng, Violet",
    booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
    month     = nov,
    year      = "2025",
    address   = "Suzhou, China",
    publisher = "Association for Computational Linguistics",
    url       = "https://aclanthology.org/2025.emnlp-main.442/",
    pages     = "8764--8783",
    isbn      = "979-8-89176-332-6",
    abstract  = "Knowledge base question answering (KBQA) aims to answer user questions in natural language using rich human knowledge stored in large KBs. As current KBQA methods struggle with unseen knowledge base elements and their novel compositions at test time, we introduce SG-KBQA {---} a novel model that injects schema contexts into entity retrieval and logical form generation to tackle this issue. It exploits information about the semantics and structure of the knowledge base provided by schema contexts to enhance generalizability. We show that SG-KBQA achieves strong generalizability, outperforming state-of-the-art models on two commonly used benchmark datasets across a variety of test settings. Our source code is available at \url{https://github.com/gaosx2000/SG_KBQA}."
}
Markdown (Informal)
[Beyond Seen Data: Improving KBQA Generalization Through Schema-Guided Logical Form Generation](https://aclanthology.org/2025.emnlp-main.442/) (Gao et al., EMNLP 2025)
ACL