@inproceedings{chen-etal-2019-bidirectional,
title = "Bidirectional Attentive Memory Networks for Question Answering over Knowledge Bases",
author = "Chen, Yu and
Wu, Lingfei and
Zaki, Mohammed J.",
editor = "Burstein, Jill and
Doran, Christy and
Solorio, Thamar",
booktitle = "Proceedings of the 2019 Conference of the North {A}merican Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers)",
month = jun,
year = "2019",
address = "Minneapolis, Minnesota",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/N19-1299/",
doi = "10.18653/v1/N19-1299",
pages = "2913--2923",
abstract = "When answering natural language questions over knowledge bases (KBs), different question components and KB aspects play different roles. However, most existing embedding-based methods for knowledge base question answering (KBQA) ignore the subtle inter-relationships between the question and the KB (e.g., entity types, relation paths and context). In this work, we propose to directly model the two-way flow of interactions between the questions and the KB via a novel Bidirectional Attentive Memory Network, called BAMnet. Requiring no external resources and only very few hand-crafted features, on the WebQuestions benchmark, our method significantly outperforms existing information-retrieval based methods, and remains competitive with (hand-crafted) semantic parsing based methods. Also, since we use attention mechanisms, our method offers better interpretability compared to other baselines."
}
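The abstract's central idea is attention flowing in both directions: from the question to the KB memory and from the KB memory back to the question. As a rough, generic illustration of such two-way attention (not the authors' BAMnet implementation; the dot-product scoring, shapes, and all names here are assumptions for the sketch), a minimal NumPy example:

```python
import numpy as np

def softmax(x, axis=-1):
    """Numerically stable softmax along the given axis."""
    x = x - x.max(axis=axis, keepdims=True)
    e = np.exp(x)
    return e / e.sum(axis=axis, keepdims=True)

def bidirectional_attention(question, memory):
    """Two-way attention between question token vectors and KB memory slots.

    question : (n_q, d) array of question token encodings (hypothetical encoder output)
    memory   : (n_m, d) array of KB candidate/memory vectors
    Returns question-aware memory summaries and KB-aware question summaries.
    """
    # Pairwise relevance between every question token and every memory slot.
    scores = question @ memory.T                 # (n_q, n_m)

    # Question -> KB: each question token attends over the memory slots.
    q_to_kb = softmax(scores, axis=1) @ memory   # (n_q, d)

    # KB -> question: each memory slot attends over the question tokens.
    kb_to_q = softmax(scores, axis=0).T @ question  # (n_m, d)

    return q_to_kb, kb_to_q

# Toy usage with random vectors standing in for learned embeddings.
rng = np.random.default_rng(0)
q_to_kb, kb_to_q = bidirectional_attention(rng.normal(size=(6, 32)),
                                           rng.normal(size=(10, 32)))
print(q_to_kb.shape, kb_to_q.shape)  # (6, 32) (10, 32)
```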