@inproceedings{xu-etal-2019-enhancing,
  title     = {Enhancing Key-Value Memory Neural Networks for Knowledge Based Question Answering},
  author    = {Xu, Kun and
               Lai, Yuxuan and
               Feng, Yansong and
               Wang, Zhiguo},
  editor    = {Burstein, Jill and
               Doran, Christy and
               Solorio, Thamar},
  booktitle = {Proceedings of the 2019 Conference of the North {A}merican Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long and Short Papers)},
  month     = jun,
  year      = {2019},
  address   = {Minneapolis, Minnesota},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/N19-1301/},
  doi       = {10.18653/v1/N19-1301},
  pages     = {2937--2947},
  abstract  = {Traditional Key-value Memory Neural Networks (KV-MemNNs) are proved to be effective to support shallow reasoning over a collection of documents in domain specific Question Answering or Reading Comprehension tasks. However, extending KV-MemNNs to Knowledge Based Question Answering (KB-QA) is not trivia, which should properly decompose a complex question into a sequence of queries against the memory, and update the query representations to support multi-hop reasoning over the memory. In this paper, we propose a novel mechanism to enable conventional KV-MemNNs models to perform interpretable reasoning for complex questions. To achieve this, we design a new query updating strategy to mask previously-addressed memory information from the query representations, and introduce a novel STOP strategy to avoid invalid or repeated memory reading without strong annotation signals. This also enables KV-MemNNs to produce structured queries and work in a semantic parsing fashion. Experimental results on benchmark datasets show that our solution, trained with question-answer pairs only, can provide conventional KV-MemNNs models with better reasoning abilities on complex questions, and achieve state-of-art performances.},
}
Markdown (Informal)
[Enhancing Key-Value Memory Neural Networks for Knowledge Based Question Answering](https://aclanthology.org/N19-1301/) (Xu et al., NAACL 2019)
ACL