@inproceedings{yin-etal-2016-simple,
  title     = {Simple Question Answering by Attentive Convolutional Neural Network},
  author    = {Yin, Wenpeng and
               Yu, Mo and
               Xiang, Bing and
               Zhou, Bowen and
               Sch{\"u}tze, Hinrich},
  editor    = {Matsumoto, Yuji and
               Prasad, Rashmi},
  booktitle = {Proceedings of {COLING} 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  month     = dec,
  year      = {2016},
  address   = {Osaka, Japan},
  publisher = {The COLING 2016 Organizing Committee},
  url       = {https://aclanthology.org/C16-1164/},
  pages     = {1746--1756},
  abstract  = {This work focuses on answering single-relation factoid questions over Freebase. Each question can acquire the answer from a single fact of form (subject, predicate, object) in Freebase. This task, simple question answering (SimpleQA), can be addressed via a two-step pipeline: entity linking and fact selection. In fact selection, we match the subject entity in a fact candidate with the entity mention in the question by a character-level convolutional neural network (char-CNN), and match the predicate in that fact with the question by a word-level CNN (word-CNN). This work makes two main contributions. (i) A simple and effective entity linker over Freebase is proposed. Our entity linker outperforms the state-of-the-art entity linker over SimpleQA task. (ii) A novel attentive maxpooling is stacked over word-CNN, so that the predicate representation can be matched with the predicate-focused question representation more effectively. Experiments show that our system sets new state-of-the-art in this task.},
}
Markdown (Informal)
[Simple Question Answering by Attentive Convolutional Neural Network](https://aclanthology.org/C16-1164/) (Yin et al., COLING 2016)
ACL