@inproceedings{zhang-etal-2018-subword,
title = "Subword-augmented Embedding for Cloze Reading Comprehension",
author = "Zhang, Zhuosheng and
Huang, Yafang and
Zhao, Hai",
editor = "Bender, Emily M. and
Derczynski, Leon and
Isabelle, Pierre",
booktitle = "Proceedings of the 27th International Conference on Computational Linguistics",
month = aug,
year = "2018",
address = "Santa Fe, New Mexico, USA",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/C18-1153/",
pages = "1802--1814",
    abstract = "Representation learning is the foundation of machine reading comprehension. In state-of-the-art models, deep learning methods broadly use word and character level representations. However, the character is not naturally the minimal linguistic unit. In addition, with a simple concatenation of character and word embedding, previous models actually give a suboptimal solution. In this paper, we propose to use subword rather than character for word embedding enhancement. We also empirically explore different augmentation strategies on subword-augmented embedding to enhance the cloze-style reading comprehension model (reader). In detail, we present a reader that uses subword-level representation to augment word embedding with a short list to handle rare words effectively. A thorough examination is conducted to evaluate the comprehensive performance and generalization ability of the proposed reader. Experimental results show that the proposed approach helps the reader significantly outperform the state-of-the-art baselines on various public datasets."
}
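The abstract describes augmenting word embeddings with subword-level representations, keeping per-word vectors only for a short list of frequent words so that rare words fall back to subword composition. The following is a minimal illustrative sketch of that idea, not the authors' implementation: the class name, vocabulary sizes, mean-pooling over subword vectors, and the concatenation strategy are all assumptions made for the example.

# Minimal sketch (assumed details, not the paper's code) of a subword-augmented
# embedding with a short list for rare words.
import torch
import torch.nn as nn


class ShortlistSubwordEmbedding(nn.Module):
    def __init__(self, shortlist_size, subword_vocab_size, dim):
        super().__init__()
        # Slot 0 of the word table is reserved for rare (out-of-shortlist) words.
        self.word_emb = nn.Embedding(shortlist_size + 1, dim)
        self.subword_emb = nn.Embedding(subword_vocab_size, dim, padding_idx=0)

    def forward(self, word_ids, subword_ids):
        # word_ids: (batch, seq); ids outside the short list map to the shared slot 0.
        # subword_ids: (batch, seq, max_subwords); 0 marks padding.
        word_ids = torch.where(word_ids < self.word_emb.num_embeddings,
                               word_ids, torch.zeros_like(word_ids))
        w = self.word_emb(word_ids)                     # (batch, seq, dim)
        s = self.subword_emb(subword_ids)               # (batch, seq, max_subwords, dim)
        mask = (subword_ids != 0).unsqueeze(-1).float()
        s = (s * mask).sum(dim=2) / mask.sum(dim=2).clamp(min=1.0)  # mean over subwords
        # One simple augmentation strategy: concatenate the word and subword views.
        return torch.cat([w, s], dim=-1)                # (batch, seq, 2 * dim)


# Toy usage (sizes are illustrative only):
emb = ShortlistSubwordEmbedding(shortlist_size=50000, subword_vocab_size=10000, dim=100)
out = emb(torch.randint(0, 60000, (2, 8)), torch.randint(0, 10000, (2, 8, 4)))
print(out.shape)  # torch.Size([2, 8, 200])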