@inproceedings{osama-etal-2019-question,
title = "Question Answering Using Hierarchical Attention on Top of {BERT} Features",
author = "Osama, Reham and
El-Makky, Nagwa and
Torki, Marwan",
editor = "Fisch, Adam and
Talmor, Alon and
Jia, Robin and
Seo, Minjoon and
Choi, Eunsol and
Chen, Danqi",
booktitle = "Proceedings of the 2nd Workshop on Machine Reading for Question Answering",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/D19-5825/",
doi = "10.18653/v1/D19-5825",
pages = "191--195",
abstract = "The model submitted works as follows. When supplied a question and a passage it makes use of the BERT embedding along with the hierarchical attention model which consists of 2 parts, the co-attention and the self-attention, to locate a continuous span of the passage that is the answer to the question."
}
Markdown (Informal)
[Question Answering Using Hierarchical Attention on Top of BERT Features](https://aclanthology.org/D19-5825/) (Osama et al., 2019)
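The abstract describes a span-extraction pipeline: BERT features for the question and passage, a co-attention stage, a self-attention stage, and prediction of a continuous answer span. As a rough illustration only, the following minimal PyTorch sketch shows that kind of architecture; the class name, the use of `nn.MultiheadAttention` as a stand-in for the paper's attention layers, and the head count and hidden size are assumptions for illustration, not the authors' implementation.

```python
# Hypothetical sketch of a hierarchical-attention span predictor over BERT
# features (not the authors' released code).
import torch
import torch.nn as nn


class HierarchicalAttentionSpanPredictor(nn.Module):
    def __init__(self, hidden_size=768, num_heads=8):
        super().__init__()
        # Co-attention: passage tokens attend over question tokens.
        self.co_attention = nn.MultiheadAttention(hidden_size, num_heads, batch_first=True)
        # Self-attention: passage tokens attend over the question-aware passage.
        self.self_attention = nn.MultiheadAttention(hidden_size, num_heads, batch_first=True)
        # Two logits per passage token: answer-span start and end.
        self.span_head = nn.Linear(hidden_size, 2)

    def forward(self, passage_feats, question_feats):
        # passage_feats:  (batch, passage_len, hidden)  BERT features of the passage
        # question_feats: (batch, question_len, hidden) BERT features of the question
        coattended, _ = self.co_attention(passage_feats, question_feats, question_feats)
        selfattended, _ = self.self_attention(coattended, coattended, coattended)
        start_logits, end_logits = self.span_head(selfattended).split(1, dim=-1)
        return start_logits.squeeze(-1), end_logits.squeeze(-1)


# Usage with random stand-ins for BERT-base features (hidden size 768).
model = HierarchicalAttentionSpanPredictor()
passage = torch.randn(1, 300, 768)
question = torch.randn(1, 20, 768)
start_logits, end_logits = model(passage, question)
answer_span = (start_logits.argmax(-1).item(), end_logits.argmax(-1).item())
print(answer_span)
```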