@inproceedings{andor-etal-2019-giving,
    title = "Giving {BERT} a Calculator: Finding Operations and Arguments with Reading Comprehension",
    author = "Andor, Daniel and
      He, Luheng and
      Lee, Kenton and
      Pitler, Emily",
    editor = "Inui, Kentaro and
      Jiang, Jing and
      Ng, Vincent and
      Wan, Xiaojun",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)",
    month = nov,
    year = "2019",
    address = "Hong Kong, China",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/D19-1609/",
    doi = "10.18653/v1/D19-1609",
    pages = "5947--5952",
    abstract = "Reading comprehension models have been successfully applied to extractive text answers, but it is unclear how best to generalize these models to abstractive numerical answers. We enable a BERT-based reading comprehension model to perform lightweight numerical reasoning. We augment the model with a predefined set of executable `programs' which encompass simple arithmetic as well as extraction. Rather than having to learn to manipulate numbers directly, the model can pick a program and execute it. On the recent Discrete Reasoning Over Passages (DROP) dataset, designed to challenge reading comprehension models, we show a 33{\%} absolute improvement by adding shallow programs. The model can learn to predict new operations when appropriate in a math word problem setting (Roy and Roth, 2015) with very few training examples."
}
Markdown (Informal)
[Giving BERT a Calculator: Finding Operations and Arguments with Reading Comprehension](https://aclanthology.org/D19-1609/) (Andor et al., EMNLP-IJCNLP 2019)
ACL