@inproceedings{guo-etal-2021-improving-numerical,
title = "Improving Numerical Reasoning Skills in the Modular Approach for Complex Question Answering on Text",
author = "Guo, Xiao-Yu and
Li, Yuan-Fang and
Haffari, Gholamreza",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Author-page-Marten-During-lu/2021.findings-emnlp.231/",
doi = "10.18653/v1/2021.findings-emnlp.231",
pages = "2713--2718",
abstract = "Numerical reasoning skills are essential for complex question answering (CQA) over text. It requires opertaions including counting, comparison, addition and subtraction. A successful approach to CQA on text, Neural Module Networks (NMNs), follows the programmer-interpreter paradigm and leverages specialised modules to perform compositional reasoning. However, the NMNs framework does not consider the relationship between numbers and entities in both questions and paragraphs. We propose effective techniques to improve NMNs' numerical reasoning capabilities by making the interpreter question-aware and capturing the relationship between entities and numbers. On the same subset of the DROP dataset for CQA on text, experimental results show that our additions outperform the original NMNs by 3.0 points for the overall F1 score."
}