@inproceedings{kim-etal-2022-ept,
    title = "{EPT}-{X}: An Expression-Pointer Transformer model that generates e{X}planations for numbers",
    author = "Kim, Bugeun and
      Ki, Kyung Seo and
      Rhim, Sangkyu and
      Gweon, Gahgene",
    editor = "Muresan, Smaranda and
      Nakov, Preslav and
      Villavicencio, Aline",
    booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
    month = may,
    year = "2022",
    address = "Dublin, Ireland",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2022.acl-long.305/",
    doi = "10.18653/v1/2022.acl-long.305",
    pages = "4442--4458",
    abstract = "In this paper, we propose a neural model EPT-X (Expression-Pointer Transformer with Explanations), which utilizes natural language explanations to solve an algebraic word problem. To enhance the explainability of the encoding process of a neural model, EPT-X adopts the concepts of plausibility and faithfulness which are drawn from math word problem solving strategies by humans. A plausible explanation is one that includes contextual information for the numbers and variables that appear in a given math word problem. A faithful explanation is one that accurately represents the reasoning process behind the model{'}s solution equation. The EPT-X model yields an average baseline performance of 69.59{\%} on our PEN dataset and produces explanations with quality that is comparable to human output. The contribution of this work is two-fold. (1) EPT-X model: An explainable neural model that sets a baseline for algebraic word problem solving task, in terms of model{'}s correctness, plausibility, and faithfulness. (2) New dataset: We release a novel dataset PEN (Problems with Explanations for Numbers), which expands the existing datasets by attaching explanations to each number/variable.",
    internal-note = "url normalized from ephemeral preview.aclanthology.org deployment to canonical aclanthology.org address matching the DOI suffix"
}
@comment{
Markdown (Informal):
[EPT-X: An Expression-Pointer Transformer model that generates eXplanations for numbers](https://aclanthology.org/2022.acl-long.305/) (Kim et al., ACL 2022)
ACL
}