@inproceedings{ek-etal-2020-punctuation,
    title     = {How does Punctuation Affect Neural Models in {Natural Language Inference}},
    author    = {Ek, Adam and
                 Bernardy, Jean-Philippe and
                 Chatzikyriakidis, Stergios},
    editor    = {Howes, Christine and
                 Chatzikyriakidis, Stergios and
                 Ek, Adam and
                 Somashekarappa, Vidya},
    booktitle = {Proceedings of the Probability and Meaning Conference ({PaM} 2020)},
    month     = jun,
    year      = {2020},
    address   = {Gothenburg},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2020.pam-1.15/},
    pages     = {109--116},
    abstract  = {Natural Language Inference models have reached almost human-level performance but their generalisation capabilities have not been yet fully characterized. In particular, sensitivity to small changes in the data is a current area of investigation. In this paper, we focus on the effect of punctuation on such models. Our findings can be broadly summarized as follows: (1) irrelevant changes in punctuation are correctly ignored by the recent transformer models (BERT) while older RNN-based models were sensitive to them. (2) All models, both transformers and RNN-based models, are incapable of taking into account small relevant changes in the punctuation.}
}
Markdown (Informal)
[How does Punctuation Affect Neural Models in Natural Language Inference](https://aclanthology.org/2020.pam-1.15/) (Ek et al., PaM 2020)
ACL