@article{kirov-cotterell-2018-recurrent,
title = "Recurrent Neural Networks in Linguistic Theory: Revisiting Pinker and Prince (1988) and the Past Tense Debate",
author = "Kirov, Christo and
Cotterell, Ryan",
editor = "Lee, Lillian and
Johnson, Mark and
Toutanova, Kristina and
Roark, Brian",
journal = "Transactions of the Association for Computational Linguistics",
volume = "6",
year = "2018",
address = "Cambridge, MA",
publisher = "MIT Press",
    url = "https://aclanthology.org/Q18-1045/",
doi = "10.1162/tacl_a_00247",
pages = "651--665",
    abstract = "Can advances in NLP help advance cognitive modeling? We examine the role of artificial neural networks, the current state of the art in many common NLP tasks, by returning to a classic case study. In 1986, Rumelhart and McClelland famously introduced a neural architecture that learned to transduce English verb stems to their past tense forms. Shortly thereafter in 1988, Pinker and Prince presented a comprehensive rebuttal of many of Rumelhart and McClelland's claims. Much of the force of their attack centered on the empirical inadequacy of the Rumelhart and McClelland model. Today, however, that model is severely outmoded. We show that the Encoder-Decoder network architectures used in modern NLP systems obviate most of Pinker and Prince's criticisms without requiring any simplification of the past tense mapping problem. We suggest that the empirical performance of modern networks warrants a reexamination of their utility in linguistic and cognitive modeling."
}