@inproceedings{prickett-etal-2018-seq2seq,
title = "{S}eq2{S}eq Models with Dropout can Learn Generalizable Reduplication",
author = "Prickett, Brandon and
Traylor, Aaron and
Pater, Joe",
editor = "Kuebler, Sandra and
Nicolai, Garrett",
booktitle = "Proceedings of the Fifteenth Workshop on Computational Research in Phonetics, Phonology, and Morphology",
month = oct,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W18-5810/",
doi = "10.18653/v1/W18-5810",
pages = "93--100",
abstract = "Natural language reduplication can pose a challenge to neural models of language, and has been argued to require variables (Marcus et al., 1999). Sequence-to-sequence neural networks have been shown to perform well at a number of other morphological tasks (Cotterell et al., 2016), and produce results that highly correlate with human behavior (Kirov, 2017; Kirov {\&} Cotterell, 2018) but do not include any explicit variables in their architecture. We find that they can learn a reduplicative pattern that generalizes to novel segments if they are trained with dropout (Srivastava et al., 2014). We argue that this matches the scope of generalization observed in human reduplication."
}
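For orientation, the setup the abstract describes can be sketched as a character-level encoder-decoder trained on total reduplication, with dropout applied to the embeddings. The sketch below is illustrative and is not the authors' implementation: the segment inventory, GRU architecture, dropout placement, hyperparameters, and the held-out-segment probe are all assumptions that only loosely mirror the paper's novel-segment test.

```python
# Minimal sketch (assumed details, not the paper's code): a seq2seq model
# with dropout (Srivastava et al., 2014) on a toy reduplication task,
# mapping a CV syllable to its copy, e.g. "ba" -> "baba".
import random
import torch
import torch.nn as nn

SEGS = list("bdgptk") + list("aiu")      # toy segment inventory (assumed)
VOCAB = ["<pad>", "<s>", "</s>"] + SEGS
IDX = {c: i for i, c in enumerate(VOCAB)}
HELD_OUT = "g"                           # withheld in training to probe novel segments

def make_pair(train=True):
    """Random CV syllable mapped to its total reduplication."""
    onsets = [c for c in "bdgptk" if not (train and c == HELD_OUT)]
    syll = random.choice(onsets) + random.choice("aiu")
    return syll, syll + syll

def encode(s):
    return torch.tensor([[IDX["<s>"]] + [IDX[c] for c in s] + [IDX["</s>"]]])

class Seq2Seq(nn.Module):
    def __init__(self, emb=16, hid=64, p_drop=0.5):
        super().__init__()
        self.emb = nn.Embedding(len(VOCAB), emb)
        self.drop = nn.Dropout(p_drop)   # the dropout the paper credits for generalization
        self.enc = nn.GRU(emb, hid, batch_first=True)
        self.dec = nn.GRU(emb, hid, batch_first=True)
        self.out = nn.Linear(hid, len(VOCAB))

    def forward(self, src, tgt_in):
        _, h = self.enc(self.drop(self.emb(src)))
        dec_out, _ = self.dec(self.drop(self.emb(tgt_in)), h)
        return self.out(dec_out)

model = Seq2Seq()
opt = torch.optim.Adam(model.parameters(), lr=1e-3)
loss_fn = nn.CrossEntropyLoss()

for step in range(3000):                 # tiny toy training loop
    src_s, tgt_s = make_pair()
    src, tgt = encode(src_s), encode(tgt_s)
    logits = model(src, tgt[:, :-1])     # teacher forcing
    loss = loss_fn(logits.reshape(-1, len(VOCAB)), tgt[:, 1:].reshape(-1))
    opt.zero_grad(); loss.backward(); opt.step()

def predict(s, max_len=8):
    """Greedy decoding; dropout is disabled in eval mode."""
    model.eval()
    with torch.no_grad():
        _, h = model.enc(model.emb(encode(s)))
        tok, out = torch.tensor([[IDX["<s>"]]]), []
        for _ in range(max_len):
            o, h = model.dec(model.emb(tok), h)
            tok = model.out(o).argmax(-1)
            if tok.item() == IDX["</s>"]:
                break
            out.append(VOCAB[tok.item()])
    return "".join(out)

print(predict("ba"))  # trained segments; should copy to "baba"
print(predict("ga"))  # novel-onset probe, loosely mirroring the paper's test
```

Note that in this toy version the held-out segment's embedding is never updated during training, so the novel-segment probe is a much weaker test than the paper's; it is included only to show where such a test would sit in the pipeline.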