@article{gorman-sproat-2016-minimally,
title = "Minimally Supervised Number Normalization",
author = "Gorman, Kyle and
Sproat, Richard",
editor = "Lee, Lillian and
Johnson, Mark and
Toutanova, Kristina",
journal = "Transactions of the Association for Computational Linguistics",
volume = "4",
year = "2016",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/Q16-1036/",
doi = "10.1162/tacl_a_00114",
pages = "507--519",
abstract = "We propose two models for verbalizing numbers, a key component in speech recognition and synthesis systems. The first model uses an end-to-end recurrent neural network. The second model, drawing inspiration from the linguistics literature, uses finite-state transducers constructed with a minimal amount of training data. While both models achieve near-perfect performance, the latter model can be trained using several orders of magnitude less data than the former, making it particularly useful for low-resource languages."
}