@inproceedings{forst-kaplan-2006-importance,
title = "The importance of precise tokenizing for deep grammars",
author = "Forst, Martin and
Kaplan, Ronald M.",
editor = "Calzolari, Nicoletta and
Choukri, Khalid and
Gangemi, Aldo and
Maegaard, Bente and
Mariani, Joseph and
Odijk, Jan and
Tapias, Daniel",
    booktitle = "Proceedings of the Fifth International Conference on Language Resources and Evaluation ({LREC} 2006)",
month = may,
year = "2006",
address = "Genoa, Italy",
publisher = "European Language Resources Association (ELRA)",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/L06-1092/",
abstract = "We present a non-deterministic finite-state transducer that acts as a tokenizer and normalizer for free text that is input to a broad-coverage LFG of German. We compare the basic tokenizer used in an earlier version of the grammar and the more sophisticated tokenizer that we now use. The revised tokenizer increases the coverage of the grammar in terms of full parses from 68.3{\%} to 73.4{\%} on sentences 8,001 through 10,000 of the TiGer Corpus."
}