@inproceedings{cho-etal-2019-paraphrase,
title = "Paraphrase Generation for Semi-Supervised Learning in {NLU}",
author = "Cho, Eunah and
Xie, He and
Campbell, William M.",
editor = "Bosselut, Antoine and
Celikyilmaz, Asli and
Ghazvininejad, Marjan and
Iyer, Srinivasan and
Khandelwal, Urvashi and
Rashkin, Hannah and
Wolf, Thomas",
booktitle = "Proceedings of the Workshop on Methods for Optimizing and Evaluating Neural Language Generation",
month = jun,
year = "2019",
address = "Minneapolis, Minnesota",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/W19-2306/",
doi = "10.18653/v1/W19-2306",
pages = "45--54",
abstract = "Semi-supervised learning is an efficient way to improve performance for natural language processing systems. In this work, we propose Para-SSL, a scheme to generate candidate utterances using paraphrasing and methods from semi-supervised learning. In order to perform paraphrase generation in the context of a dialog system, we automatically extract paraphrase pairs to create a paraphrase corpus. Using this data, we build a paraphrase generation system and perform one-to-many generation, followed by a validation step to select only the utterances with good quality. The paraphrase-based semi-supervised learning is applied to five functionalities in a natural language understanding system. Our proposed method for semi-supervised learning using paraphrase generation does not require user utterances and can be applied prior to releasing a new functionality to a system. Experiments show that we can achieve up to 19{\%} of relative slot error reduction without an access to user utterances, and up to 35{\%} when leveraging live traffic utterances."
}
Markdown (Informal)
[Paraphrase Generation for Semi-Supervised Learning in NLU](https://aclanthology.org/W19-2306/) (Cho et al., NAACL 2019)
ACL
Eunah Cho, He Xie, and William M. Campbell. 2019. Paraphrase Generation for Semi-Supervised Learning in NLU. In Proceedings of the Workshop on Methods for Optimizing and Evaluating Neural Language Generation, pages 45–54, Minneapolis, Minnesota. Association for Computational Linguistics.