@inproceedings{lee-etal-2021-improving-text-auto,
title = "Improving Text Auto-Completion with Next Phrase Prediction",
author = "Lee, Dong-Ho and
Hu, Zhiqiang and
Lee, Roy Ka-Wei",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.findings-emnlp.378/",
doi = "10.18653/v1/2021.findings-emnlp.378",
pages = "4434--4438",
    abstract = "Language models such as GPT-2 have performed well on constructing syntactically sound sentences for text auto-completion tasks. However, such models often require considerable training effort to adapt to specific writing domains (e.g., medical). In this paper, we propose an intermediate training strategy to enhance pre-trained language models' performance in the text auto-completion task and quickly adapt them to specific domains. Our strategy includes a novel self-supervised training objective called Next Phrase Prediction (NPP), which encourages a language model to complete the partial query with enriched phrases and eventually improve the model{'}s text auto-completion performance. Preliminary experiments have shown that our approach is able to outperform the baselines in auto-completion for email and academic-writing domains."
}