@inproceedings{zhang-etal-2022-promptgen,
  title     = {{PromptGen}: Automatically Generate Prompts using Generative Models},
  author    = {Zhang, Yue and
               Fei, Hongliang and
               Li, Dingcheng and
               Li, Ping},
  editor    = {Carpuat, Marine and
               de Marneffe, Marie-Catherine and
               Meza Ruiz, Ivan Vladimir},
  booktitle = {Findings of the Association for Computational Linguistics: {NAACL} 2022},
  month     = jul,
  year      = {2022},
  address   = {Seattle, United States},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.findings-naacl.3/},
  doi       = {10.18653/v1/2022.findings-naacl.3},
  pages     = {30--37},
  abstract  = {Recently, prompt learning has received significant attention, where the downstream tasks are reformulated to the mask-filling task with the help of a textual prompt. The key point of prompt learning is finding the most appropriate prompt. This paper proposes a novel model PromptGen, which can automatically generate prompts conditional on the input sentence. PromptGen is the first work considering dynamic prompt generation for knowledge probing, based on a pre-trained generative model. To mitigate any label information leaking from the pre-trained generative model, when given a generated prompt, we replace the query input with ``None''. We pursue that this perturbed context-free prompt cannot trigger the correct label. We evaluate our model on the knowledge probing LAMA benchmark, and show that PromptGen significantly outperforms other baselines.},
}
Markdown (Informal)
[PromptGen: Automatically Generate Prompts using Generative Models](https://aclanthology.org/2022.findings-naacl.3/) (Zhang et al., Findings 2022)
ACL