@inproceedings{goswami-etal-2023-switchprompt,
title = "{S}witch{P}rompt: Learning Domain-Specific Gated Soft Prompts for Classification in Low-Resource Domains",
author = "Goswami, Koustava and
Lange, Lukas and
Araki, Jun and
Adel, Heike",
editor = "Vlachos, Andreas and
Augenstein, Isabelle",
booktitle = "Proceedings of the 17th Conference of the European Chapter of the Association for Computational Linguistics",
month = may,
year = "2023",
address = "Dubrovnik, Croatia",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.eacl-main.197/",
doi = "10.18653/v1/2023.eacl-main.197",
pages = "2689--2695",
abstract = "Prompting pre-trained language models leads to promising results across natural language processing tasks but is less effective when applied in low-resource domains, due to the domain gap between the pre-training data and the downstream task. In this work, we bridge this gap with a novel and lightweight prompting methodology called SwitchPrompt for the adaptation of language models trained on datasets from the general domain to diverse low-resource domains. Using domain-specific keywords with a trainable gated prompt, SwitchPrompt offers domain-oriented prompting, that is, effective guidance on the target domains for general-domain language models. Our few-shot experiments on three text classification benchmarks demonstrate the efficacy of the general-domain pre-trained language models when used with SwitchPrompt. They often even outperform their domain-specific counterparts trained with baseline state-of-the-art prompting methods by up to 10.7{\%} performance increase in accuracy. This result indicates that SwitchPrompt effectively reduces the need for domain-specific language model pre-training."
}
[SwitchPrompt: Learning Domain-Specific Gated Soft Prompts for Classification in Low-Resource Domains](https://aclanthology.org/2023.eacl-main.197/) (Goswami et al., EACL 2023)