@inproceedings{wang-etal-2023-towards-alleviating,
title = "Towards Alleviating the Object Bias in Prompt Tuning-based Factual Knowledge Extraction",
author = "Wang, Yuhang and
Lu, Dongyuan and
Kong, Chao and
Sang, Jitao",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2023",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.findings-acl.270/",
doi = "10.18653/v1/2023.findings-acl.270",
pages = "4420--4432",
abstract = "Many works employed prompt tuning methods to automatically optimize prompt queries and extract the factual knowledge stored in Pre-trained Language Models. In this paper, we observe that the optimized prompts, including discrete prompts and continuous prompts, exhibit undesirable object bias. To handle this problem, we propose a novel prompt tuning method called MeCoD consisting of three modules: Prompt Encoder, Object Equalization and Biased Object Obstruction. Experimental results show that MeCoD can significantly reduce the object bias and at the same time improve accuracy of factual knowledge extraction."
}