@inproceedings{qi-etal-2024-ppdac,
    title = "{PPDAC}: A Plug-and-Play Data Augmentation Component for Few-shot Extractive Question Answering",
    author = "Qi, Huang and
      Han, Fu and
      Wenbin, Luo and
      Mingwen, Wang and
      Kaiwei, Luo",
    editor = "Sun, Maosong and
      Liang, Jiye and
      Han, Xianpei and
      Liu, Zhiyuan and
      He, Yulan",
    booktitle = "Proceedings of the 23rd Chinese National Conference on Computational Linguistics (Volume 1: Main Conference)",
    month = jul,
    year = "2024",
    address = "Taiyuan, China",
    publisher = "Chinese Information Processing Society of China",
    url = "https://preview.aclanthology.org/fix-sig-urls/2024.ccl-1.102/",
    pages = "1320--1333",
    language = "eng",
    abstract = "Extractive Question Answering (EQA) in the few-shot learning scenario is one of the most challenging tasks of Machine Reading Comprehension (MRC). Some previous works employ external knowledge for data augmentation to improve the performance of few-shot extractive question answering. However, there are not always available external knowledge or language- and domain-specific NLP tools to deal with external knowledge such as part-of-speech taggers, syntactic parsers, and named-entity recognizers. In this paper, we present a novel Plug-and-Play Data Augmentation Component (PPDAC) for the few-shot extractive question answering, which includes a paraphrase generator and a paraphrase selector. Specifically, we generate multiple paraphrases of the question in the (question, passage, answer) triples using the paraphrase generator and then obtain highly similar statements via paraphrase selector to form more training data for fine-tuning. Extensive experiments on multiple EQA datasets show that our proposed plug-and-play data augmentation component significantly improves question-answering performance, and consistently outperforms state-of-the-art approaches in few-shot settings by a large margin.",
    internal-note = "NOTE(review): authors 2-5 appear to be entered given-name-first (e.g. 'Mingwen, Wang' is presumably Wang Mingwen, surname Wang); first author 'Qi, Huang' matches the 'Qi et al.' citation, so only the later names look flipped -- verify against the paper before reformatting, since reordering changes the rendered citation"
}
Markdown (Informal)
[PPDAC: A Plug-and-Play Data Augmentation Component for Few-shot Extractive Question Answering](https://preview.aclanthology.org/fix-sig-urls/2024.ccl-1.102/) (Qi et al., CCL 2024)
ACL