@inproceedings{zhou-etal-2022-flipda,
title = "{F}lip{DA}: Effective and Robust Data Augmentation for Few-Shot Learning",
author = "Zhou, Jing and
Zheng, Yanan and
Tang, Jie and
Li, Jian and
Yang, Zhilin",
editor = "Muresan, Smaranda and
Nakov, Preslav and
Villavicencio, Aline",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.acl-long.592/",
doi = "10.18653/v1/2022.acl-long.592",
pages = "8646--8665",
abstract = "Most previous methods for text data augmentation are limited to simple tasks and weak baselines. We explore data augmentation on hard tasks (i.e., few-shot natural language understanding) and strong baselines (i.e., pretrained models with over one billion parameters). Under this setting, we reproduced a large number of previous augmentation methods and found that these methods bring marginal gains at best and sometimes degrade the performance much. To address this challenge, we propose a novel data augmentation method FlipDA that jointly uses a generative model and a classifier to generate label-flipped data. Central to the idea of FlipDA is the discovery that generating label-flipped data is more crucial to the performance than generating label-preserved data. Experiments show that FlipDA achieves a good tradeoff between effectiveness and robustness{---}it substantially improves many tasks while not negatively affecting the others."
}