@inproceedings{chen-etal-2022-sat,
title = "{SAT}: Improving Semi-Supervised Text Classification with Simple Instance-Adaptive Self-Training",
author = "Chen, Hui and
Han, Wei and
Poria, Soujanya",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2022",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.findings-emnlp.456/",
doi = "10.18653/v1/2022.findings-emnlp.456",
pages = "6141--6146",
abstract = "Self-training methods have been explored in recent years and have exhibited great performance in improving semi-supervised learning. This work presents a simple instance-adaptive self-training method (SAT) for semi-supervised text classification. SAT first generates two augmented views for each unlabeled data, and then trains a meta learner to automatically identify the relative strength of augmentations based on the similarity between the original view and the augmented views. The weakly-augmented view is fed to the model to produce a pseudo-label and the strongly-augmented view is used to train the model to predict the same pseudo-label. We conducted extensive experiments and analyses on three text classification datasets and found that with varying sizes of labeled training data, SAT consistently shows competitive performance compared to existing semi-supervised learning methods."
}
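
For orientation, below is a minimal sketch of the training step the abstract describes: two augmented views per unlabeled instance, the view closer to the original treated as the weak view and used to produce a pseudo-label, and the other (strong) view trained to predict that label. The helpers `augment_a`, `augment_b`, `similarity`, and `encode`, and the confidence threshold, are assumptions for illustration; the paper ranks the views with a trained meta learner, not the fixed similarity heuristic used here.

```python
import torch
import torch.nn.functional as F

def sat_step(model, unlabeled_texts, augment_a, augment_b,
             similarity, encode, threshold=0.9):
    """One instance-adaptive self-training step (sketch, not the paper's code).

    `augment_a`/`augment_b` produce two augmented views of a text,
    `similarity` scores a view against the original, and `encode` maps a
    text to a model-ready batch of size 1. All four are hypothetical.
    """
    loss, n = 0.0, 0
    for text in unlabeled_texts:
        view_a, view_b = augment_a(text), augment_b(text)
        # Instance-adaptive choice: the view more similar to the original
        # is treated as weak; the paper learns this with a meta learner.
        if similarity(text, view_a) >= similarity(text, view_b):
            weak, strong = view_a, view_b
        else:
            weak, strong = view_b, view_a
        # Pseudo-label from the weak view (no gradient).
        with torch.no_grad():
            probs = F.softmax(model(encode(weak)), dim=-1)
            conf, pseudo = probs.max(dim=-1)
        # Confidence gating is an assumption, common in FixMatch-style
        # self-training; the abstract does not specify a threshold.
        if conf.item() >= threshold:
            logits = model(encode(strong))
            loss = loss + F.cross_entropy(logits, pseudo)
            n += 1
    return loss / max(n, 1)
```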