@inproceedings{nijkamp-etal-2021-script,
title = "{SCRIPT}: Self-Critic {P}re{T}raining of Transformers",
author = "Nijkamp, Erik and
Pang, Bo and
Wu, Ying Nian and
Xiong, Caiming",
editor = "Toutanova, Kristina and
Rumshisky, Anna and
Zettlemoyer, Luke and
Hakkani-Tur, Dilek and
Beltagy, Iz and
Bethard, Steven and
Cotterell, Ryan and
Chakraborty, Tanmoy and
Zhou, Yichao",
booktitle = "Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.naacl-main.409/",
doi = "10.18653/v1/2021.naacl-main.409",
pages = "5196--5202",
abstract = "We introduce Self-CRItic Pretraining Transformers (SCRIPT) for representation learning of text. The popular masked language modeling (MLM) pretraining methods like BERT replace some tokens with [MASK] and an encoder is trained to recover them, while ELECTRA trains a discriminator to detect replaced tokens proposed by a generator. In contrast, we train a language model as in MLM and further derive a discriminator or critic on top of the encoder without using any additional parameters. That is, the model itself is a critic. SCRIPT combines MLM training and discriminative training for learning rich representations and compute- and sample-efficiency. We demonstrate improved sample-efficiency in pretraining and enhanced representations evidenced by improved downstream task performance on GLUE and SQuAD over strong baselines. Also, the self-critic scores can be directly used as pseudo-log-likelihood for efficient scoring."
}
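
A minimal, illustrative sketch of the "pseudo-log-likelihood for efficient scoring" idea mentioned at the end of the abstract: the model's own MLM softmax head is reused as the critic, so a sentence can be scored in a single unmasked forward pass. This is an assumption-laden approximation using a generic HuggingFace masked LM (`bert-base-uncased` is only a placeholder checkpoint), not the authors' released implementation, and the exact scoring function in the paper may differ.

```python
# Hypothetical sketch (not the authors' code): score a sentence with a
# masked language model by reusing its own MLM softmax head as a critic,
# in the spirit of the self-critic scoring described in the abstract.
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")  # placeholder checkpoint
model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")
model.eval()

def self_critic_score(text: str) -> float:
    """Sum of log p(token | full context) under the model's own MLM head.

    One unmasked forward pass; every position is scored by the same
    softmax head used for MLM training, so the model acts as its own critic.
    """
    enc = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        logits = model(**enc).logits          # (1, seq_len, vocab_size)
    log_probs = torch.log_softmax(logits, dim=-1)
    ids = enc["input_ids"][0]                 # (seq_len,)
    token_scores = log_probs[0, torch.arange(len(ids)), ids]
    return token_scores[1:-1].sum().item()    # skip [CLS] and [SEP]

print(self_critic_score("The cat sat on the mat."))
```

Standard masked-LM pseudo-log-likelihood needs one masked forward pass per token; the single-pass scoring above illustrates the kind of efficiency the abstract attributes to self-critic scores.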
Markdown (Informal)
[SCRIPT: Self-Critic PreTraining of Transformers](https://aclanthology.org/2021.naacl-main.409/) (Nijkamp et al., NAACL 2021)
ACL
- Erik Nijkamp, Bo Pang, Ying Nian Wu, and Caiming Xiong. 2021. SCRIPT: Self-Critic PreTraining of Transformers. In Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, pages 5196–5202, Online. Association for Computational Linguistics.