@inproceedings{oh-etal-2021-bertac,
title = "{BERTAC}: Enhancing Transformer-based Language Models with Adversarially Pretrained Convolutional Neural Networks",
author = "Oh, Jong-Hoon and
Iida, Ryu and
Kloetzer, Julien and
Torisawa, Kentaro",
editor = "Zong, Chengqing and
Xia, Fei and
Li, Wenjie and
Navigli, Roberto",
booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing (Volume 1: Long Papers)",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.acl-long.164/",
doi = "10.18653/v1/2021.acl-long.164",
pages = "2103--2115",
abstract = "Transformer-based language models (TLMs), such as BERT, ALBERT and GPT-3, have shown strong performance in a wide range of NLP tasks and currently dominate the field of NLP. However, many researchers wonder whether these models can maintain their dominance forever. Of course, we do not have answers now, but, as an attempt to find better neural architectures and training schemes, we pretrain a simple CNN using a GAN-style learning scheme and Wikipedia data, and then integrate it with standard TLMs. We show that on the GLUE tasks, the combination of our pretrained CNN with ALBERT outperforms the original ALBERT and achieves a similar performance to that of SOTA. Furthermore, on open-domain QA (Quasar-T and SearchQA), the combination of the CNN with ALBERT or RoBERTa achieved stronger performance than SOTA and the original TLMs. We hope that this work provides a hint for developing a novel strong network architecture along with its training scheme. Our source code and models are available at \url{https://github.com/nict-wisdom/bertac}."
}