@inproceedings{kang-etal-2020-neural,
title = "Neural Mask Generator: Learning to Generate Adaptive Word Maskings for Language Model Adaptation",
author = "Kang, Minki and
Han, Moonsu and
Hwang, Sung Ju",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.emnlp-main.493/",
doi = "10.18653/v1/2020.emnlp-main.493",
pages = "6102--6120",
    abstract = "We propose a method to automatically generate domain- and task-adaptive maskings of a given text for self-supervised pre-training, such that we can effectively adapt the language model to a particular target task (e.g., question answering). Specifically, we present a novel reinforcement learning-based framework that learns the masking policy, such that using the generated masks for further pre-training of the target language model helps improve task performance on unseen texts. We use off-policy actor-critic with entropy regularization and experience replay for reinforcement learning, and propose a Transformer-based policy network that can consider the relative importance of words in a given text. We validate our Neural Mask Generator (NMG) on several question answering and text classification datasets using BERT and DistilBERT as the language models, on which it outperforms rule-based masking strategies by automatically learning optimal adaptive maskings."
}
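
The abstract describes a Transformer-based policy network that scores the words of a passage and samples positions to mask for further MLM pre-training. Below is a minimal PyTorch sketch of that core idea only; it is not the authors' implementation, and names such as `MaskingPolicy` and `sample_mask` are hypothetical. The paper's actual training loop (off-policy actor-critic with entropy regularization and experience replay, rewarded by downstream task performance) is omitted here.

```python
# Illustrative sketch of a learned masking policy, assuming PyTorch.
# A Transformer encoder assigns one logit per token position; a mask is
# sampled from the resulting categorical distribution over positions.
import torch
import torch.nn as nn

class MaskingPolicy(nn.Module):
    """Scores token positions; higher score -> more likely to be masked."""
    def __init__(self, vocab_size: int, d_model: int = 128,
                 nhead: int = 4, num_layers: int = 2):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, d_model)
        layer = nn.TransformerEncoderLayer(d_model, nhead, batch_first=True)
        self.encoder = nn.TransformerEncoder(layer, num_layers)
        self.score = nn.Linear(d_model, 1)  # one logit per position

    def forward(self, token_ids: torch.Tensor) -> torch.Tensor:
        h = self.encoder(self.embed(token_ids))   # (batch, seq, d_model)
        return self.score(h).squeeze(-1)          # (batch, seq) logits

def sample_mask(logits: torch.Tensor, mask_ratio: float = 0.15):
    """Sample floor(mask_ratio * seq) positions per sequence, without
    replacement, from the policy's distribution over positions."""
    batch, seq = logits.shape
    k = max(1, int(seq * mask_ratio))
    probs = torch.softmax(logits, dim=-1)
    picks = torch.multinomial(probs, k, replacement=False)  # (batch, k)
    mask = torch.zeros(batch, seq, dtype=torch.bool)
    mask.scatter_(1, picks, True)
    return mask, probs

if __name__ == "__main__":
    policy = MaskingPolicy(vocab_size=30522)       # BERT-sized vocab
    tokens = torch.randint(0, 30522, (2, 32))      # two toy sequences
    mask, probs = sample_mask(policy(tokens))
    # Positions where mask is True would be replaced with [MASK] before
    # further pre-training; in the paper, the RL reward for the policy
    # comes from downstream task performance (e.g., QA accuracy).
    print(mask.sum(dim=1))  # k positions masked per sequence
```

In this sketch the policy is differentiable through `probs`, so a REINFORCE-style or actor-critic gradient can weight the log-probabilities of the sampled positions by a task-derived reward; the choice of reward and the replay machinery are where the paper's method actually lives.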