@inproceedings{chen-etal-2021-pseudo,
  title     = {Pseudo-Label Guided Unsupervised Domain Adaptation of Contextual Embeddings},
  author    = {Chen, Tianyu and Huang, Shaohan and Wei, Furu and Li, Jianxin},
  editor    = {Ben-David, Eyal and Cohen, Shay and McDonald, Ryan and Plank, Barbara and
               Reichart, Roi and Rotman, Guy and Ziser, Yftah},
  booktitle = {Proceedings of the Second Workshop on Domain Adaptation for NLP},
  month     = apr,
  year      = {2021},
  address   = {Kyiv, Ukraine},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.adaptnlp-1.2},
  pages     = {9--15},
  abstract  = {Contextual embedding models such as BERT can be easily fine-tuned on labeled samples to create a state-of-the-art model for many downstream tasks. However, the fine-tuned BERT model suffers considerably from unlabeled data when applied to a different domain. In unsupervised domain adaptation, we aim to train a model that works well on a target domain when provided with labeled source samples and unlabeled target samples. In this paper, we propose a pseudo-label guided method for unsupervised domain adaptation. Two models are fine-tuned on labeled source samples as pseudo labeling models. To learn representations for the target domain, one of those models is adapted by masked language modeling from the target domain. Then those models are used to assign pseudo-labels to target samples. We train the final model with those samples. We evaluate our method on named entity segmentation and sentiment analysis tasks. These experiments show that our approach outperforms baseline methods.},
}
@comment{
Markdown (Informal)
[Pseudo-Label Guided Unsupervised Domain Adaptation of Contextual Embeddings](https://aclanthology.org/2021.adaptnlp-1.2) (Chen et al., AdaptNLP 2021)
ACL
}