@inproceedings{dao-quang-nguyen-minh-2020-uet,
title = "{UET} at {WNUT}-2020 Task 2: A Study of Combining Transfer Learning Methods for Text Classification with {R}o{BERT}a",
author = "Dao Quang, Huy and
Nguyen Minh, Tam",
editor = "Xu, Wei and
Ritter, Alan and
Baldwin, Tim and
Rahimi, Afshin",
booktitle = "Proceedings of the Sixth Workshop on Noisy User-generated Text (W-NUT 2020)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.wnut-1.71/",
doi = "10.18653/v1/2020.wnut-1.71",
pages = "475--479",
abstract = "This paper reports our approach and the results of our experiments for W-NUT task 2: Identification of Informative COVID-19 English Tweets. In this paper, we test out the effectiveness of transfer learning method with state of the art language models as RoBERTa on this text classification task. Moreover, we examine the benefit of applying additional fine-tuning and training techniques including fine-tuning discrimination, gradual unfreezing as well as our custom head for the classifier. Our best model results in a high F1-score of 89.89 on the task`s private test dataset and that of 90.96 on public test set without ensembling multiple models and additional data."
}
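
The abstract names three transfer-learning techniques (a custom classification head, discriminative fine-tuning, gradual unfreezing) applied to RoBERTa. Below is a minimal, hedged sketch of what such a setup could look like with PyTorch and Hugging Face transformers; it is not the authors' released code, and the model name, head architecture, learning rates, and decay factor are illustrative assumptions.

```python
# Hedged sketch (not the paper's implementation): custom head on RoBERTa,
# discriminative (layer-wise) learning rates, and gradual unfreezing.
import torch
import torch.nn as nn
from transformers import RobertaModel


class RobertaWithCustomHead(nn.Module):
    """RoBERTa encoder followed by a small custom classifier head (assumed sizes)."""

    def __init__(self, num_labels=2, dropout=0.3):
        super().__init__()
        self.roberta = RobertaModel.from_pretrained("roberta-base")
        hidden = self.roberta.config.hidden_size
        # Custom head: dense -> nonlinearity -> dropout -> output layer.
        self.head = nn.Sequential(
            nn.Linear(hidden, hidden),
            nn.Tanh(),
            nn.Dropout(dropout),
            nn.Linear(hidden, num_labels),
        )

    def forward(self, input_ids, attention_mask):
        out = self.roberta(input_ids=input_ids, attention_mask=attention_mask)
        cls = out.last_hidden_state[:, 0]  # representation of the <s> token
        return self.head(cls)


def discriminative_param_groups(model, base_lr=2e-5, decay=0.95):
    """Discriminative fine-tuning: deeper (lower) layers get smaller learning rates."""
    groups = [{"params": model.head.parameters(), "lr": base_lr}]
    layers = list(model.roberta.encoder.layer)  # 12 transformer blocks in roberta-base
    for depth, layer in enumerate(reversed(layers)):
        groups.append({"params": layer.parameters(),
                       "lr": base_lr * decay ** (depth + 1)})
    groups.append({"params": model.roberta.embeddings.parameters(),
                   "lr": base_lr * decay ** (len(layers) + 1)})
    return groups


def gradually_unfreeze(model, epoch):
    """Gradual unfreezing: unfreeze one more encoder block (top-down) each epoch."""
    layers = list(model.roberta.encoder.layer)
    for p in model.roberta.parameters():
        p.requires_grad = False
    for layer in layers[len(layers) - min(epoch + 1, len(layers)):]:
        for p in layer.parameters():
            p.requires_grad = True
    # The custom head stays trainable throughout.


model = RobertaWithCustomHead()
optimizer = torch.optim.AdamW(discriminative_param_groups(model))
# In a training loop, call gradually_unfreeze(model, epoch) at the start of each epoch.
```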