@inproceedings{masala-etal-2020-robert,
title = "{R}o{BERT} {--} A {R}omanian {BERT} Model",
author = "Masala, Mihai and
Ruseti, Stefan and
Dascalu, Mihai",
editor = "Scott, Donia and
Bel, Nuria and
Zong, Chengqing",
booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "International Committee on Computational Linguistics",
    url = "https://aclanthology.org/2020.coling-main.581/",
doi = "10.18653/v1/2020.coling-main.581",
pages = "6626--6637",
    abstract = "Deep pre-trained language models tend to become ubiquitous in the field of Natural Language Processing (NLP). These models learn contextualized representations from huge amounts of unlabeled text data and obtain state-of-the-art results on a multitude of NLP tasks by enabling efficient transfer learning. For languages other than English, there are limited options for such models, most of which are trained only on multi-lingual corpora. In this paper we introduce a Romanian-only pre-trained BERT model {--} RoBERT {--} and compare it with different multi-lingual models on seven Romanian-specific NLP tasks grouped into three categories, namely: sentiment analysis, dialect and cross-dialect topic identification, and diacritics restoration. Our model surpasses the multi-lingual models, as well as another mono-lingual implementation of BERT, on all tasks."
}
Markdown (Informal):
[RoBERT – A Romanian BERT Model](https://aclanthology.org/2020.coling-main.581/) (Masala et al., COLING 2020)

ACL:
Mihai Masala, Stefan Ruseti, and Mihai Dascalu. 2020. RoBERT – A Romanian BERT Model. In Proceedings of the 28th International Conference on Computational Linguistics, pages 6626–6637, Barcelona, Spain (Online). International Committee on Computational Linguistics.