@inproceedings{lan-etal-2020-empirical,
title = "An Empirical Study of Pre-trained Transformers for {A}rabic Information Extraction",
author = "Lan, Wuwei and
Chen, Yang and
Xu, Wei and
Ritter, Alan",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2020.emnlp-main.382/",
doi = "10.18653/v1/2020.emnlp-main.382",
pages = "4727--4734",
abstract = "Multilingual pre-trained Transformers, such as mBERT (Devlin et al., 2019) and XLM-RoBERTa (Conneau et al., 2020a), have been shown to enable effective cross-lingual zero-shot transfer. However, their performance on Arabic information extraction (IE) tasks is not very well studied. In this paper, we pre-train a customized bilingual BERT, dubbed GigaBERT, that is designed specifically for Arabic NLP and English-to-Arabic zero-shot transfer learning. We study GigaBERT{'}s effectiveness on zero-short transfer across four IE tasks: named entity recognition, part-of-speech tagging, argument role labeling, and relation extraction. Our best model significantly outperforms mBERT, XLM-RoBERTa, and AraBERT (Antoun et al., 2020) in both the supervised and zero-shot transfer settings. We have made our pre-trained models publicly available at: \url{https://github.com/lanwuwei/GigaBERT}."
}
Markdown (Informal)
[An Empirical Study of Pre-trained Transformers for Arabic Information Extraction](https://aclanthology.org/2020.emnlp-main.382/) (Lan et al., EMNLP 2020)