@inproceedings{peng-etal-2021-domain,
title = "Is Domain Adaptation Worth Your Investment? Comparing {BERT} and {F}in{BERT} on Financial Tasks",
author = "Peng, Bo and
Chersoni, Emmanuele and
Hsu, Yu-Yin and
Huang, Chu-Ren",
editor = "Hahn, Udo and
Hoste, Veronique and
Stent, Amanda",
booktitle = "Proceedings of the Third Workshop on Economics and Natural Language Processing",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.econlp-1.5/",
doi = "10.18653/v1/2021.econlp-1.5",
pages = "37--44",
abstract = "With the recent rise in popularity of Transformer models in Natural Language Processing, research efforts have been dedicated to the development of domain-adapted versions of BERT-like architectures. In this study, we focus on FinBERT, a Transformer model trained on text from the financial domain. By comparing its performances with the original BERT on a wide variety of financial text processing tasks, we found continual pretraining from the original model to be the more beneficial option. Domain-specific pretraining from scratch, conversely, seems to be less effective."
}
Markdown (Informal)
[Is Domain Adaptation Worth Your Investment? Comparing BERT and FinBERT on Financial Tasks](https://aclanthology.org/2021.econlp-1.5/) (Peng et al., ECONLP 2021)