@inproceedings{chi-etal-2020-monolingual,
title = "Can Monolingual Pretrained Models Help Cross-Lingual Classification?",
author = "Chi, Zewen and
Dong, Li and
Wei, Furu and
Mao, Xianling and
Huang, Heyan",
editor = "Wong, Kam-Fai and
Knight, Kevin and
Wu, Hua",
booktitle = "Proceedings of the 1st Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 10th International Joint Conference on Natural Language Processing",
month = dec,
year = "2020",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.aacl-main.2",
pages = "12--17",
abstract = "Multilingual pretrained language models (such as multilingual BERT) have achieved impressive results for cross-lingual transfer. However, due to the constant model capacity, multilingual pre-training usually lags behind the monolingual competitors. In this work, we present two approaches to improve zero-shot cross-lingual classification, by transferring the knowledge from monolingual pretrained models to multilingual ones. Experimental results on two cross-lingual classification benchmarks show that our methods outperform vanilla multilingual fine-tuning.",
}
Markdown (Informal)
[Can Monolingual Pretrained Models Help Cross-Lingual Classification?](https://aclanthology.org/2020.aacl-main.2) (Chi et al., AACL 2020)
ACL
Zewen Chi, Li Dong, Furu Wei, Xianling Mao, and Heyan Huang. 2020. Can Monolingual Pretrained Models Help Cross-Lingual Classification? In Proceedings of the 1st Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 10th International Joint Conference on Natural Language Processing, pages 12–17, Suzhou, China. Association for Computational Linguistics.