@inproceedings{xu-etal-2021-soft,
  title     = {Soft Layer Selection with Meta-Learning for Zero-Shot Cross-Lingual Transfer},
  author    = {Xu, Weijia and Haider, Batool and Krone, Jason and Mansour, Saab},
  editor    = {Lee, Hung-Yi and Mohtarami, Mitra and Li, Shang-Wen and Jin, Di and Korpusik, Mandy and Dong, Shuyan and Vu, Ngoc Thang and Hakkani-Tur, Dilek},
  booktitle = {Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing},
  month     = aug,
  year      = {2021},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.metanlp-1.2},
  doi       = {10.18653/v1/2021.metanlp-1.2},
  pages     = {11--18},
  abstract  = {Multilingual pre-trained contextual embedding models (Devlin et al., 2019) have achieved impressive performance on zero-shot cross-lingual transfer tasks. Finding the most effective fine-tuning strategy to fine-tune these models on high-resource languages so that it transfers well to the zero-shot languages is a non-trivial task. In this paper, we propose a novel meta-optimizer to soft-select which layers of the pre-trained model to freeze during fine-tuning. We train the meta-optimizer by simulating the zero-shot transfer scenario. Results on cross-lingual natural language inference show that our approach improves over the simple fine-tuning baseline and X-MAML (Nooralahzadeh et al., 2020).},
}
@comment{Markdown (Informal) citation, kept for reference:
[Soft Layer Selection with Meta-Learning for Zero-Shot Cross-Lingual Transfer](https://aclanthology.org/2021.metanlp-1.2) (Xu et al., MetaNLP 2021)
ACL}