@inproceedings{chi-etal-2023-cross,
    title     = {Can {Cross-Lingual} Transferability of Multilingual {Transformers} Be Activated Without End-Task Data?},
    author    = {Chi, Zewen and
                 Huang, Heyan and
                 Mao, Xian-Ling},
    editor    = {Rogers, Anna and
                 Boyd-Graber, Jordan and
                 Okazaki, Naoaki},
    booktitle = {Findings of the Association for Computational Linguistics: {ACL} 2023},
    month     = jul,
    year      = {2023},
    address   = {Toronto, Canada},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2023.findings-acl.796/},
    doi       = {10.18653/v1/2023.findings-acl.796},
    pages     = {12572--12584},
    abstract  = {Pretrained multilingual Transformers have achieved great success in cross-lingual transfer learning. Current methods typically activate the cross-lingual transferability of multilingual Transformers by fine-tuning them on end-task data. However, the methods cannot perform cross-lingual transfer when end-task data are unavailable. In this work, we explore whether the cross-lingual transferability can be activated without end-task data. We propose a cross-lingual transfer method, named PlugIn-X. PlugIn-X disassembles monolingual and multilingual Transformers into sub-modules, and reassembles them to be the multilingual end-task model. After representation adaptation, PlugIn-X finally performs cross-lingual transfer in a plug-and-play style. Experimental results show that PlugIn-X successfully activates the cross-lingual transferability of multilingual Transformers without accessing end-task data. Moreover, we analyze how the cross-model representation alignment affects the cross-lingual transferability.},
}
Markdown (Informal)
[Can Cross-Lingual Transferability of Multilingual Transformers Be Activated Without End-Task Data?](https://aclanthology.org/2023.findings-acl.796/) (Chi et al., Findings 2023)
ACL