@inproceedings{ye-etal-2024-mosecrot,
title = "{M}o{SEC}ro{T}: Model Stitching with Static Word Embeddings for Crosslingual Zero-shot Transfer",
author = {Ye, Haotian and
Liu, Yihong and
Ma, Chunlan and
Sch{\"u}tze, Hinrich},
editor = "Tafreshi, Shabnam and
Akula, Arjun and
Sedoc, Jo{\~a}o and
Drozd, Aleksandr and
Rogers, Anna and
Rumshisky, Anna",
booktitle = "Proceedings of the Fifth Workshop on Insights from Negative Results in NLP",
month = jun,
year = "2024",
address = "Mexico City, Mexico",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.insights-1.1",
doi = "10.18653/v1/2024.insights-1.1",
pages = "1--7",
abstract = "Transformer-based pre-trained language models (PLMs) have achieved remarkable performance in various natural language processing (NLP) tasks. However, pre-training such models can take considerable resources that are almost only available to high-resource languages. On the contrary, static word embeddings are easier to train in terms of computing resources and the amount of data required. In this paper, we introduce MoSECroT (Model Stitching with Static Word Embeddings for Crosslingual Zero-shot Transfer, a novel and challenging task that is especially relevant to low-resource languages for which static word embeddings are available. To tackle the task, we present the first framework that leverages relative representations to construct a common space for the embeddings of a source language PLM and the static word embeddings of a target language. In this way, we can train the PLM on source-language training data and perform zero-shot transfer to the target language by simply swapping the embedding layer. However, through extensive experiments on two classification datasets, we show that although our proposed framework is competitive with weak baselines when addressing MoSECroT, it fails to achieve competitive results compared with some strong baselines. In this paper, we attempt to explain this negative result and provide several thoughts on possible improvement.",
}
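
The stitching idea described in the abstract can be sketched roughly as follows. This is a minimal illustrative sketch with placeholder random embeddings, not the authors' implementation: the anchor selection, the cosine-based relative representation, and the softmax-weighted mapping back into the source space are assumptions made here for illustration.

```python
# Illustrative sketch (not the paper's exact method): relative representations
# over anchor translation pairs, used to place target static embeddings in the
# source PLM's embedding space so the embedding layer can be swapped.
import numpy as np

rng = np.random.default_rng(0)

# Placeholder data: source PLM input embeddings and target static word embeddings.
d_src, d_tgt = 768, 300
src_emb = rng.normal(size=(1000, d_src))   # source-language vocabulary embeddings
tgt_emb = rng.normal(size=(800, d_tgt))    # target-language static word embeddings

# Hypothetical anchor pairs (e.g. from a bilingual dictionary): aligned indices.
anchor_src = rng.choice(1000, size=100, replace=False)
anchor_tgt = rng.choice(800, size=100, replace=False)

def relative_repr(emb, anchor_idx):
    """Cosine similarity of every word to the anchor words (relative representation)."""
    x = emb / np.linalg.norm(emb, axis=1, keepdims=True)
    return x @ x[anchor_idx].T           # shape: (vocab, n_anchors)

rel_tgt = relative_repr(tgt_emb, anchor_tgt)

# One simple stitching choice: map each target word into the source space as a
# softmax-weighted combination of the corresponding source anchor embeddings.
weights = np.exp(rel_tgt / 0.1)
weights /= weights.sum(axis=1, keepdims=True)
tgt_in_src_space = weights @ src_emb[anchor_src]   # shape: (800, 768)

# Zero-shot transfer: replace the PLM's embedding matrix with tgt_in_src_space
# (together with the target tokenizer) and run the source-trained classifier.
print(tgt_in_src_space.shape)
```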