@inproceedings{yerramilli-etal-2021-multi,
title = "Multi-task pre-finetuning for zero-shot cross lingual transfer",
author = "Yerramilli, Moukthika and
Varma, Pritam and
Dwarakanath, Anurag",
editor = "Bandyopadhyay, Sivaji and
Devi, Sobha Lalitha and
Bhattacharyya, Pushpak",
booktitle = "Proceedings of the 18th International Conference on Natural Language Processing (ICON)",
month = dec,
year = "2021",
address = "National Institute of Technology Silchar, Silchar, India",
publisher = "NLP Association of India (NLPAI)",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.icon-main.57/",
pages = "474--480",
abstract = "Building machine learning models for low resource languages is extremely challenging due to the lack of available training data (either un-annotated or annotated). To support such scenarios, zero-shot cross lingual transfer is used where the machine learning model is trained on a resource rich language and is directly tested on the resource poor language. In this paper, we present a technique which improves the performance of zero-shot cross lingual transfer. Our method performs multi-task pre-finetuning on a resource rich language using a multilingual pre-trained model. The pre-finetuned model is then tested in a zero-shot manner on the resource poor languages. We test the performance of our method on 8 languages and for two tasks, namely, Intent Classification (IC) {\&} Named Entity Recognition (NER) using the MultiAtis++ dataset. The results showed that our method improves IC performance in 7 out of 8 languages and NER performance in 4 languages. Our method also leads to faster convergence during finetuning. The usage of pre-finetuning demonstrates a data efficient way for supporting new languages and geographies across the world."
}