@inproceedings{bari-etal-2021-nearest,
title = "Nearest Neighbour Few-Shot Learning for Cross-lingual Classification",
author = "Bari, M Saiful and
Haider, Batool and
Mansour, Saab",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.emnlp-main.131/",
doi = "10.18653/v1/2021.emnlp-main.131",
pages = "1745--1753",
    abstract = "Even though large pre-trained multilingual models (e.g. mBERT, XLM-R) have led to significant performance gains on a wide range of cross-lingual NLP tasks, success on many downstream tasks still relies on the availability of sufficient annotated data. Traditional fine-tuning of pre-trained models using only a few target samples can cause over-fitting. This can be quite limiting as most languages in the world are under-resourced. In this work, we investigate cross-lingual adaptation using a simple nearest-neighbor few-shot ($<15$ samples) inference technique for classification tasks. We experiment using a total of 16 distinct languages across two NLP tasks: XNLI and PAWS-X. Our approach consistently improves traditional fine-tuning using only a handful of labeled samples in target locales. We also demonstrate its generalization capability across tasks."
}
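The abstract describes nearest-neighbour few-shot inference: classify a target-language query by comparing its embedding against a handful (fewer than 15) of labeled target samples. Below is a minimal sketch of that idea in Python, assuming a generic multilingual sentence encoder (e.g. mBERT or XLM-R); the function names and the k-NN majority vote here are illustrative assumptions, not the authors' actual implementation.

```python
# Sketch of nearest-neighbour few-shot classification over sentence
# embeddings, in the spirit of the abstract above. The encoder is left
# abstract; all names are illustrative, not taken from the paper's code.
import numpy as np


def cosine_sim(query: np.ndarray, support: np.ndarray) -> np.ndarray:
    """Cosine similarity between one query vector and a matrix of support vectors."""
    query = query / np.linalg.norm(query)
    support = support / np.linalg.norm(support, axis=1, keepdims=True)
    return support @ query


def knn_predict(query_emb: np.ndarray,
                support_embs: np.ndarray,
                support_labels: list[str],
                k: int = 3) -> str:
    """Label a query by majority vote over its k nearest labeled neighbours."""
    sims = cosine_sim(query_emb, support_embs)
    top_k = np.argsort(-sims)[:k]          # indices of the k most similar samples
    votes = [support_labels[i] for i in top_k]
    return max(set(votes), key=votes.count)


# Hypothetical usage with a handful (<15) of labeled target-locale samples:
# support_embs = encode(support_sentences)   # encode() = assumed multilingual encoder
# prediction = knn_predict(encode(query_sentence), support_embs, support_labels, k=3)
```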
[Nearest Neighbour Few-Shot Learning for Cross-lingual Classification](https://aclanthology.org/2021.emnlp-main.131/) (Bari et al., EMNLP 2021)