@inproceedings{nguyen-etal-2025-kda,
title = "{KDA}: Knowledge Distillation Adapter for Cross-Lingual Transfer",
author = "Nguyen, Ta-Bao and
Phan, Nguyen-Phuong and
Le, Tung and
Nguyen, Huy Tien",
editor = "Flek, Lucie and
Narayan, Shashi and
Phương, Lê Hồng and
Pei, Jiahuan",
booktitle = "Proceedings of the 18th International Natural Language Generation Conference",
month = oct,
year = "2025",
address = "Hanoi, Vietnam",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-lei-gao-usc/2025.inlg-main.8/",
pages = "122--133",
abstract = "State-of-the-art cross-lingual transfer often relies on massive multilingual models, but their prohibitive size and computational cost limit their practicality for low-resource languages. An alternative is to adapt powerful, task-specialized monolingual models, but this presents challenges in bridging the vocabulary and structural gaps between languages. To address this, we propose KDA, a Knowledge Distillation Adapter framework that efficiently adapts a fine-tuned, high-resource monolingual model to a low-resource target language. KDA utilizes knowledge distillation to transfer the source model{'}s task-solving capabilities to the target language in a parameter-efficient manner. In addition, we introduce a novel adapter architecture that integrates source-language token embeddings while learning new positional embeddings, directly mitigating cross-lingual representational mismatches. Our empirical results on zero-shot transfer for Vietnamese Sentiment Analysis demonstrate that KDA significantly outperforms existing methods, offering a new, effective, and computationally efficient pathway for cross-lingual transfer."
}

Markdown (Informal)
[KDA: Knowledge Distillation Adapter for Cross-Lingual Transfer](https://aclanthology.org/2025.inlg-main.8/) (Nguyen et al., INLG 2025)
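
The abstract describes two ingredients: a knowledge distillation objective that transfers the fine-tuned source model's task behaviour to the target language, and an adapter that reuses the source model's token embeddings while learning new positional embeddings. The following is a minimal PyTorch sketch of what such a setup could look like; it is an illustration under stated assumptions, not the authors' implementation, and every name in it (CrossLingualAdapter, distillation_loss, the bottleneck size, the temperature) is hypothetical.

```python
# Hypothetical sketch of a KDA-style setup: NOT the paper's released code.
# Assumes a standard PyTorch teacher/student distillation recipe.
import torch
import torch.nn as nn
import torch.nn.functional as F


class CrossLingualAdapter(nn.Module):
    """Reuses frozen source-language token embeddings and learns fresh
    positional embeddings plus a small bottleneck adapter (assumed design)."""

    def __init__(self, source_token_embeddings: nn.Embedding,
                 max_positions: int, hidden_dim: int):
        super().__init__()
        self.token_embeddings = source_token_embeddings
        self.token_embeddings.weight.requires_grad = False      # keep source vocabulary fixed
        self.position_embeddings = nn.Embedding(max_positions, hidden_dim)  # learned anew
        self.down = nn.Linear(hidden_dim, hidden_dim // 4)      # bottleneck down-projection
        self.up = nn.Linear(hidden_dim // 4, hidden_dim)        # bottleneck up-projection

    def forward(self, input_ids: torch.Tensor) -> torch.Tensor:
        positions = torch.arange(input_ids.size(1), device=input_ids.device)
        hidden = self.token_embeddings(input_ids) + self.position_embeddings(positions)
        return hidden + self.up(F.gelu(self.down(hidden)))      # residual adapter output


def distillation_loss(student_logits: torch.Tensor,
                      teacher_logits: torch.Tensor,
                      temperature: float = 2.0) -> torch.Tensor:
    """Soft-label KL distillation from the fine-tuned source-language teacher."""
    teacher_probs = F.softmax(teacher_logits / temperature, dim=-1)
    student_log_probs = F.log_softmax(student_logits / temperature, dim=-1)
    return F.kl_div(student_log_probs, teacher_probs,
                    reduction="batchmean") * temperature ** 2
```

In this sketch only the adapter parameters (new positional embeddings and the bottleneck projections) would be trained against the distillation loss, which is one plausible way to realise the parameter-efficient transfer the abstract refers to.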