@inproceedings{rupak-raj-etal-2023-active,
title = "Active Learning Approach for Fine-Tuning Pre-Trained {ASR} Model for a Low-Resourced Language: A Case Study of {N}epali",
author = "Ghimire, Rupak Raj and
Bal, Bal Krishna and
Poudyal, Prakash",
editor = "D. Pawar, Jyoti and
Lalitha Devi, Sobha",
booktitle = "Proceedings of the 20th International Conference on Natural Language Processing (ICON)",
month = dec,
year = "2023",
address = "Goa University, Goa, India",
publisher = "NLP Association of India (NLPAI)",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.icon-1.9/",
pages = "82--89",
    abstract = "Fine-tuning a pre-trained language model is a technique that can be used to enhance the technologies of low-resourced languages. The unsupervised approach can fine-tune any pre-trained model with minimal or even no language-specific resources. It is highly advantageous, particularly for languages that possess limited computational resources. We present a novel approach for fine-tuning a pre-trained Automatic Speech Recognition (ASR) model that is suitable for low-resource languages. Our method involves iterative fine-tuning of a pre-trained ASR model. mms-1b is selected as the pre-trained seed model for fine-tuning. We take the Nepali language as a case study for this research work. Our approach achieved a CER of 6.77{\%}, outperforming all previously recorded CER values for Nepali ASR systems."
}
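The abstract's method, roughly, is to start from the mms-1b checkpoint and iteratively fine-tune it on Nepali speech, tracking CER. Below is a minimal, hypothetical Python sketch of that setup using Hugging Face Transformers; the checkpoint name (`facebook/mms-1b-all`), the Nepali adapter code (`"npi"`), and the confidence heuristic are illustrative assumptions, not the authors' code.

```python
import torch
import evaluate  # pip install evaluate jiwer
from transformers import AutoProcessor, Wav2Vec2ForCTC

# Seed model: the paper starts from mms-1b; the "-all" adapter variant
# and the "npi" Nepali adapter used here are illustrative assumptions.
CHECKPOINT = "facebook/mms-1b-all"
processor = AutoProcessor.from_pretrained(CHECKPOINT)
model = Wav2Vec2ForCTC.from_pretrained(CHECKPOINT)
processor.tokenizer.set_target_lang("npi")  # ISO 639-3 code for Nepali
model.load_adapter("npi")
model.eval()

# Evaluation metric from the abstract: character error rate (CER),
# e.g. cer_metric.compute(predictions=[hyp], references=[ref]).
cer_metric = evaluate.load("cer")

def transcribe(waveform, sampling_rate=16_000):
    """Greedy CTC decode of one 16 kHz mono waveform (1-D float array)."""
    inputs = processor(waveform, sampling_rate=sampling_rate, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    return processor.batch_decode(torch.argmax(logits, dim=-1))[0]

def confidence(waveform, sampling_rate=16_000):
    """Mean max softmax probability over frames: a simple uncertainty
    proxy an active-learning loop could use to pick utterances for
    annotation (hypothetical heuristic, not the paper's criterion)."""
    inputs = processor(waveform, sampling_rate=sampling_rate, return_tensors="pt")
    with torch.no_grad():
        probs = torch.softmax(model(**inputs).logits, dim=-1)
    return probs.max(dim=-1).values.mean().item()
```

An iterative round would then transcribe the unlabeled pool, route low-confidence utterances for transcription, fine-tune on the enlarged labeled set, and re-measure CER; the 6.77% figure is the paper's reported result, not something this sketch reproduces.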
Markdown (Informal)
[Active Learning Approach for Fine-Tuning Pre-Trained ASR Model for a Low-Resourced Language: A Case Study of Nepali](https://preview.aclanthology.org/fix-sig-urls/2023.icon-1.9/) (Ghimire et al., ICON 2023)