@inproceedings{kris-suppa-2025-slovakbabylm,
  title     = {{SlovakBabyLM}: Replication of the {BabyLM} and Sample-efficient Pretraining for a Low-Resource Language},
  author    = {Kri{\v{s}}, {\v{L}}ubo{\v{s}} and
               Suppa, Marek},
  editor    = {Charpentier, Lucas and
               Choshen, Leshem and
               Cotterell, Ryan and
               Gul, Mustafa Omer and
               Hu, Michael Y. and
               Liu, Jing and
               Jumelet, Jaap and
               Linzen, Tal and
               Mueller, Aaron and
               Ross, Candace and
               Shah, Raj Sanjay and
               Warstadt, Alex and
               Wilcox, Ethan Gotlieb and
               Williams, Adina},
  booktitle = {Proceedings of the First {BabyLM} Workshop},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-emnlp/2025.babylm-main.23/},
  pages     = {301--312},
  abstract  = {In recent years, we can observe a trend of creating various specific language models (LMs) within the Slavic language family with the BERT architecture. However, with an increasing number of parameters of LM, a larger amount of text is required for good performance, which can hinder the development and creation of LMs for specific languages. Our research is looking for a solution in Curriculum learning (CL) methods that can help us build better models with a lower amount of text in comparison with current LMs, which can help in better pretraining of models with low resource languages (LRL). Therefore, we replicate the BabyLM Challenge in the Slovak language (Dataset: https://huggingface.co/datasets/ubokri/SlovakBabyLM, Code: https://github.com/baucek/Slovakbabylm/tree/main). Additionally, we apply CL to test and see the difference in the application of CL methods on the English and Slovak languages and evaluate whether the CL improves performance of LM. Our experiments show that the use of CL methods as preprocessing methods is significant for improving model performance in sentiment analysis and question answering.},
}
@comment{
  Informal Markdown citation (ACL Anthology export residue, kept for reference):
  [SlovakBabyLM: Replication of the BabyLM and Sample-efficient Pretraining for a Low-Resource Language](https://preview.aclanthology.org/ingest-emnlp/2025.babylm-main.23/) (Kriš & Suppa, BabyLM 2025), ACL.
}