@inproceedings{edman-fraser-2025-mask,
title = "Mask and You Shall Receive: Optimizing Masked Language Modeling For Pretraining {B}aby{LM}s",
author = "Edman, Lukas and
Fraser, Alexander",
editor = "Charpentier, Lucas and
Choshen, Leshem and
Cotterell, Ryan and
Gul, Mustafa Omer and
Hu, Michael Y. and
Liu, Jing and
Jumelet, Jaap and
Linzen, Tal and
Mueller, Aaron and
Ross, Candace and
Shah, Raj Sanjay and
Warstadt, Alex and
Wilcox, Ethan Gotlieb and
Williams, Adina",
booktitle = "Proceedings of the First BabyLM Workshop",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-emnlp/2025.babylm-main.31/",
pages = "445--453",
ISBN = "TODO",
abstract = "We describe our strategy for the 2025 edition of the BabyLM Challenge. Our main contribution is that of an improved form of Masked Language Modeling (MLM), which adapts the probabilities of the tokens masked according to the model{'}s ability to predict them. The results show a substantial increase in performance on (Super)GLUE tasks over the standard MLM. We also incorporate sub-token embeddings, finding that this increases the model{'}s morphological generalization capabilities. Our submission beats the baseline in the strict-small track."
}