@inproceedings{surkov-etal-2022-data,
title = "Do Data-based Curricula Work?",
author = "Surkov, Maxim and
Mosin, Vladislav and
Yamshchikov, Ivan P.",
editor = "Tafreshi, Shabnam and
Sedoc, Jo{\~a}o and
Rogers, Anna and
Drozd, Aleksandr and
Rumshisky, Anna and
Akula, Arjun",
booktitle = "Proceedings of the Third Workshop on Insights from Negative Results in NLP",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Author-page-Marten-During-lu/2022.insights-1.16/",
doi = "10.18653/v1/2022.insights-1.16",
pages = "119--128",
abstract = "Current state-of-the-art NLP systems use large neural networks that require extensive computational resources for training. Inspired by human knowledge acquisition, researchers have proposed curriculum learning - sequencing tasks (task-based curricula) or ordering and sampling the datasets (data-based curricula) that facilitate training. This work investigates the benefits of data-based curriculum learning for large language models such as BERT and T5. We experiment with various curricula based on complexity measures and different sampling strategies. Extensive experiments on several NLP tasks show that curricula based on various complexity measures rarely have any benefits, while random sampling performs either as well or better than curricula."
}
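
For readers unfamiliar with the terminology in the abstract, the following is a minimal, hypothetical sketch of what a data-based curriculum looks like in practice: training examples are ordered by a cheap complexity proxy (here, token count) and contrasted with the plain random-sampling baseline that the paper reports as equally good or better. The complexity measure, dataset, and helper names are illustrative assumptions, not the authors' implementation.

```python
# Illustrative sketch of a data-based curriculum (assumption: token count as the
# complexity measure; this is not the exact setup used in the paper).
import random


def complexity(example: str) -> int:
    """Cheap complexity proxy: number of whitespace-separated tokens."""
    return len(example.split())


def curriculum_order(dataset: list[str]) -> list[str]:
    """Data-based curriculum: present the easiest (shortest) examples first."""
    return sorted(dataset, key=complexity)


def random_order(dataset: list[str]) -> list[str]:
    """Baseline the paper finds competitive: plain random sampling."""
    shuffled = dataset[:]
    random.shuffle(shuffled)
    return shuffled


if __name__ == "__main__":
    toy_data = [
        "a cat sat",
        "the quick brown fox jumps over the lazy dog",
        "hello world",
    ]
    print(curriculum_order(toy_data))  # shortest to longest
    print(random_order(toy_data))      # arbitrary order
```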