@inproceedings{nielsen-etal-2025-continual,
    title = "Continual Quantization-Aware Pre-Training: When to transition from 16-bit to 1.58-bit pre-training for {B}it{N}et language models?",
    author = "Nielsen, Jacob and
      Schneider-Kamp, Peter and
      Galke, Lukas",
    editor = "Che, Wanxiang and
      Nabende, Joyce and
      Shutova, Ekaterina and
      Pilehvar, Mohammad Taher",
    booktitle = "Findings of the Association for Computational Linguistics: ACL 2025",
    month = jul,
    year = "2025",
    address = "Vienna, Austria",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-acl.694/",
    pages = "13483--13493",
    ISBN = "979-8-89176-256-5"
}