@inproceedings{jin-etal-2022-lifelong,
  title     = {Lifelong Pretraining: Continually Adapting Language Models to Emerging Corpora},
  author    = {Jin, Xisen and Zhang, Dejiao and Zhu, Henghui and Xiao, Wei and Li, Shang-Wen and Wei, Xiaokai and Arnold, Andrew and Ren, Xiang},
  editor    = {Fan, Angela and Ilic, Suzana and Wolf, Thomas and Gall{\'e}, Matthias},
  booktitle = {Proceedings of {BigScience} Episode {\#}5 -- Workshop on Challenges {\&} Perspectives in Creating Large Language Models},
  month     = may,
  year      = {2022},
  address   = {virtual+Dublin},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.bigscience-1.1/},
  doi       = {10.18653/v1/2022.bigscience-1.1},
  pages     = {1--16},
}