@inproceedings{gao-etal-2023-small,
    title = "Small Pre-trained Language Models Can be Fine-tuned as Large Models via Over-Parameterization",
    author = "Gao, Ze-Feng and
      Zhou, Kun and
      Liu, Peiyu and
      Zhao, Wayne Xin and
      Wen, Ji-Rong",
    editor = "Rogers, Anna and
      Boyd-Graber, Jordan and
      Okazaki, Naoaki",
    booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
    month = jul,
    year = "2023",
    address = "Toronto, Canada",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.acl-long.212/",
    doi = "10.18653/v1/2023.acl-long.212",
    pages = "3819--3834"
}