@inproceedings{oh-schuler-2023-transformer,
    title = "Transformer-Based Language Model Surprisal Predicts Human Reading Times Best with About Two Billion Training Tokens",
    author = "Oh, Byung-Doh and
      Schuler, William",
    editor = "Bouamor, Houda and
      Pino, Juan and
      Bali, Kalika",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
    month = dec,
    year = "2023",
    address = "Singapore",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.findings-emnlp.128/",
    doi = "10.18653/v1/2023.findings-emnlp.128",
    pages = "1915--1921"
}