% ACL 2025 long paper; url normalised from the temporary preview.aclanthology.org
% mirror to the canonical stable Anthology address.
@inproceedings{zhang-etal-2025-attention,
  title     = {Attention Entropy is a Key Factor: An Analysis of Parallel Context Encoding with Full-attention-based Pre-trained Language Models},
  author    = {Zhang, Zhisong and Wang, Yan and Huang, Xinting and Fang, Tianqing and Zhang, Hongming and Deng, Chenlong and Li, Shuaiyi and Yu, Dong},
  editor    = {Che, Wanxiang and Nabende, Joyce and Shutova, Ekaterina and Pilehvar, Mohammad Taher},
  booktitle = {Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.acl-long.485/},
  pages     = {9840--9855},
  isbn      = {979-8-89176-251-0},
}