@inproceedings{son-etal-2024-prefixing,
    title = "Prefixing Attention Sinks can Mitigate Activation Outliers for Large Language Model Quantization",
    author = "Son, Seungwoo and
      Park, Wonpyo and
      Han, Woohyun and
      Kim, Kyuyeun and
      Lee, Jaeho",
    editor = "Al-Onaizan, Yaser and
      Bansal, Mohit and
      Chen, Yun-Nung",
    booktitle = "Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing",
    month = nov,
    year = "2024",
    address = "Miami, Florida, USA",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.emnlp-main.134/",
    doi = "10.18653/v1/2024.emnlp-main.134",
    pages = "2242--2252"
}