@inproceedings{ye-etal-2024-chunkattention,
title = "{C}hunk{A}ttention: Efficient Self-Attention with Prefix-Aware {KV} Cache and Two-Phase Partition",
author = "Ye, Lu and
Tao, Ze and
Huang, Yong and
Li, Yang",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.acl-long.623/",
doi = "10.18653/v1/2024.acl-long.623",
pages = "11608--11620",
abstract = "Self-attention is an essential component of large language models (LLMs) but a significant source of inference latency for long sequences. In multi-tenant LLM serving scenarios, the compute and memory operation cost of self-attention can be optimized by exploiting the probability that multiple LLM requests share system prompts in their prefixes. In this paper, we introduce ChunkAttention, a prefix-aware self-attention module that can detect matching prompt prefixes across multiple requests and share their key/value tensors in memory at runtime to improve the memory utilization of the KV cache. This is achieved by breaking monolithic key/value tensors into smaller chunks and structuring them into an auxiliary prefix tree. Consequently, on top of the prefix-tree-based KV cache, we design an efficient self-attention kernel, in which a two-phase partition algorithm is implemented to improve data locality during self-attention computation in the presence of shared system prompts. Experiments show that ChunkAttention can speed up the self-attention kernel by 3.2--4.8$\times$ compared to the state-of-the-art implementation, with the length of the system prompt ranging from 1024 to 4096 tokens."
}
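
The sketch below is a minimal illustration (not the authors' code) of the prefix-aware KV cache the abstract describes: key/value tensors are split into fixed-size chunks and stored in a trie keyed by the token ids of each chunk, so requests that share a system-prompt prefix reuse the same chunks. All names (ChunkNode, KVCache, CHUNK, HEAD_DIM) and the chunk size are hypothetical, and zero arrays stand in for the real key/value tensors a model would produce; the two-phase partition kernel is not covered.

import numpy as np

CHUNK = 4       # tokens per KV chunk (assumed; the paper tunes this)
HEAD_DIM = 8    # toy head dimension

class ChunkNode:
    def __init__(self):
        self.children = {}  # tuple of CHUNK token ids -> ChunkNode
        self.kv = None      # shared (K, V) arrays for this chunk

class KVCache:
    def __init__(self):
        self.root = ChunkNode()

    def insert(self, tokens):
        """Walk/extend the trie; count tokens whose KV chunks are reused."""
        node, shared = self.root, 0
        for i in range(0, len(tokens) - len(tokens) % CHUNK, CHUNK):
            key = tuple(tokens[i:i + CHUNK])
            if key in node.children:
                shared += CHUNK  # prefix hit: KV chunk already cached
            else:
                child = ChunkNode()
                # placeholders for the real K/V computed by the model
                child.kv = (np.zeros((CHUNK, HEAD_DIM)),
                            np.zeros((CHUNK, HEAD_DIM)))
                node.children[key] = child
            node = node.children[key]
        return shared

cache = KVCache()
system_prompt = list(range(8))  # 8-token shared system prompt
print(cache.insert(system_prompt + [100, 101, 102, 103]))  # 0 (cold cache)
print(cache.insert(system_prompt + [200, 201, 202, 203]))  # 8 (prefix shared)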