@inproceedings{que-rong-2025-pic,
title = "{PIC}: Unlocking Long-Form Text Generation Capabilities of Large Language Models via Position {ID} Compression",
author = "Que, Haoran and
Rong, Wenge",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingestion-acl-25/2025.acl-long.347/",
pages = "6982--6995",
ISBN = "979-8-89176-251-0",
abstract = "Long-context understanding is crucial for large language models (LLMs) and has become a fundamental capability for most LLMs. However, beyond the focus on ``input-long'', the ability to ``output-long'' is equally significant, yet it remains underexplored. To address this limitation, we propose a simple, efficient, and plug-in approach, Position ID Compression (PIC), to unlock the long-form text generation potential of LLMs. The idea is straightforward: by compressing the position ids of the context, we provoke and guide LLMs to generate coherent and longer output. Specifically, we find that directly reducing the position ids by a fixed ratio significantly impacts the generation quality. To mitigate this, we propose two variants of PIC: NTK-aware PIC and Dynamic PIC. Without additional training, both methods enable LLMs to extend their generation length by approximately 1.5 times without compromising generation quality. Furthermore, by integrating supervised fine-tuning (SFT) with PIC, we propose PIC-SFT, which further improves LLMs' long-form text generation capabilities, achieving top performance on HelloBench and LongBench-Write. Extensive experiments demonstrate the effectiveness of our approach."
}
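As a rough illustration of the idea described in the abstract (not the authors' implementation), the sketch below applies the naive fixed-ratio form of position ID compression to a Hugging Face causal LM: the prompt's position IDs are divided by a compression ratio before the forward pass, so the context occupies fewer positions. The model name, ratio, and prompt are placeholders, and the paper's NTK-aware and Dynamic PIC variants, which avoid the quality loss of this fixed-ratio baseline, are not reproduced here.

```python
# Minimal sketch of fixed-ratio position ID compression (assumed setup,
# not the paper's code). Requires torch and transformers.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # placeholder; PIC targets long-context LLMs
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

prompt = "Write a detailed, multi-section report on renewable energy."
inputs = tokenizer(prompt, return_tensors="pt")
seq_len = inputs["input_ids"].shape[1]

# Naive fixed-ratio compression: position i is mapped to floor(i / ratio),
# so the context "occupies" fewer position IDs and leaves more headroom
# before the model's trained length limit.
ratio = 1.5  # illustrative value; the abstract reports ~1.5x longer outputs
position_ids = (torch.arange(seq_len) / ratio).long().unsqueeze(0)

with torch.no_grad():
    outputs = model(**inputs, position_ids=position_ids)
```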