@inproceedings{wuhrmann-etal-2025-low,
title = "Low-Perplexity {LLM}-Generated Sequences and Where To Find Them",
author = "Wuhrmann, Arthur and
Kucharavy, Andrei and
Kucherenko, Anastasiia",
editor = "Zhao, Jin and
Wang, Mingyang and
Liu, Zhu",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 4: Student Research Workshop)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.acl-srw.51/",
pages = "774--783",
ISBN = "979-8-89176-254-1",
abstract = "As Large Language Models (LLMs) become increasingly widespread, understanding how specific training data shapes their outputs is crucial for transparency, accountability, privacy, and fairness. To explore how LLMs leverage and replicate their training data, we introduce a systematic approach centered on analyzing low-perplexity sequences{---}high-probability text spans generated by the model. Our pipeline reliably extracts such long sequences across diverse topics while avoiding degeneration, then traces them back to their sources in the training data. Surprisingly, we find that a substantial portion of these low-perplexity spans cannot be mapped to the corpus. For those that do match, we quantify the distribution of occurrences across source documents, highlighting the scope and nature of verbatim recall and paving a way toward better understanding of how LLMs training data impacts their behavior."
}