@inproceedings{kunitomo-jacquin-etal-2025-role,
  title     = {On the Role of Unobserved Sequences on Sample-based Uncertainty Quantification for {LLM}s},
  author    = {Kunitomo-Jacquin, Lucie and
               Marrese-Taylor, Edison and
               Fukuda, Ken},
  editor    = {Noidea, Noidea},
  booktitle = {Proceedings of the 2nd Workshop on Uncertainty-Aware {NLP} (UncertaiNLP 2025)},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-emnlp/2025.uncertainlp-main.15/},
  pages     = {179--183},
  isbn      = {979-8-89176-349-4},
  abstract  = {Quantifying uncertainty in large language models (LLMs) is important for safety-critical applications because it helps spot incorrect answers, known as hallucinations. One major trend of uncertainty quantification methods is based on estimating the entropy of the distribution of the LLM{'}s potential output sequences. This estimation is based on a set of output sequences and associated probabilities obtained by querying the LLM several times. In this paper, we advocate and experimentally show that the probability of unobserved sequences plays a crucial role, and we recommend future research to integrate it to enhance such LLM uncertainty quantification methods.},
  internal-note = {NOTE(review): editor value "Noidea, Noidea" is an export placeholder -- verify against the UncertaiNLP 2025 proceedings front matter},
}
% Markdown (Informal)
% [On the Role of Unobserved Sequences on Sample-based Uncertainty Quantification for LLMs](https://preview.aclanthology.org/ingest-emnlp/2025.uncertainlp-main.15/) (Kunitomo-Jacquin et al., UncertaiNLP 2025)
% ACL