@inproceedings{yang-etal-2022-re3,
    title     = {{Re3}: Generating Longer Stories With Recursive Reprompting and Revision},
    author    = {Yang, Kevin and
                 Tian, Yuandong and
                 Peng, Nanyun and
                 Klein, Dan},
    editor    = {Goldberg, Yoav and
                 Kozareva, Zornitsa and
                 Zhang, Yue},
    booktitle = {Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing},
    month     = dec,
    year      = {2022},
    address   = {Abu Dhabi, United Arab Emirates},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2022.emnlp-main.296/},
    doi       = {10.18653/v1/2022.emnlp-main.296},
    pages     = {4393--4479},
    abstract  = {We consider the problem of automatically generating longer stories of over two thousand words. Compared to prior work on shorter stories, long-range plot coherence and relevance are more central challenges here. We propose the Recursive Reprompting and Revision framework (Re3) to address these challenges by (a) prompting a general-purpose language model to construct a structured overarching plan, and (b) generating story passages by repeatedly injecting contextual information from both the plan and current story state into a language model prompt. We then revise by (c) reranking different continuations for plot coherence and premise relevance, and finally (d) editing the best continuation for factual consistency. Compared to similar-length stories generated directly from the same base model, human evaluators judged substantially more of Re3{'}s stories as having a coherent overarching plot (by 14{\%} absolute increase), and relevant to the given initial premise (by 20{\%}).},
}
Markdown (Informal)
[Re3: Generating Longer Stories With Recursive Reprompting and Revision](https://aclanthology.org/2022.emnlp-main.296/) (Yang et al., EMNLP 2022)
ACL