@inproceedings{liu-etal-2023-bolt,
title = "{BOLT}: Fast Energy-based Controlled Text Generation with Tunable Biases",
author = "Liu, Xin and
Khalifa, Muhammad and
Wang, Lu",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.acl-short.18/",
doi = "10.18653/v1/2023.acl-short.18",
pages = "186--200",
abstract = "Energy-based models (EBMs) have gained popularity for controlled text generation due to their high applicability to a wide range of constraints. However, sampling from EBMs is non-trivial, as it often requires a large number of iterations to converge to plausible text, which slows down the decoding process and makes it less practical for real-world applications. In this work, we propose BOLT, which relies on tunable biases to directly adjust the language model{'}s output logits. Unlike prior work, BOLT maintains the generator{'}s autoregressive nature to assert a strong control on token-wise conditional dependencies and overall fluency, and thus converges faster. When compared with state-of-the-arts on controlled generation tasks using both soft constraints (e.g., sentiment control) and hard constraints (e.g., keyword-guided topic control), BOLT demonstrates significantly improved efficiency and fluency. On sentiment control, BOLT is 7x faster than competitive baselines, and more fluent in 74.4{\%} of the evaluation samples according to human judges."
}
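
The abstract's core mechanism — tunable bias vectors added to the language model's output logits at each autoregressive decoding step, then optimized to lower a constraint "energy" — can be illustrated with a minimal sketch. This is not the authors' released implementation; the toy LM, toy energy, and all names below are placeholders chosen only to make the idea concrete and runnable.

```python
# Hedged sketch of logit biasing for energy-based controlled decoding.
# Assumptions (not from the paper's code): a frozen toy "LM" given by a
# random transition matrix, a toy quadratic constraint energy, greedy
# hard tokens for the autoregressive prefix.
import torch

vocab_size, seq_len = 50, 8
torch.manual_seed(0)

# Frozen stand-in for an autoregressive LM: next-token logits from last token.
W = torch.randn(vocab_size, vocab_size)

def toy_lm_logits(last_token: int) -> torch.Tensor:
    return W[last_token]

def toy_energy(soft_tokens: torch.Tensor) -> torch.Tensor:
    # Stand-in for a constraint energy (lower = constraint better satisfied),
    # e.g. a classifier's negative log-probability of the target attribute.
    target = torch.zeros(vocab_size)
    target[7] = 1.0
    return ((soft_tokens - target) ** 2).sum()

# One tunable bias vector per position to be generated (the only trained part).
biases = torch.zeros(seq_len, vocab_size, requires_grad=True)
opt = torch.optim.Adam([biases], lr=0.1)

for step in range(20):
    prefix = [0]                    # BOS-like start token
    energy = torch.tensor(0.0)
    for t in range(seq_len):
        logits = toy_lm_logits(prefix[-1]) + biases[t]  # bias the LM's logits
        probs = torch.softmax(logits, dim=-1)           # soft token distribution
        energy = energy + toy_energy(probs)             # accumulate constraint energy
        prefix.append(int(probs.argmax()))              # greedy token extends the prefix
    opt.zero_grad()
    energy.backward()
    opt.step()

print("final energy:", float(energy))
print("decoded token ids:", prefix[1:])
```

Because only the per-position biases are updated while the (frozen) LM still produces the base logits autoregressively, the decoding loop stays fast and the generator's token-wise conditional dependencies are preserved, which is the efficiency and fluency argument made in the abstract.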