@inproceedings{schmidt-etal-2019-autoregressive,
  title     = {Autoregressive Text Generation Beyond Feedback Loops},
  author    = {Schmidt, Florian and
               Mandt, Stephan and
               Hofmann, Thomas},
  editor    = {Inui, Kentaro and
               Jiang, Jing and
               Ng, Vincent and
               Wan, Xiaojun},
  booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing ({EMNLP}-{IJCNLP})},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D19-1338/},
  doi       = {10.18653/v1/D19-1338},
  pages     = {3400--3406},
  abstract  = {Autoregressive state transitions, where predictions are conditioned on past predictions, are the predominant choice for both deterministic and stochastic sequential models. However, autoregressive feedback exposes the evolution of the hidden state trajectory to potential biases from well-known train-test discrepancies. In this paper, we combine a latent state space model with a CRF observation model. We argue that such autoregressive observation models form an interesting middle ground that expresses local correlations on the word level but keeps the state evolution non-autoregressive. On unconditional sentence generation we show performance improvements compared to RNN and GAN baselines while avoiding some prototypical failure modes of autoregressive models.},
}
Markdown (informal citation):
[Autoregressive Text Generation Beyond Feedback Loops](https://aclanthology.org/D19-1338/) (Schmidt et al., EMNLP-IJCNLP 2019)
ACL
- Florian Schmidt, Stephan Mandt, and Thomas Hofmann. 2019. Autoregressive Text Generation Beyond Feedback Loops. In Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP), pages 3400–3406, Hong Kong, China. Association for Computational Linguistics.