@inproceedings{brantley-etal-2019-non,
title = "Non-Monotonic Sequential Text Generation",
author = "Brantley, Kiante and
Cho, Kyunghyun and
Daum{\'e}, Hal and
Welleck, Sean",
editor = "Axelrod, Amittai and
Yang, Diyi and
Cunha, Rossana and
Shaikh, Samira and
Waseem, Zeerak",
booktitle = "Proceedings of the 2019 Workshop on Widening NLP",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/W19-3620/",
pages = "57--59",
    abstract = "Standard sequential generation methods assume a pre-specified generation order, such as text generation methods which generate words from left to right. In this work, we propose a framework for training models of text generation that operate in non-monotonic orders; the model directly learns good orders, without any additional annotation. Our framework operates by generating a word at an arbitrary position, and then recursively generating words to its left and then words to its right, yielding a binary tree. Learning is framed as imitation learning, including a coaching method which moves from imitating an oracle to reinforcing the policy's own preferences. Experimental results demonstrate that using the proposed method, it is possible to learn policies which generate text without pre-specifying a generation order while achieving competitive performance with conventional left-to-right generation."
}
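
As a rough illustration of the generation order the abstract describes (not the authors' implementation), the sketch below represents a sentence as a binary tree in which each node holds one word: a word is produced at an arbitrary position, its left subtree is filled in recursively, then its right subtree, and an in-order traversal recovers the final left-to-right sentence. The example sentence, node layout, and class names are hypothetical.

```python
# Minimal sketch (not the paper's code): a sentence as a binary tree of words,
# mirroring the non-monotonic generation order described in the abstract.
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Node:
    word: str
    left: Optional["Node"] = None   # words generated to the left of `word`
    right: Optional["Node"] = None  # words generated to the right of `word`


def in_order(node: Optional[Node]) -> List[str]:
    """Flatten the generation tree into the final left-to-right sentence."""
    if node is None:
        return []
    return in_order(node.left) + [node.word] + in_order(node.right)


# Hypothetical generation order: "sat" is produced first, then its left and
# right subtrees are filled in recursively.
tree = Node(
    "sat",
    left=Node("cat", left=Node("the")),
    right=Node("mat", left=Node("on", right=Node("the"))),
)

print(" ".join(in_order(tree)))  # -> "the cat sat on the mat"
```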