@inproceedings{liu-etal-2022-learning,
title = "Learning Non-Autoregressive Models from Search for Unsupervised Sentence Summarization",
author = "Liu, Puyuan and
Huang, Chenyang and
Mou, Lili",
editor = "Muresan, Smaranda and
Nakov, Preslav and
Villavicencio, Aline",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.acl-long.545/",
doi = "10.18653/v1/2022.acl-long.545",
pages = "7916--7929",
abstract = "Text summarization aims to generate a short summary for an input text. In this work, we propose a Non-Autoregressive Unsupervised Summarization (NAUS) approach, which does not require parallel data for training. Our NAUS first performs edit-based search towards a heuristically defined score, and generates a summary as pseudo-groundtruth. Then, we train an encoder-only non-autoregressive Transformer based on the search result. We also propose a dynamic programming approach for length-control decoding, which is important for the summarization task. Experiments on two datasets show that NAUS achieves state-of-the-art performance for unsupervised summarization, yet largely improving inference efficiency. Further, our algorithm is able to perform explicit length-transfer summary generation."
}
[Learning Non-Autoregressive Models from Search for Unsupervised Sentence Summarization](https://aclanthology.org/2022.acl-long.545/) (Liu et al., ACL 2022)
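As a rough illustration of the length-control decoding mentioned in the abstract, below is a minimal sketch of a CTC-style dynamic program that picks exactly `target_len` tokens from an encoder-only model's per-position distributions. The array name `log_probs`, the blank-at-index-0 convention, and the omission of CTC's repeated-token merging are simplifying assumptions for illustration, not the paper's exact algorithm.

```python
import numpy as np

def length_control_decode(log_probs: np.ndarray, target_len: int):
    """Sketch of length-controlled decoding for a non-autoregressive model.

    log_probs: [T, V] per-position log-probabilities; vocabulary id 0 is
    assumed to be the blank token (a simplifying assumption). Each of the
    T encoder positions either emits its best non-blank token or a blank,
    and the DP maximizes total log-probability under an exact-length
    constraint. CTC's merging of repeated tokens is ignored here.
    """
    T, V = log_probs.shape
    assert target_len <= T, "cannot emit more tokens than source positions"
    NEG = -1e9

    # dp[i][j]: best score emitting j tokens from the first i positions
    dp = np.full((T + 1, target_len + 1), NEG)
    dp[0][0] = 0.0
    # back[i][j]: token emitted at position i-1 on the optimal path, or -1 for blank
    back = np.full((T + 1, target_len + 1), -1, dtype=np.int64)

    best_tok = log_probs[:, 1:].argmax(axis=1) + 1        # best non-blank token per position
    best_tok_lp = log_probs[np.arange(T), best_tok]

    for i in range(1, T + 1):
        for j in range(min(i, target_len) + 1):
            skip = dp[i - 1][j] + log_probs[i - 1, 0]     # position i-1 emits blank
            emit = dp[i - 1][j - 1] + best_tok_lp[i - 1] if j > 0 else NEG
            if emit >= skip:
                dp[i][j], back[i][j] = emit, best_tok[i - 1]
            else:
                dp[i][j] = skip

    # Backtrace to recover a summary of exactly target_len tokens.
    tokens, j = [], target_len
    for i in range(T, 0, -1):
        if back[i][j] != -1:
            tokens.append(int(back[i][j]))
            j -= 1
    return tokens[::-1], dp[T][target_len]
```

Because the DP table is indexed by the number of emitted tokens, the same trained model can produce summaries of any requested length, which is the mechanism behind the length-transfer generation the abstract refers to.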