@inproceedings{liu-etal-2022-length,
title = "Length Control in Abstractive Summarization by Pretraining Information Selection",
author = "Liu, Yizhu and
Jia, Qi and
Zhu, Kenny",
editor = "Muresan, Smaranda and
Nakov, Preslav and
Villavicencio, Aline",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.acl-long.474/",
doi = "10.18653/v1/2022.acl-long.474",
pages = "6885--6895",
abstract = "Previous length-controllable summarization models mostly control lengths at the decoding stage, whereas the encoding or the selection of information from the source document is not sensitive to the designed length. They also tend to generate summaries as long as those in the training data. In this paper, we propose a length-aware attention mechanism (LAAM) to adapt the encoding of the source based on the desired length. Our approach works by training LAAM on a summary length balanced dataset built from the original training data, and then fine-tuning as usual. Results show that this approach is effective in generating high-quality summaries with desired lengths and even those short lengths never seen in the original training set."
}