@inproceedings{bae-etal-2019-summary,
title = "Summary Level Training of Sentence Rewriting for Abstractive Summarization",
author = "Bae, Sanghwan and
Kim, Taeuk and
Kim, Jihoon and
Lee, Sang-goo",
booktitle = "Proceedings of the 2nd Workshop on New Frontiers in Summarization",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D19-5402",
doi = "10.18653/v1/D19-5402",
pages = "10--20",
abstract = "As an attempt to combine extractive and abstractive summarization, Sentence Rewriting models adopt the strategy of extracting salient sentences from a document first and then paraphrasing the selected ones to generate a summary. However, the existing models in this framework mostly rely on sentence-level rewards or suboptimal labels, causing a mismatch between a training objective and evaluation metric. In this paper, we present a novel training signal that directly maximizes summary-level ROUGE scores through reinforcement learning. In addition, we incorporate BERT into our model, making good use of its ability on natural language understanding. In extensive experiments, we show that a combination of our proposed model and training procedure obtains new state-of-the-art performance on both CNN/Daily Mail and New York Times datasets. We also demonstrate that it generalizes better on DUC-2002 test set.",
}