@inproceedings{yuan-etal-2020-fact,
title = "Fact-level Extractive Summarization with Hierarchical Graph Mask on {BERT}",
author = "Yuan, Ruifeng and
Wang, Zili and
Li, Wenjie",
editor = "Scott, Donia and
Bel, Nuria and
Zong, Chengqing",
booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "International Committee on Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.coling-main.493/",
doi = "10.18653/v1/2020.coling-main.493",
pages = "5629--5639",
abstract = "Most current extractive summarization models generate summaries by selecting salient sentences. However, one of the problems with sentence-level extractive summarization is that there exists a gap between the human-written gold summary and the oracle sentence labels. In this paper, we propose to extract fact-level semantic units for better extractive summarization. We also introduce a hierarchical structure, which incorporates the multi-level of granularities of the textual information into the model. In addition, we incorporate our model with BERT using a hierarchical graph mask. This allows us to combine BERT`s ability in natural language understanding and the structural information without increasing the scale of the model. Experiments on the CNN/DaliyMail dataset show that our model achieves state-of-the-art results."
}
Markdown (Informal)
[Fact-level Extractive Summarization with Hierarchical Graph Mask on BERT](https://aclanthology.org/2020.coling-main.493/) (Yuan et al., COLING 2020)
ACL
Ruifeng Yuan, Zili Wang, and Wenjie Li. 2020. Fact-level Extractive Summarization with Hierarchical Graph Mask on BERT. In Proceedings of the 28th International Conference on Computational Linguistics, pages 5629–5639, Barcelona, Spain (Online). International Committee on Computational Linguistics.