@inproceedings{ximing-ruifang-2021-jointly,
title = "Jointly Learning Salience and Redundancy by Adaptive Sentence Reranking for Extractive Summarization",
author = "Zhang, Ximing and
      Liu, Ruifang",
booktitle = "Proceedings of the 20th Chinese National Conference on Computational Linguistics",
month = aug,
year = "2021",
address = "Huhhot, China",
publisher = "Chinese Information Processing Society of China",
url = "https://aclanthology.org/2021.ccl-1.85",
pages = "952--963",
abstract = "Extractive text summarization seeks to extract indicative sentences from a source document and assemble them to form a summary. Selecting salient but not redundant sentences has always been the main challenge. Unlike the previous two-stage strategies, this paper presents a unified end-to-end model learning to rerank the sentences by modeling salience and redundancy simultaneously. Through this ranking mechanism our method can improve the quality of the overall candidate summary by giving higher scores to sentences that can bring more novel information. We first design a summary-level measure to evaluate the cumulating gain of each candidate summary. Then we propose an adaptive training objective to rerank the sentences, aiming at obtaining a summary with a high summary-level score. The experimental results and evaluation show that our method outperforms the strong baselines on three datasets and further boosts the quality of candidate summaries, which intensely indicates the effectiveness of the proposed framework.",
language = "English",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="ximing-ruifang-2021-jointly">
<titleInfo>
<title>Jointly Learning Salience and Redundancy by Adaptive Sentence Reranking for Extractive Summarization</title>
</titleInfo>
<name type="personal">
<namePart type="given">Ximing</namePart>
<namePart type="family">Zhang</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ruifang</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<language>
<languageTerm type="text">English</languageTerm>
<languageTerm type="code" authority="iso639-2b">eng</languageTerm>
</language>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 20th Chinese National Conference on Computational Linguistics</title>
</titleInfo>
<originInfo>
<publisher>Chinese Information Processing Society of China</publisher>
<place>
<placeTerm type="text">Huhhot, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Extractive text summarization seeks to extract indicative sentences from a source document and assemble them to form a summary. Selecting salient but not redundant sentences has always been the main challenge. Unlike the previous two-stage strategies, this paper presents a unified end-to-end model learning to rerank the sentences by modeling salience and redundancy simultaneously. Through this ranking mechanism our method can improve the quality of the overall candidate summary by giving higher scores to sentences that can bring more novel information. We first design a summary-level measure to evaluate the cumulating gain of each candidate summary. Then we propose an adaptive training objective to rerank the sentences, aiming at obtaining a summary with a high summary-level score. The experimental results and evaluation show that our method outperforms the strong baselines on three datasets and further boosts the quality of candidate summaries, which intensely indicates the effectiveness of the proposed framework.</abstract>
<identifier type="citekey">ximing-ruifang-2021-jointly</identifier>
<location>
<url>https://aclanthology.org/2021.ccl-1.85</url>
</location>
<part>
<date>2021-08</date>
<extent unit="page">
<start>952</start>
<end>963</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Jointly Learning Salience and Redundancy by Adaptive Sentence Reranking for Extractive Summarization
%A Zhang, Ximing
%A Liu, Ruifang
%S Proceedings of the 20th Chinese National Conference on Computational Linguistics
%D 2021
%8 aug
%I Chinese Information Processing Society of China
%C Huhhot, China
%G English
%F ximing-ruifang-2021-jointly
%X Extractive text summarization seeks to extract indicative sentences from a source document and assemble them to form a summary. Selecting salient but not redundant sentences has always been the main challenge. Unlike the previous two-stage strategies, this paper presents a unified end-to-end model learning to rerank the sentences by modeling salience and redundancy simultaneously. Through this ranking mechanism our method can improve the quality of the overall candidate summary by giving higher scores to sentences that can bring more novel information. We first design a summary-level measure to evaluate the cumulating gain of each candidate summary. Then we propose an adaptive training objective to rerank the sentences, aiming at obtaining a summary with a high summary-level score. The experimental results and evaluation show that our method outperforms the strong baselines on three datasets and further boosts the quality of candidate summaries, which intensely indicates the effectiveness of the proposed framework.
%U https://aclanthology.org/2021.ccl-1.85
%P 952-963
Markdown (Informal)
[Jointly Learning Salience and Redundancy by Adaptive Sentence Reranking for Extractive Summarization](https://aclanthology.org/2021.ccl-1.85) (Zhang & Liu, CCL 2021)
ACL