@inproceedings{urlana-etal-2022-ltrc,
  title     = {{LTRC} @{MuP} 2022: Multi-Perspective Scientific Document Summarization Using Pre-trained Generation Models},
  author    = {Urlana, Ashok and
               Surange, Nirmal and
               Shrivastava, Manish},
  editor    = {Cohan, Arman and
               Feigenblat, Guy and
               Freitag, Dayne and
               Ghosal, Tirthankar and
               Herrmannova, Drahomira and
               Knoth, Petr and
               Lo, Kyle and
               Mayr, Philipp and
               Shmueli-Scheuer, Michal and
               de Waard, Anita and
               Wang, Lucy Lu},
  booktitle = {Proceedings of the Third Workshop on Scholarly Document Processing},
  month     = oct,
  year      = {2022},
  address   = {Gyeongju, Republic of Korea},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/add-emnlp-2024-awards/2022.sdp-1.35/},
  pages     = {279--284},
  abstract  = {The MuP-2022 shared task focuses on multi-perspective scientific document summarization. Given a scientific document, with multiple reference summaries, our goal was to develop a model that can produce a generic summary covering as many aspects of the document as covered by all of its reference summaries. This paper describes our best official model, a finetuned BART-large, along with a discussion on the challenges of this task and some of our unofficial models including SOTA generation models. Our submitted model outperformed the given MuP 2022 shared task baselines on ROUGE-2, ROUGE-L and average ROUGE F1-scores. Code of our submission can be accessed here.},
}
@comment{
Markdown (Informal)
[LTRC @MuP 2022: Multi-Perspective Scientific Document Summarization Using Pre-trained Generation Models](https://preview.aclanthology.org/add-emnlp-2024-awards/2022.sdp-1.35/) (Urlana et al., SDP 2022)
ACL
}