@inproceedings{goodwin-etal-2020-towards,
  % Brace whole words (not single letters) to protect casing without
  % breaking kerning/hyphenation under sentence-casing .bst styles.
  title     = {Towards {Zero-Shot} {Conditional} {Summarization} with {Adaptive} {Multi-Task} {Fine-Tuning}},
  author    = {Goodwin, Travis and Savery, Max and Demner-Fushman, Dina},
  editor    = {Cohn, Trevor and He, Yulan and Liu, Yang},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2020},
  month     = nov,
  year      = {2020},
  % "Online" is the ACL-conventional venue designation for 2020 events.
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  % Canonical Anthology URL; the original pointed at a transient
  % preview/ingest mirror (preview.aclanthology.org/Ingest-2025-COMPUTEL/).
  url       = {https://aclanthology.org/2020.findings-emnlp.289/},
  doi       = {10.18653/v1/2020.findings-emnlp.289},
  pages     = {3215--3226},
  abstract  = {Automatic summarization research has traditionally focused on providing high quality general-purpose summaries of documents. However, there are many applications which require more specific summaries, such as supporting question answering or topic-based literature discovery. In this paper we study the problem of conditional summarization in which content selection and surface realization are explicitly conditioned on an ad-hoc natural language question or topic description. Because of the difficulty in obtaining sufficient reference summaries to support arbitrary conditional summarization, we explore the use of multi-task fine-tuning (MTFT) on twenty-one natural language tasks to enable zero-shot conditional summarization on five tasks. We present four new summarization datasets, two novel ``online'' or adaptive task-mixing strategies, and report zero-shot performance using T5 and BART, demonstrating that MTFT can improve zero-shot summarization quality.},
}
@comment{
Markdown (Informal)
[Towards Zero-Shot Conditional Summarization with Adaptive Multi-Task Fine-Tuning](https://aclanthology.org/2020.findings-emnlp.289/) (Goodwin et al., Findings 2020)
ACL
}