@inproceedings{zhou-etal-2022-learning-decompose,
title = "Learning to Decompose: Hypothetical Question Decomposition Based on Comparable Texts",
author = "Zhou, Ben and
Richardson, Kyle and
Yu, Xiaodong and
Roth, Dan",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2022.emnlp-main.142/",
doi = "10.18653/v1/2022.emnlp-main.142",
pages = "2223--2235",
abstract = "Explicit decomposition modeling, which involves breaking down complex tasks into more straightforward and often more interpretable sub-tasks, has long been a central theme in developing robust and interpretable NLU systems. However, despite the many datasets and resources built as part of this effort, the majority have small-scale annotations and limited scope, which is insufficient to solve general decomposition tasks. In this paper, we look at large-scale intermediate pre-training of decomposition-based transformers using distant supervision from comparable texts, particularly large-scale parallel news. We show that with such intermediate pre-training, developing robust decomposition-based models for a diverse range of tasks becomes more feasible. For example, on semantic parsing, our model, DecompT5, improves 20{\%} to 30{\%} on two datasets, Overnight and TORQUE, over the baseline language model. We further use DecompT5 to build a novel decomposition-based QA system named DecompEntail, improving over state-of-the-art models, including GPT-3, on both HotpotQA and StrategyQA by 8{\%} and 4{\%}, respectively."
}