@inproceedings{wolfson-etal-2022-weakly,
title = "Weakly Supervised Text-to-{SQL} Parsing through Question Decomposition",
author = "Wolfson, Tomer and
Deutch, Daniel and
Berant, Jonathan",
editor = "Carpuat, Marine and
de Marneffe, Marie-Catherine and
Meza Ruiz, Ivan Vladimir",
booktitle = "Findings of the Association for Computational Linguistics: NAACL 2022",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.findings-naacl.193/",
doi = "10.18653/v1/2022.findings-naacl.193",
pages = "2528--2542",
abstract = "Text-to-SQL parsers are crucial in enabling non-experts to effortlessly query relational data. Training such parsers, by contrast, generally requires expertise in annotating natural language (NL) utterances with corresponding SQL queries. In this work, we propose a weak supervision approach for training text-to-SQL parsers. We take advantage of the recently proposed question meaning representation called QDMR, an intermediate between NL and formal query languages. Given questions, their QDMR structures (annotated by non-experts or automatically predicted), and the answers, we are able to automatically synthesize SQL queries that are used to train text-to-SQL models. We test our approach by experimenting on five benchmark datasets. Our results show that the weakly supervised models perform competitively with those trained on annotated NL-SQL data. Overall, we effectively train text-to-SQL parsers, while using zero SQL annotations."
}