@inproceedings{zhu-etal-2025-learning,
  title     = {Learning {SQL} Like a Human: Structure-Aware Curriculum Learning for Text-to-{SQL} Generation},
  author    = {Zhu, Xiaohu and
               Li, Qian and
               Cui, Lizhen and
               Du, Yuntao},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2025},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-emnlp.190/},
  doi       = {10.18653/v1/2025.findings-emnlp.190},
  pages     = {3545--3559},
  isbn      = {979-8-89176-335-7},
  abstract  = {The Text-to-SQL capabilities of large language models allow users to interact with databases using natural language. While current models struggle with handling complex queries, especially involving multi-table joins and reasoning. To address this gap, we propose to construct a model, namely SAC-SQL, with synthetic training samples followed by a structure-aware curriculum learning framework for enhancing SQL generation. Our approach begins with a supervised fine-tuning (SFT) stage, where we train open-source models on a synthetically constructed, cross-domain SQL dataset with diverse structural patterns. Moreover, we introduce a unified structure difficulty scoring function to partition the training samples into non-overlapping curriculum phases, guiding the model progressively learning from simpler to more complex SQL structures. Extensive experiments are conducted and the results show that SAC-SQL achieves better results than the baselines, and significantly narrows the performance gap between open-source and close-source models on Spider and Bird benchmarks.},
}
Markdown (Informal)
[Learning SQL Like a Human: Structure-Aware Curriculum Learning for Text-to-SQL Generation](https://aclanthology.org/2025.findings-emnlp.190/) (Zhu et al., Findings 2025)
ACL