@inproceedings{hao-etal-2025-genlink,
  title     = {{GenLink}: Generation-Driven Schema-Linking via Multi-Model Learning for Text-to-{SQL}},
  author    = {Hao, Zhifeng and
               Huang, Junqi and
               Shi, Shaobin and
               Cai, Ruichu and
               Xu, Boyan},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.emnlp-main.1518/},
  pages     = {29880--29893},
  isbn      = {979-8-89176-332-6},
  abstract  = {Schema linking is widely recognized as a key factor in improving text-to-SQL performance. Supervised fine-tuning approaches enhance SQL generation quality by explicitly fine-tuning schema linking as an extraction task. However, they suffer from two major limitations: (i) The training corpus of small language models restricts their cross-domain generalization ability. (ii) The extraction-based fine-tuning process struggles to capture complex linking patterns. To address these issues, we propose GenLink, a generation-driven schema-linking framework based on multi-model learning. Instead of explicitly extracting schema elements, GenLink enhances linking through a generation-based learning process, effectively capturing implicit schema relationships. By integrating multiple small language models, GenLink improves schema-linking recall rate and ensures robust cross-domain adaptability. Experimental results on the BIRD and Spider benchmarks validate the effectiveness of GenLink, achieving execution accuracies of 67.34{\%} (BIRD), 89.7{\%} (Spider development set), and 87.8{\%} (Spider test set), demonstrating its superiority in handling diverse and complex database schemas.},
}
Markdown (Informal)
[GenLink: Generation-Driven Schema-Linking via Multi-Model Learning for Text-to-SQL](https://aclanthology.org/2025.emnlp-main.1518/) (Hao et al., EMNLP 2025)
ACL