@inproceedings{ba-etal-2024-fill,
title = "Fill In The Gaps: Model Calibration and Generalization with Synthetic Data",
author = "Ba, Yang and
Mancenido, Michelle V and
Pan, Rong",
editor = "Al-Onaizan, Yaser and
Bansal, Mohit and
Chen, Yun-Nung",
booktitle = "Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.emnlp-main.955/",
doi = "10.18653/v1/2024.emnlp-main.955",
pages = "17211--17225",
abstract = "As machine learning models continue to swiftly advance, calibrating their performance has become a major concern prior to practical and widespread implementation. Most existing calibration methods often negatively impact model accuracy due to the lack of diversity of validation data, resulting in reduced generalizability. To address this, we propose a calibration method that incorporates synthetic data without compromising accuracy. We derive the expected calibration error (ECE) bound using the Probably Approximately Correct (PAC) learning framework. Large language models (LLMs), known for their ability to mimic real data and generate text with mixed class labels, are utilized as a synthetic data generation strategy to lower the ECE bound and improve model accuracy on real test data. Additionally, we propose data generation mechanisms for efficient calibration. Testing our method on four different natural language processing tasks, we observed an average up to 34{\%} increase in accuracy and 33{\%} decrease in ECE."
}
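
The abstract's headline metric is expected calibration error (ECE). For readers unfamiliar with it, below is a minimal sketch of the standard binned ECE estimator; the function name, binning scheme, and NumPy usage are illustrative assumptions, not taken from the paper's code.

```python
import numpy as np

def expected_calibration_error(confidences, correct, n_bins=10):
    """Binned ECE: the weighted average, over equal-width confidence
    bins, of |mean confidence - accuracy| within each bin.
    Hypothetical helper for illustration, not from the paper."""
    confidences = np.asarray(confidences, dtype=float)
    correct = np.asarray(correct, dtype=float)
    edges = np.linspace(0.0, 1.0, n_bins + 1)
    ece = 0.0
    for lo, hi in zip(edges[:-1], edges[1:]):
        mask = (confidences > lo) & (confidences <= hi)
        if mask.any():
            # Gap between average confidence and accuracy in this bin,
            # weighted by the fraction of samples falling in the bin.
            gap = abs(confidences[mask].mean() - correct[mask].mean())
            ece += mask.mean() * gap
    return ece

# Toy usage: overconfident in one bin, underconfident in another.
conf = np.array([0.9, 0.9, 0.6, 0.6, 0.6])
hit = np.array([1, 1, 1, 0, 1])
print(expected_calibration_error(conf, hit))  # ~0.08
```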