@inproceedings{pang-etal-2024-uncovering,
title = "Uncovering Limitations of Large Language Models in Information Seeking from Tables",
author = "Pang, Chaoxu and
Cao, Yixuan and
Yang, Chunhao and
Luo, Ping",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2024",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.findings-acl.82/",
doi = "10.18653/v1/2024.findings-acl.82",
pages = "1388--1409",
abstract = "Tables are recognized for their high information density and widespread usage, serving as essential sources of information. Seeking information from tables (TIS) is a crucial capability for Large Language Models (LLMs), serving as the foundation of knowledge-based Q{\&}A systems. However, this field presently suffers from an absence of thorough and reliable evaluation. This paper introduces a more reliable benchmark for Table Information Seeking (TabIS). To avoid the unreliable evaluation caused by text similarity-based metrics, TabIS adopts a single-choice question format (with two options per question) instead of a text generation format. We establish an effective pipeline for generating options, ensuring their difficulty and quality. Experiments conducted on 12 LLMs reveal that while the performance of GPT-4-turbo is marginally satisfactory, both other proprietary and open-source models perform inadequately. Further analysis shows that LLMs exhibit a poor understanding of table structures, and struggle to balance between TIS performance and robustness against pseudo-relevant tables (common in retrieval-augmented systems). These findings uncover the limitations and potential challenges of LLMs in seeking information from tables. We release our data and code to facilitate further research in this field."
}