@inproceedings{mouravieff-etal-2025-structural,
  title     = {Structural Deep Encoding for Table Question Answering},
  author    = {Mouravieff, Rapha{\"e}l and
               Piwowarski, Benjamin and
               Lamprier, Sylvain},
  editor    = {Che, Wanxiang and
               Nabende, Joyce and
               Shutova, Ekaterina and
               Pilehvar, Mohammad Taher},
  booktitle = {Findings of the Association for Computational Linguistics: {ACL} 2025},
  month     = jul,
  year      = {2025},
  address   = {Vienna, Austria},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.findings-acl.121/},
  pages     = {2389--2402},
  isbn      = {979-8-89176-256-5},
  abstract  = {Although Transformers-based architectures excel at processing textual information, their naive adaptation for tabular data often involves flattening the table structure. This simplification can lead to the loss of essential inter-dependencies between rows, columns, and cells, while also posing scalability challenges for large tables. To address these issues, prior works have explored special tokens, structured embeddings, and sparse attention patterns. In this paper, we conduct a comprehensive analysis of tabular encoding techniques used in QA, which highlights the crucial role of attention sparsity in preserving structural information of tables. We also introduce a set of novel sparse attention mask designs for tabular data, that not only enhance computational efficiency but also preserve structural integrity, leading to better overall performance.},
}
@comment{
Markdown (Informal)
[Structural Deep Encoding for Table Question Answering](https://aclanthology.org/2025.findings-acl.121/) (Mouravieff et al., Findings 2025)
ACL
- Raphaël Mouravieff, Benjamin Piwowarski, and Sylvain Lamprier. 2025. Structural Deep Encoding for Table Question Answering. In Findings of the Association for Computational Linguistics: ACL 2025, pages 2389–2402, Vienna, Austria. Association for Computational Linguistics.
}