@inproceedings{xu-etal-2025-principled,
    title = "Principled Understanding of Generalization for Generative {Transformer} Models in Arithmetic Reasoning Tasks",
    author = "Xu, Xingcheng and
      Zhao, Zibo and
      Zhang, Haipeng and
      Yang, Yanqing",
    editor = "Che, Wanxiang and
      Nabende, Joyce and
      Shutova, Ekaterina and
      Pilehvar, Mohammad Taher",
    booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
    month = jul,
    year = "2025",
    address = "Vienna, Austria",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.acl-long.235/",
    pages = "4721--4747",
    isbn = "979-8-89176-251-0",
    abstract = "Transformer-based models excel in various tasks but their generalization capabilities, especially in arithmetic reasoning, remain incompletely understood. Arithmetic tasks provide a controlled framework to explore these capabilities, yet performance anomalies persist, such as inconsistent effectiveness in multiplication and erratic generalization in modular addition (e.g., modulo 100 vs. 101). This paper develops a unified theoretical framework for understanding the generalization behaviors of transformers in arithmetic tasks, focusing on length generalization. Through detailed analysis of addition, multiplication, and modular operations, we reveal that translation invariance in addition aligns with relative positional encoding for robust generalization, while base mismatch in modular operations disrupts this alignment. Experiments across GPT-family models validate our framework, confirming its ability to predict generalization behaviors. Our work highlights the importance of task structure and training data distribution for achieving data-efficient and structure-aware training, providing a systematic approach to understanding of length generalization in transformers."
}
@comment{Markdown (Informal):
[Principled Understanding of Generalization for Generative Transformer Models in Arithmetic Reasoning Tasks](https://aclanthology.org/2025.acl-long.235/) (Xu et al., ACL 2025)
ACL
}