@inproceedings{jiao-etal-2025-spe,
title = "{SPE} Attention: Making Attention Equivariant to Semantic-Preserving Permutation for Code Processing",
author = "Jiao, Chengyu and
Chen, Shuhao and
Zhang, Yu",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/name-variant-enfa-fane/2025.emnlp-main.332/",
doi = "10.18653/v1/2025.emnlp-main.332",
pages = "6566--6579",
ISBN = "979-8-89176-332-6",
    abstract = "Code serves as the fundamental language for humans to communicate with machines, and recent advancements have trained various Transformer-based models to process code. A unique symmetry of code is its semantic-preserving permutation, which allows certain lines to be rearranged without altering the overall meaning. To capture such symmetry, we propose a novel attention mechanism that incorporates semantic-preserving permutation equivariance, called the SPE attention. By leveraging the symmetry relationships within code, we introduce a directed layered graph to represent the code structure, and this graph is then summarized into a symmetry mask. The SPE attention integrates these symmetry masks, granting the model equivariance to semantic-preserving permutations. Experiments on various code-related tasks, including code summarization and error detection, demonstrate the effectiveness of the proposed SPE attention."
}
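
The abstract describes attention that stays equivariant to reorderings of mutually independent code lines by folding a graph-derived symmetry mask into the attention scores. The sketch below is a hypothetical, simplified illustration of that idea (it is not the authors' released implementation): the only position-dependent term is a bias looked up from a pairwise relation mask, so lines within the same permutable layer receive identical structural bias and swapping them merely permutes the output rows.

```python
# Conceptual sketch (assumed formulation, not the paper's code): self-attention
# whose structural bias comes from a "symmetry mask" built from a layered
# dependency graph. All line pairs inside one permutable layer share a relation
# id, so permuting those lines permutes the output rows identically.
import torch
import torch.nn.functional as F

def spe_style_attention(x, w_q, w_k, w_v, symmetry_mask, bias_table):
    """x: (n_lines, d) line embeddings.
    symmetry_mask: (n_lines, n_lines) long tensor of relation ids, e.g.
        0 = same permutable layer, 1 = row's layer precedes column's, 2 = follows.
    bias_table: (num_relations,) scalar bias per relation id (learnable in practice)."""
    q, k, v = x @ w_q, x @ w_k, x @ w_v
    scores = q @ k.T / q.size(-1) ** 0.5         # content term: permutation-equivariant
    scores = scores + bias_table[symmetry_mask]  # structural term read off the mask
    return F.softmax(scores, dim=-1) @ v

# Toy usage: lines 0 and 1 sit in the same layer (reorderable); line 2 depends on both.
d = 8
x = torch.randn(3, d)
w_q, w_k, w_v = (torch.randn(d, d) for _ in range(3))
mask = torch.tensor([[0, 0, 1],
                     [0, 0, 1],
                     [2, 2, 0]])
bias = torch.randn(3)
print(spe_style_attention(x, w_q, w_k, w_v, mask, bias).shape)  # torch.Size([3, 8])
```

Because the mask assigns the same relation id to every pair within a permutable layer, rebuilding it for a semantics-preserving reordering yields the same matrix, which is what makes the output equivariant rather than order-sensitive.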