@inproceedings{leng-etal-2026-token,
  title = "Token-Wise Kernels ({TWiKers}) for Vicinity-Aware Attention in Transformers",
  author = "Leng, Kuangdai and
    Bi, Jia and
    Pinilla, Samuel and
    Cha, Jaehoon",
  editor = "Demberg, Vera and
    Inui, Kentaro and
    Marquez, Llu{\'i}s",
  booktitle = "Findings of the {A}ssociation for {C}omputational {L}inguistics: {EACL} 2026",
  month = mar,
  year = "2026",
  address = "Rabat, Morocco",
  publisher = "Association for Computational Linguistics",
  url = "https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.306/",
  pages = "5819--5835",
  isbn = "979-8-89176-386-9",
  abstract = "Self-attention mechanisms in transformers enable tokens to interact across a sequence but lack an explicit inductive bias to capture local contextual dependencies, an inherent characteristic of natural languages. We propose Token-Wise Kernels (TWiKers), a novel enhancement to transformers that learn token-specific convolutional kernels applied to the keys or values. Each token is assigned a small kernel, initialized to the ``Central Dirac'' (e.g., [0,1,0] for size=3), meaning the token ``bears'' the attention from all other tokens alone. During training, these kernels adapt, and greater deviation from the Central Dirac indicates stronger attention redistribution to neighboring tokens. This introduces the first transformer weights with direct semantic interpretability. Our experiments show that content words (e.g., nouns and verbs) retain self-focus, while function words (e.g., prepositions and conjunctions) shift attention toward their neighbors, aligning with their syntactic and semantic roles. We further apply TWiKers to distinguish literary genres, historical periods, and authors, demonstrating their effectiveness in capturing high-level stylistic patterns. Finally, we demonstrate the potential of TWiKers as an effective inductive bias to improve transformer training, validated across a range of downstream tasks."
}
Markdown (Informal)
[Token-Wise Kernels (TWiKers) for Vicinity-Aware Attention in Transformers](https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.306/) (Leng et al., Findings 2026)
ACL