@inproceedings{accou-poelman-2026-tipa,
  title     = {{TIPA}: Typologically Informed Parameter Aggregation},
  author    = {Accou, Stef and
               Poelman, Wessel},
  editor    = {Demberg, Vera and
               Inui, Kentaro and
               Marquez, Llu{\'\i}s},
  booktitle = {Findings of the {Association} for {Computational} {Linguistics}: {EACL} 2026},
  month     = mar,
  year      = {2026},
  address   = {Rabat, Morocco},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.119/},
  pages     = {2253--2267},
  isbn      = {979-8-89176-386-9},
  abstract  = {Massively multilingual language models enable cross-lingual generalization but underperform on low-resource and unseen languages. While adapter-based fine-tuning offers a parameter-efficient solution, training language-specific adapters at scale remains costly. We introduce Typologically Informed Parameter Aggregation (TIPA), a training-free framework that constructs proxy language adapters by aggregating existing ones, weighted by typological similarity. Integrated into the MAD-X architecture, these proxies enable zero-shot cross-lingual transfer without additional training. We evaluate TIPA on five NLP tasks and over 230 languages. TIPA consistently outperforms baselines such as English-only fine-tuning and selecting the typologically closest-language adapter, with the largest gains for languages lacking dedicated adapters. Our results demonstrate that typologically informed aggregation provides a viable alternative to language-specific modules without any training needed.},
}
Markdown (Informal)
[TIPA: Typologically Informed Parameter Aggregation](https://preview.aclanthology.org/ingest-eacl/2026.findings-eacl.119/) (Accou & Poelman, Findings of EACL 2026)
ACL