@inproceedings{ha-etal-2026-rb,
  title     = {{RB-LoRA}: Rank-Balanced Aggregation for Low-Rank Adaptation with Federated Fine-Tuning},
  author    = {Ha, Sihyeon and
               Oh, Yongjeong and
               Jeon, Yo-Seb},
  editor    = {Demberg, Vera and
               Inui, Kentaro and
               M{\`a}rquez, Llu{\'\i}s},
  booktitle = {Findings of the Association for Computational Linguistics: {EACL} 2026},
  month     = mar,
  year      = {2026},
  address   = {Rabat, Morocco},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-ccl/2026.findings-eacl.88/},
  pages     = {1737--1746},
  isbn      = {979-8-89176-386-9},
  abstract  = {Federated fine-tuning of foundation models is impeded by the need to communicate billions of parameters. Low-rank adaptation (LoRA) alleviates this by updating only compact adapter matrices. However, varying client device capabilities lead to different adapter ranks, causing rank heterogeneity that undermines aggregation, and existing reconciliation methods still incur bias or inefficiency. To address this challenge, we propose RB-LoRA, a principled rank-balanced aggregation framework that decomposes each update into rank-wise components and aligns them using analytically derived weights. Experiments on both language and vision models demonstrate consistent improvements under one and three rounds of communication in federated learning.},
}

Markdown (Informal)
[RB-LoRA: Rank-Balanced Aggregation for Low-Rank Adaptation with Federated Fine-Tuning](https://preview.aclanthology.org/ingest-ccl/2026.findings-eacl.88/) (Ha et al., Findings 2026)
ACL