@inproceedings{byun-lee-2025-towards,
title = "Towards Federated Low-Rank Adaptation of Language Models with Rank Heterogeneity",
author = "Byun, Yuji and
Lee, Jaeho",
editor = "Chiruzzo, Luis and
Ritter, Alan and
Wang, Lu",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 2: Short Papers)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.naacl-short.30/",
pages = "356--362",
ISBN = "979-8-89176-190-2",
abstract = "Low-rank adaptation (LoRA) offers an efficient alternative to full-weight adaptation in federated fine-tuning of language models, significantly reducing computational costs. By adjusting ranks for each client, federated LoRA enables flexible resource allocation. However, we observe that heterogeneous ranks among clients lead to unstable performance. Our analysis attributes this instability to the conventional zero-padding aggregation strategy, which dilutes information from high-rank clients during model aggregation. To address this issue, we propose a replication-based padding strategy that better retains valuable information from clients with high-quality data. Empirically, this approach accelerates convergence and enhances the global model{'}s predictive performance."
}
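
The abstract describes the aggregation problem but not the exact padding rules, so the NumPy sketch below is illustrative only, not the authors' implementation. It contrasts zero-padding aggregation of heterogeneous-rank LoRA factors with one plausible replication-based padding: existing rank components are cycled to fill the padded slots and rescaled so each client's update B @ A is preserved before FedAvg-style averaging. The helper names zero_pad, replicate_pad, and aggregate are hypothetical.

```python
import numpy as np

def zero_pad(A, B, r_max):
    # Zero-pad LoRA factors A (r x d_in) and B (d_out x r) up to rank r_max.
    # The product B @ A is unchanged, but averaging the padded factors across
    # clients dilutes the extra components contributed by high-rank clients.
    r, d_in = A.shape
    d_out = B.shape[0]
    A_pad = np.zeros((r_max, d_in)); A_pad[:r] = A
    B_pad = np.zeros((d_out, r_max)); B_pad[:, :r] = B
    return A_pad, B_pad

def replicate_pad(A, B, r_max):
    # Hypothetical replication rule (an assumption, not the paper's scheme):
    # fill padded slots by cycling through the client's existing rank
    # components, rescaling B's columns so B_pad @ A_pad == B @ A.
    r = A.shape[0]
    idx = np.array([i % r for i in range(r_max)])   # 0..r-1, then repeats
    counts = np.bincount(idx, minlength=r).astype(float)
    A_pad = A[idx]                                  # replicated rows of A
    B_pad = B[:, idx] / counts[idx]                 # rescaled columns of B
    return A_pad, B_pad

def aggregate(clients, pad_fn):
    # FedAvg-style aggregation: pad every client's factors to the largest
    # rank present, then average factor-wise.
    r_max = max(A.shape[0] for A, _ in clients)
    padded = [pad_fn(A, B, r_max) for A, B in clients]
    A_glob = np.mean([A for A, _ in padded], axis=0)
    B_glob = np.mean([B for _, B in padded], axis=0)
    return A_glob, B_glob

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    d_in, d_out = 8, 8
    clients = [(rng.normal(size=(r, d_in)), rng.normal(size=(d_out, r)))
               for r in (2, 4, 8)]                  # heterogeneous ranks
    for pad_fn in (zero_pad, replicate_pad):
        A_g, B_g = aggregate(clients, pad_fn)
        print(pad_fn.__name__, np.linalg.norm(B_g @ A_g))
```

In this sketch, replication keeps each low-rank client's full update active in every averaged slot rather than mixing in zero rows; the paper's actual weighting and component-selection details require the full text.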