@inproceedings{fan-etal-2025-towards,
title = "Towards More Efficient Post-training via {F}ourier Domain Adapter Framework",
author = "Fan, Yijia and
Zhang, Jusheng and
Wang, Keze",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Ros{\'e}, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.328/",
doi = "10.18653/v1/2025.findings-emnlp.328",
pages = "6175--6193",
ISBN = "979-8-89176-335-7",
abstract = "We introduce Fourier Domain Adapter (FDA), a novel and parameter-efficient framework for fine-tuning large-scale pre-trained language models. FDA reparameterizes the core projection operation of the adapter module directly in the Fourier domain. This involves transforming the input features via discrete Fourier transform (DFT), applying sparse learnable complex modulations in frequency space, and then back-transforming via inverse DFT, supplemented by highly compact auxiliary linear layers. This approach significantly reduces the number of trainable parameters while enhancing the model{'}s ability to capture salient frequency-based semantic information. Comprehensive experiments on GLUE, E2E NLG, and instruction tuning benchmarks show that our FDA consistently outperforms existing parameter-efficient fine-tuning (PEFT) methods. It can achieve better performance with nearly 100x fewer training parameters than traditional fine-tuning methods such as LoRA and AdapterH. Our results demonstrate that FDA is a robust and efficient solution for developing efficient and powerful language models."
}
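The abstract describes the FDA mechanism at a high level: transform the input features with a DFT, apply learnable complex modulations in frequency space, invert the transform, and pass the result through compact auxiliary linear layers. The following is a minimal sketch of an adapter in that spirit; the class name, shapes, residual placement, and dense (non-sparse) modulation are assumptions for illustration, not the authors' implementation.

```python
import torch
import torch.nn as nn


class FourierDomainAdapterSketch(nn.Module):
    """Hypothetical adapter branch: rFFT -> learnable complex modulation -> irFFT -> small linear."""

    def __init__(self, hidden_dim: int, scale: float = 0.1):
        super().__init__()
        freq_dim = hidden_dim // 2 + 1  # rFFT length for a real-valued feature vector
        # Learnable complex modulation, stored as separate real/imaginary parts.
        # (The paper's modulations are sparse; this sketch keeps them dense for brevity.)
        self.mod_real = nn.Parameter(torch.ones(freq_dim))
        self.mod_imag = nn.Parameter(torch.zeros(freq_dim))
        # Compact auxiliary linear layer applied after the inverse transform.
        self.proj = nn.Linear(hidden_dim, hidden_dim, bias=False)
        nn.init.zeros_(self.proj.weight)  # adapter branch starts as a no-op
        self.scale = scale

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, hidden_dim); transform along the feature dimension.
        freq = torch.fft.rfft(x, dim=-1)
        freq = freq * torch.complex(self.mod_real, self.mod_imag)
        modulated = torch.fft.irfft(freq, n=x.shape[-1], dim=-1)
        return x + self.scale * self.proj(modulated)  # residual adapter output
```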