@inproceedings{seo-etal-2025-mofe,
title = "{M}o{FE}: Mixture of Frozen Experts Architecture",
author = "Seo, Jean and
Kim, Jaeyoon and
Shin, Hyopil",
editor = "Chen, Weizhu and
Yang, Yi and
Kachuee, Mohammad and
Fu, Xue-Yong",
booktitle = "Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 3: Industry Track)",
month = apr,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.naacl-industry.28/",
pages = "340--348",
ISBN = "979-8-89176-194-0",
abstract = "We propose the Mixture of Frozen Experts (MoFE) architecture, which integrates Parameter-efficient Fine-tuning (PEFT) and the Mixture of Experts (MoE) architecture to enhance both training efficiency and model scalability. By freezing the Feed Forward Network (FFN) layers within the MoE framework, MoFE significantly reduces the number of trainable parameters, improving training efficiency while still allowing for effective knowledge transfer from the expert models. This facilitates the creation of models proficient in multiple domains. We conduct experiments to evaluate the trade-offs between performance and efficiency, compare MoFE with other PEFT methodologies, assess the impact of domain expertise in the constituent models, and determine the optimal training strategy. The results show that, although there may be some trade-offs in performance, the efficiency gains are substantial, making MoFE a reasonable solution for real-world, resource-constrained environments."
}
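
The core mechanism described in the abstract lends itself to a short illustration: in a routed MoE block, freeze the expert FFN parameters so that only the router (and any other non-FFN weights) receives gradients. Below is a minimal sketch of that idea in PyTorch; the module names, dimensions, and top-k routing scheme are illustrative assumptions, not the paper's implementation.

```python
# Minimal sketch of the MoFE idea: a top-k routed MoE block whose expert
# FFNs are frozen, leaving only the router trainable. Names and routing
# details are illustrative, not taken from the paper.
import torch
import torch.nn as nn
import torch.nn.functional as F

class FFNExpert(nn.Module):
    """A plain two-layer feed-forward expert."""
    def __init__(self, d_model: int, d_ff: int):
        super().__init__()
        self.up = nn.Linear(d_model, d_ff)
        self.down = nn.Linear(d_ff, d_model)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.down(F.gelu(self.up(x)))

class MoFEBlock(nn.Module):
    def __init__(self, d_model: int, d_ff: int, n_experts: int, top_k: int = 2):
        super().__init__()
        self.router = nn.Linear(d_model, n_experts)  # stays trainable
        self.experts = nn.ModuleList(
            [FFNExpert(d_model, d_ff) for _ in range(n_experts)]
        )
        self.top_k = top_k
        # The defining MoFE step: freeze every expert FFN so its
        # (pretrained) knowledge is reused but never updated.
        for p in self.experts.parameters():
            p.requires_grad = False

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (tokens, d_model). Route each token to its top-k experts
        # and mix their outputs with softmax-normalized router scores.
        logits = self.router(x)                        # (tokens, n_experts)
        weights, idx = logits.topk(self.top_k, dim=-1)
        weights = weights.softmax(dim=-1)
        out = torch.zeros_like(x)
        for slot in range(self.top_k):
            for e, expert in enumerate(self.experts):
                mask = idx[:, slot] == e
                if mask.any():
                    out[mask] += weights[mask, slot, None] * expert(x[mask])
        return out

if __name__ == "__main__":
    block = MoFEBlock(d_model=512, d_ff=2048, n_experts=4)
    trainable = sum(p.numel() for p in block.parameters() if p.requires_grad)
    total = sum(p.numel() for p in block.parameters())
    print(f"trainable: {trainable} / total: {total}")  # router only
```

Counting parameters by `requires_grad` makes the abstract's efficiency claim concrete: in this sketch only the router's weights are trainable, while the frozen expert FFNs account for nearly all of the block's parameters.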