@inproceedings{hsia-2025-cubicpower,
title = "Cubicpower Agentic Mixture of Experts({AM}o{E}) Framework for Fine-Tuning {NLP} Tasks Without {GPU}s",
author = "Hsia, Chao-Yih",
editor = "Chang, Kai-Wei and
Lu, Ke-Han and
Yang, Chih-Kai and
Tam, Zhi-Rui and
Chang, Wen-Yu and
Wang, Chung-Che",
booktitle = "Proceedings of the 37th Conference on Computational Linguistics and Speech Processing (ROCLING 2025)",
month = nov,
year = "2025",
address = "National Taiwan University, Taipei City, Taiwan",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/dashboard/2025.rocling-main.2/",
pages = "11--19",
ISBN = "979-8-89176-379-1",
abstract = "The rise of Green AI emphasizes minimizing the environmental footprint of AI systems. This paper explores a no-GPU agentic architecture for fine-tuning NLP tasks. It presents our initial experiments applying these no-GPU algorithms in pretraining and fine-tuning tasks on our CubicPower agentic mixture of experts (AMoE) framework, with the aim of contributing to more sustainable AI development. In contrast to the training procedures of neural networks, which consume significant power, the AMoE framework{'}s primary contribution toward power savings is that it requires no training process. We explore non-neural-network methods for solving NLP tasks and employ similarity measures to match predefined patterns for use in a RAG database."
}