@inproceedings{tamburini-2024-complexifying,
title = "Complexifying {BERT} Using {L}o{RA} Adapters",
author = "Tamburini, Fabio",
editor = "Dell'Orletta, Felice and
Lenci, Alessandro and
Montemagni, Simonetta and
Sprugnoli, Rachele",
booktitle = "Proceedings of the 10th Italian Conference on Computational Linguistics (CLiC-it 2024)",
month = dec,
year = "2024",
address = "Pisa, Italy",
publisher = "CEUR Workshop Proceedings",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.clicit-1.102/",
pages = "948--954",
ISBN = "979-12-210-7060-6",
    abstract = "This paper presents the first results of a pilot study on transforming a real-valued pre-trained transformer encoder into a complex-valued one. Following recent findings about pre-training with LoRA, the main idea is to employ complex-valued LoRA adapters to do the trick, continuing the pre-training of a given Italian model in order to set up the adapters. After pre-training, the proposed complex-valued model was evaluated on a standardised benchmark for Italian natural-language understanding, obtaining very encouraging results."
}
Markdown (Informal)
[Complexifying BERT Using LoRA Adapters](https://preview.aclanthology.org/fix-sig-urls/2024.clicit-1.102/) (Tamburini, CLiC-it 2024)
ACL
Fabio Tamburini. 2024. Complexifying BERT Using LoRA Adapters. In Proceedings of the 10th Italian Conference on Computational Linguistics (CLiC-it 2024), pages 948–954, Pisa, Italy. CEUR Workshop Proceedings.
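
For readers curious how the abstract's core idea might look in code, below is a minimal PyTorch sketch of a complex-valued LoRA adapter wrapped around a frozen real-valued linear layer. This is not the paper's implementation: the class name `ComplexLoRALinear`, the rank and alpha defaults, the initialisation scale, and the bias handling are all assumptions made for illustration.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class ComplexLoRALinear(nn.Module):
    """A frozen real-valued nn.Linear plus a trainable complex-valued
    low-rank (LoRA) update, so the wrapped layer maps complex inputs to
    complex outputs while the pre-trained real weights stay fixed.
    Hypothetical sketch, not the paper's code."""

    def __init__(self, base: nn.Linear, rank: int = 8, alpha: float = 16.0):
        super().__init__()
        self.base = base
        for p in self.base.parameters():
            p.requires_grad = False  # only the adapters are trained

        # Standard LoRA-style init: A small random, B zero, so the update
        # starts at zero and the model initially matches the real one.
        self.A = nn.Parameter(0.01 * torch.randn(rank, base.in_features,
                                                 dtype=torch.cfloat))
        self.B = nn.Parameter(torch.zeros(base.out_features, rank,
                                          dtype=torch.cfloat))
        self.scale = alpha / rank

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = x.to(torch.cfloat)  # promote real activations to complex
        # The frozen real weights act on the real and imaginary parts
        # independently; the (real) bias is added to the real part only.
        real = F.linear(x.real, self.base.weight, self.base.bias)
        imag = F.linear(x.imag, self.base.weight)
        # Complex low-rank update (PyTorch matmul supports cfloat).
        delta = F.linear(x, self.B @ self.A) * self.scale
        return torch.complex(real, imag) + delta

# Toy usage: complexify a single 768-dim projection layer.
layer = ComplexLoRALinear(nn.Linear(768, 768), rank=8)
out = layer(torch.randn(2, 16, 768))
print(out.dtype)  # torch.complex64
```

In this reading, continued pre-training would update only the complex factors `A` and `B`, which is consistent with the abstract's description of setting up the adapters while reusing the real-valued pre-trained weights.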