@inproceedings{gkouti-etal-2024-try,
    title = "Should {I} try multiple optimizers when fine-tuning a pre-trained Transformer for {NLP} tasks? Should {I} tune their hyperparameters?",
    author = "Gkouti, Nefeli  and
      Malakasiotis, Prodromos  and
      Toumpis, Stavros  and
      Androutsopoulos, Ion",
    editor = "Graham, Yvette  and
      Purver, Matthew",
    booktitle = "Proceedings of the 18th Conference of the European Chapter of the Association for Computational Linguistics (Volume 1: Long Papers)",
    month = mar,
    year = "2024",
    address = "St. Julian{'}s, Malta",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.eacl-long.157/",
    pages = "2555--2574"
}