@inproceedings{sushko-etal-2025-skipclm,
  title     = {{S}kip{CLM}: Enhancing Crosslingual Alignment of Decoder Transformer Models via Contrastive Learning and Skip Connection},
  author    = {Sushko, Nikita and
               Panchenko, Alexander and
               Tutubalina, Elena},
  editor    = {Ebrahimi, Abteen and
               Haider, Samar and
               Liu, Emmy and
               Haider, Sammar and
               Leonor Pacheco, Maria and
               Wein, Shira},
  booktitle = {Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 4: Student Research Workshop)},
  month     = apr,
  year      = {2025},
  address   = {Albuquerque, USA},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.naacl-srw.50/},
  pages     = {517--528},
  isbn      = {979-8-89176-192-6},
  abstract  = {This paper proposes SkipCLM, a novel method for improving multilingual machine translation in Decoder Transformers. We augment contrastive learning for cross-lingual alignment with a trainable skip connection to preserve information crucial for accurate target language generation. Experiments with XGLM-564M on the Flores-101 benchmark demonstrate improved performance, particularly for en-de and en-zh direction translations, compared to direct sequence-to-sequence training and existing contrastive learning methods. Code is available at: https://github.com/s-nlp/skipclm.},
}
Markdown (Informal)
[SkipCLM: Enhancing Crosslingual Alignment of Decoder Transformer Models via Contrastive Learning and Skip Connection](https://aclanthology.org/2025.naacl-srw.50/) (Sushko et al., NAACL 2025)
ACL