@inproceedings{mikelenic-etal-2025-fine,
title = "Fine-tuning and evaluation of {NMT} models for literary texts using {R}om{C}ro v.2.0",
author = "Mikeleni{\'c}, Bojana and
Oliver, Antoni and
Vidal, Sergi {\`A}lvarez",
editor = "Vanroy, Bram and
Lefer, Marie-Aude and
Macken, Lieve and
Ruffo, Paola and
Arenas, Ana Guerberof and
Hansen, Damien",
booktitle = "Proceedings of the Second Workshop on Creative-text Translation and Technology (CTT)",
month = jun,
year = "2025",
address = "Geneva, Switzerland",
publisher = "European Association for Machine Translation",
url = "https://preview.aclanthology.org/mtsummit-25-ingestion/2025.ctt-1.4/",
pages = "44--51",
ISBN = "978-2-9701897-6-3",
abstract = "This paper explores the fine-tuning and evaluation of neural machine translation (NMT) models for literary texts using RomCro v.2.0, an expanded multilingual and multidirectional parallel corpus. RomCro v.2.0 is based on RomCro v.1.0, but includes additional literary works, as well as texts in Catalan, making it a valuable resource for improving MT in underrepresented language pairs. Given the challenges of literary translation, where style, narrative voice, and cultural nuances must be preserved, fine-tuning on high-quality domain-specific data is essential for enhancing MT performance. We fine-tune existing NMT models with RomCro v.2.0 and evaluate their performance for six different language combinations using automatic metrics and for Spanish-Croatian and French-Catalan using manual evaluation. Results indicate that fine-tuned models outperform general-purpose systems, achieving greater fluency and stylistic coherence. These findings support the effectiveness of corpus-driven fine-tuning for literary translation and highlight the importance of curated high-quality corpus."
}
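
As a rough illustration of the corpus-driven fine-tuning and automatic-evaluation pipeline the abstract describes, the sketch below fine-tunes a pretrained NMT checkpoint on a literary bitext and scores it with BLEU. The paper does not specify its toolkit or hyperparameters; Hugging Face `transformers`, the `Helsinki-NLP/opus-mt-es-hr` checkpoint, the file paths, and the learning rate are all assumptions here, and the RomCro v.2.0 data itself must be obtained separately.

```python
# Minimal sketch: fine-tune a pretrained NMT model on a literary parallel
# corpus, then evaluate with an automatic metric (BLEU via sacrebleu).
# Toolkit, checkpoint, paths, and hyperparameters are assumptions, not
# the paper's actual setup.
import sacrebleu
from datasets import Dataset
from transformers import (
    AutoModelForSeq2SeqLM,
    AutoTokenizer,
    DataCollatorForSeq2Seq,
    Seq2SeqTrainer,
    Seq2SeqTrainingArguments,
)

BASE_MODEL = "Helsinki-NLP/opus-mt-es-hr"  # assumed es->hr baseline

def read_parallel(src_path: str, tgt_path: str) -> Dataset:
    """Load line-aligned source/target files into a datasets.Dataset."""
    with open(src_path, encoding="utf-8") as f:
        src = [line.strip() for line in f]
    with open(tgt_path, encoding="utf-8") as f:
        tgt = [line.strip() for line in f]
    assert len(src) == len(tgt), "corpus sides must be aligned"
    return Dataset.from_dict({"src": src, "tgt": tgt})

tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
model = AutoModelForSeq2SeqLM.from_pretrained(BASE_MODEL)

# Hypothetical paths to a RomCro-style literary bitext.
train = read_parallel("romcro.es-hr.es", "romcro.es-hr.hr")

def preprocess(batch):
    # text_target tokenizes the reference side as decoder labels
    return tokenizer(batch["src"], text_target=batch["tgt"],
                     truncation=True, max_length=256)

train = train.map(preprocess, batched=True, remove_columns=["src", "tgt"])

args = Seq2SeqTrainingArguments(
    output_dir="opus-mt-es-hr-romcro",
    per_device_train_batch_size=16,
    learning_rate=2e-5,   # assumed; the paper does not report this
    num_train_epochs=1,
    logging_steps=100,
)
trainer = Seq2SeqTrainer(
    model=model,
    args=args,
    train_dataset=train,
    data_collator=DataCollatorForSeq2Seq(tokenizer, model=model),
)
trainer.train()

# Automatic evaluation on a held-out literary test set (paths hypothetical).
test = read_parallel("test.es-hr.es", "test.es-hr.hr")
inputs = tokenizer(test["src"], return_tensors="pt", padding=True,
                   truncation=True, max_length=256)
inputs = {k: v.to(model.device) for k, v in inputs.items()}
outputs = model.generate(**inputs, max_length=256)
hyps = tokenizer.batch_decode(outputs, skip_special_tokens=True)
print(sacrebleu.corpus_bleu(hyps, [test["tgt"]]).score)
```

A real replication would repeat this for each of the six language directions and add manual evaluation for Spanish-Croatian and French-Catalan, as the abstract describes.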
Markdown (Informal)
[Fine-tuning and evaluation of NMT models for literary texts using RomCro v.2.0](https://aclanthology.org/2025.ctt-1.4/) (Mikelenić et al., CTT 2025)