@inproceedings{li-etal-2024-linchance,
title = "{L}in{C}hance-{NTU} for Unconstrained {WMT}2024 Literary Translation",
author = "Li, Kechen and
Tao, Yaotian and
Huang, Hongyi and
Ji, Tianbo",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Ninth Conference on Machine Translation",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.wmt-1.99/",
doi = "10.18653/v1/2024.wmt-1.99",
pages = "987--992",
abstract = "The rapid growth of deep learning has spurred significant advancements across industries, par- ticularly in machine translation through large language models (LLMs). However, translat- ing literary still presents challenges, including cross-cultural nuances, complex language struc- tures, metaphorical expressions, and cultural differences. To address these issues, this study utilizes the Llama and Phi models using both LoRA and full-parameter techniques, along-side a prompt-based translation system. Full-parameter tuning of the Llama-3-Chinese-8B-Instruct model was unsuccessful due to mem-ory constraints. In terms of the WMT task, the fully fine-tuned Phi 3 model was selected for submission due to its more natural and flu-ent translations. Nonetheless, results showed that LoRA and the prompt-based system sig- nificantly improved the Llama3 model{'}s perfor- mance, surpassing other models in BLEU and ROUGE evaluations."
}
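
The abstract contrasts LoRA with full-parameter fine-tuning, noting that the latter failed on the 8B Llama model due to memory constraints. Below is a minimal sketch of the kind of LoRA setup this implies, using the Hugging Face PEFT library; the checkpoint id, target modules, and hyperparameters are illustrative assumptions, not the authors' reported configuration.

```python
# Hedged sketch: LoRA adaptation of a causal LM with Hugging Face PEFT.
# Model name and hyperparameters are assumptions for illustration only.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import LoraConfig, get_peft_model

model_name = "hfl/llama-3-chinese-8b-instruct"  # hypothetical checkpoint id

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# LoRA trains small low-rank update matrices instead of all ~8B weights,
# which is why it can fit in memory where full-parameter tuning cannot.
lora_config = LoraConfig(
    r=8,                                   # rank of the low-rank updates
    lora_alpha=16,                         # scaling factor for the updates
    target_modules=["q_proj", "v_proj"],   # attention projections to adapt
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
)

model = get_peft_model(model, lora_config)
model.print_trainable_parameters()  # typically well under 1% of all parameters
```

After wrapping, the model can be passed to a standard `transformers` `Trainer` on a parallel literary corpus; only the adapter weights receive gradients.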