@inproceedings{zhao-etal-2023-dutnlp,
title = "{DUTNLP} System for the {WMT}2023 Discourse-Level Literary Translation",
author = "Zhao, Anqi and
Huang, Kaiyu and
Yu, Hao and
Huang, Degen",
editor = "Koehn, Philipp and
Haddow, Barry and
Kocmi, Tom and
Monz, Christof",
booktitle = "Proceedings of the Eighth Conference on Machine Translation",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Add-Cong-Liu-Florida-Atlantic-University-author-id/2023.wmt-1.31/",
doi = "10.18653/v1/2023.wmt-1.31",
pages = "296--301",
abstract = "This paper describes the submission of DUTNLP Lab submission to WMT23 Discourse-Level Literary Translation in the Chinese to English translation direction under unconstrained conditions. Our primary system aims to leverage a large language model with various prompt strategies, which can fully investigate the potential capabilities of large language models for discourse-level neural machine translation. Moreover, we test a widely used discourse-level machine translation model, G-transformer, with different training strategies. In our experimental results, the method with large language models achieves a BLEU score of 28.16, while the fine-tuned method scores 25.26. These findings indicate that selecting appropriate prompt strategies based on large language models can significantly improve translation performance compared to traditional model training methods."
}
Markdown (Informal)
[DUTNLP System for the WMT2023 Discourse-Level Literary Translation](https://aclanthology.org/2023.wmt-1.31/) (Zhao et al., WMT 2023)