@inproceedings{wang-etal-2023-pre-trained,
  title     = {Pre-trained Model In {Ancient-Chinese-to-Modern-Chinese} Machine Translation},
  author    = {Wang, Jiahui and
               Zhang, Xuqin and
               Li, Jiahuan and
               Huang, Shujian},
  booktitle = {Proceedings of {ALT2023}: Ancient Language Translation Workshop},
  month     = sep,
  year      = {2023},
  address   = {Macau SAR, China},
  publisher = {Asia-Pacific Association for Machine Translation},
  url       = {https://aclanthology.org/2023.alt-1.3/},
  pages     = {23--28},
  abstract  = {This paper presents an analysis of the pre-trained Transformer model Neural Machine Translation (NMT) for the Ancient-Chinese-to-Modern-Chinese machine translation task.},
}
Markdown (Informal)
[Pre-trained Model In Ancient-Chinese-to-Modern-Chinese Machine Translation](https://aclanthology.org/2023.alt-1.3/) (Wang et al., ALT 2023)
ACL