@inproceedings{zeng-etal-2023-bit,
    title     = {{BIT}-{ACT}: An {A}ncient {C}hinese Translation System Using Data Augmentation},
    author    = {Zeng, Li and
      Tian, Yanzhi and
      Shan, Yingyu and
      Guo, Yuhang},
    booktitle = {Proceedings of ALT2023: Ancient Language Translation Workshop},
    month     = sep,
    year      = {2023},
    address   = {Macau SAR, China},
    publisher = {Asia-Pacific Association for Machine Translation},
    url       = {https://aclanthology.org/2023.alt-1.6/},
    pages     = {43--47},
    abstract  = {This paper describes a translation model for ancient Chinese to modern Chinese and English for the Evahan 2023 competition, a subtask of the Ancient Language Translation 2023 challenge. During the training of our model, we applied various data augmentation techniques and used SiKu-RoBERTa as part of our model architecture. The results indicate that back translation improves the model{'}s performance, but double back translation introduces noise and harms the model{'}s performance. Fine-tuning on the original dataset can be helpful in solving the issue.}
}
Markdown (Informal)
[BIT-ACT: An Ancient Chinese Translation System Using Data Augmentation](https://aclanthology.org/2023.alt-1.6/) (Zeng et al., ALT 2023)
ACL