@inproceedings{li-etal-2023-kyb,
title = "{KYB} General Machine Translation Systems for {WMT}23",
author = "Li, Ben and
Matsuzaki, Yoko and
Kalkar, Shivam",
editor = "Koehn, Philipp and
Haddow, Barry and
Kocmi, Tom and
Monz, Christof",
booktitle = "Proceedings of the Eighth Conference on Machine Translation",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.wmt-1.10/",
doi = "10.18653/v1/2023.wmt-1.10",
pages = "137--142",
    abstract = "This paper describes our approach to constructing a neural machine translation system for the WMT 2023 general machine translation shared task. Our model is based on the Transformer architecture's base settings. We optimize system performance through various strategies. Enhancing our model's capabilities involves fine-tuning the pretrained model with an extended dataset. To further elevate translation quality, specialized pre- and post-processing techniques are deployed. Our central focus is on efficient model training, aiming for exceptional accuracy through the synergy of a compact model and curated data. We also performed ensembling augmented by N-best ranking, for both directions of English to Japanese and Japanese to English translation."
}
Markdown (Informal)
[KYB General Machine Translation Systems for WMT23](https://aclanthology.org/2023.wmt-1.10/) (Li et al., WMT 2023)
ACL
Ben Li, Yoko Matsuzaki, and Shivam Kalkar. 2023. KYB General Machine Translation Systems for WMT23. In Proceedings of the Eighth Conference on Machine Translation, pages 137–142, Singapore. Association for Computational Linguistics.