@inproceedings{chen-etal-2024-improving-nmt,
title = "Improving {NMT} from a Low-Resource Source Language: A Use Case from {C}atalan to {C}hinese via {S}panish",
author = "Chen, Yongjian and
Toral, Antonio and
Li, Zhijian and
Farr{\'u}s, Mireia",
editor = "Scarton, Carolina and
Prescott, Charlotte and
Bayliss, Chris and
Oakley, Chris and
Wright, Joanna and
Wrigley, Stuart and
Song, Xingyi and
Gow-Smith, Edward and
Bawden, Rachel and
S{\'a}nchez-Cartagena, V{\'i}ctor M and
Cadwell, Patrick and
Lapshinova-Koltunski, Ekaterina and
Cabarr{\~a}o, Vera and
Chatzitheodorou, Konstantinos and
Nurminen, Mary and
Kanojia, Diptesh and
Moniz, Helena",
booktitle = "Proceedings of the 25th Annual Conference of the European Association for Machine Translation (Volume 1)",
month = jun,
year = "2024",
address = "Sheffield, UK",
publisher = "European Association for Machine Translation (EAMT)",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.eamt-1.20/",
pages = "229--245",
abstract = "The effectiveness of neural machine translation is markedly constrained in low-resource scenarios, where the scarcity of parallel data hampers the development of robust models. This paper focuses on the scenario where the source language is low-resourceand there exists a related high-resource language, for which we introduce a novel approach that combines pivot translation and multilingual training. As a use case we tackle the automatic translation from Catalan to Chinese, using Spanish as an additional language. Our evaluation, conducted on the FLORES-200 benchmark, compares our new approach against a vanilla baseline alongside other models representing various low-resource techniques in the Catalan-to-Chinese context. Experimental results highlight the efficacy of our proposed method, which outperforms existing models, notably demonstrating significant improvements both in translation quality and in lexical diversity."
}