@inproceedings{menglong-yanliang-2024-simclnmt,
    title = "{S}im{CLNMT}: A Simple Contrastive Learning Method for Enhancing Neural Machine Translation Quality",
    author = "Xu, Menglong and
      Zhang, Yanliang",
    editor = "Sun, Maosong and
      Liang, Jiye and
      Han, Xianpei and
      Liu, Zhiyuan and
      He, Yulan",
    booktitle = "Proceedings of the 23rd Chinese National Conference on Computational Linguistics (Volume 1: Main Conference)",
    month = jul,
    year = "2024",
    address = "Taiyuan, China",
    publisher = "Chinese Information Processing Society of China",
    url = "https://aclanthology.org/2024.ccl-1.81/",
    pages = "1047--1058",
    language = "eng",
    abstract = "Neural Machine Translation (NMT) models are typically trained using Maximum Likelihood Estimation (MLE). However, this approach has a limitation: while it might select the best word for the immediate context, it does not generally optimize for the entire sentence. To mitigate this issue, we propose a simple yet effective training method called SimCLNMT. This method is designed to select words that fit well in the immediate context and also enhance the overall translation quality over time. During training, SimCLNMT scores multiple system-generated (candidate) translations using the logarithm of conditional probabilities. It then employs a ranking loss function to learn and adjust these probabilities to align with the corresponding quality scores. Our experimental results demonstrate that SimCLNMT consistently outperforms traditional MLE training on both the NIST English-Chinese and WMT{'}14 English-German datasets. Further analysis also indicates that the translations generated by our model are more closely aligned with the corresponding quality scores. We release our code at https://github.com/chaos130/fairseq{\_}SimCLNMT."
}
Markdown (Informal)
[SimCLNMT: A Simple Contrastive Learning Method for Enhancing Neural Machine Translation Quality](https://aclanthology.org/2024.ccl-1.81/) (Xu & Zhang, CCL 2024)
ACL