@inproceedings{yang-etal-2019-reducing,
    title     = {Reducing Word Omission Errors in {Neural Machine Translation}: A Contrastive Learning Approach},
    author    = {Yang, Zonghan and
                 Cheng, Yong and
                 Liu, Yang and
                 Sun, Maosong},
    editor    = {Korhonen, Anna and
                 Traum, David and
                 M{\`a}rquez, Llu{\'\i}s},
    booktitle = {Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics},
    month     = jul,
    year      = {2019},
    address   = {Florence, Italy},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/P19-1623/},
    doi       = {10.18653/v1/P19-1623},
    pages     = {6191--6196},
    abstract  = {While neural machine translation (NMT) has achieved remarkable success, NMT systems are prone to make word omission errors. In this work, we propose a contrastive learning approach to reducing word omission errors in NMT. The basic idea is to enable the NMT model to assign a higher probability to a ground-truth translation and a lower probability to an erroneous translation, which is automatically constructed from the ground-truth translation by omitting words. We design different types of negative examples depending on the number of omitted words, word frequency, and part of speech. Experiments on Chinese-to-English, German-to-English, and Russian-to-English translation tasks show that our approach is effective in reducing word omission errors and achieves better translation performance than three baseline methods.}
}
Markdown (Informal)
[Reducing Word Omission Errors in Neural Machine Translation: A Contrastive Learning Approach](https://aclanthology.org/P19-1623/) (Yang et al., ACL 2019)
ACL