@inproceedings{zhang-etal-2017-incorporating,
  title     = {Incorporating Word Reordering Knowledge into Attention-based Neural Machine Translation},
  author    = {Zhang, Jinchao and
               Wang, Mingxuan and
               Liu, Qun and
               Zhou, Jie},
  editor    = {Barzilay, Regina and
               Kan, Min-Yen},
  booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2017},
  address   = {Vancouver, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/P17-1140/},
  doi       = {10.18653/v1/P17-1140},
  pages     = {1524--1534},
  abstract  = {This paper proposes three distortion models to explicitly incorporate the word reordering knowledge into attention-based Neural Machine Translation (NMT) for further improving translation performance. Our proposed models enable attention mechanism to attend to source words regarding both the semantic requirement and the word reordering penalty. Experiments on Chinese-English translation show that the approaches can improve word alignment quality and achieve significant translation improvements over a basic attention-based NMT by large margins. Compared with previous works on identical corpora, our system achieves the state-of-the-art performance on translation quality.},
}
Markdown (Informal)
[Incorporating Word Reordering Knowledge into Attention-based Neural Machine Translation](https://aclanthology.org/P17-1140/) (Zhang et al., ACL 2017)
ACL