@inproceedings{wang-etal-2017-exploiting-cross,
  title     = {Exploiting Cross-Sentence Context for Neural Machine Translation},
  author    = {Wang, Longyue and
               Tu, Zhaopeng and
               Way, Andy and
               Liu, Qun},
  editor    = {Palmer, Martha and
               Hwa, Rebecca and
               Riedel, Sebastian},
  booktitle = {Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing},
  month     = sep,
  year      = {2017},
  address   = {Copenhagen, Denmark},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D17-1301/},
  doi       = {10.18653/v1/D17-1301},
  pages     = {2826--2831},
  abstract  = {In translation, considering the document as a whole can help to resolve ambiguities and inconsistencies. In this paper, we propose a cross-sentence context-aware approach and investigate the influence of historical contextual information on the performance of neural machine translation (NMT). First, this history is summarized in a hierarchical way. We then integrate the historical representation into NMT in two strategies: 1) a warm-start of encoder and decoder states, and 2) an auxiliary context source for updating decoder states. Experimental results on a large Chinese-English translation task show that our approach significantly improves upon a strong attention-based NMT system by up to +2.1 BLEU points.},
}
@comment{
Markdown (Informal)
[Exploiting Cross-Sentence Context for Neural Machine Translation](https://aclanthology.org/D17-1301/) (Wang et al., EMNLP 2017)
ACL
}