@inproceedings{ma-etal-2020-simple,
  title     = {A Simple and Effective Unified Encoder for Document-Level Machine Translation},
  author    = {Ma, Shuming and
               Zhang, Dongdong and
               Zhou, Ming},
  editor    = {Jurafsky, Dan and
               Chai, Joyce and
               Schluter, Natalie and
               Tetreault, Joel},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.acl-main.321/},
  doi       = {10.18653/v1/2020.acl-main.321},
  pages     = {3505--3511},
  abstract  = {Most of the existing models for document-level machine translation adopt dual-encoder structures. The representation of the source sentences and the document-level contexts are modeled with two separate encoders. Although these models can make use of the document-level contexts, they do not fully model the interaction between the contexts and the source sentences, and can not directly adapt to the recent pre-training models (e.g., BERT) which encodes multiple sentences with a single encoder. In this work, we propose a simple and effective unified encoder that can outperform the baseline models of dual-encoder models in terms of BLEU and METEOR scores. Moreover, the pre-training models can further boost the performance of our proposed model.},
}
Markdown (Informal)
[A Simple and Effective Unified Encoder for Document-Level Machine Translation](https://aclanthology.org/2020.acl-main.321/) (Ma et al., ACL 2020)
ACL