@inproceedings{stojanovski-fraser-2021-addressing,
title = "Addressing Zero-Resource Domains Using Document-Level Context in Neural Machine Translation",
author = "Stojanovski, Dario and
Fraser, Alexander",
editor = "Ben-David, Eyal and
Cohen, Shay and
McDonald, Ryan and
Plank, Barbara and
Reichart, Roi and
Rotman, Guy and
Ziser, Yftah",
booktitle = "Proceedings of the Second Workshop on Domain Adaptation for NLP",
month = apr,
year = "2021",
address = "Kyiv, Ukraine",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.adaptnlp-1.9/",
pages = "80--93",
abstract = "Achieving satisfying performance in machine translation on domains for which there is no training data is challenging. Traditional supervised domain adaptation is not suitable for addressing such zero-resource domains because it relies on in-domain parallel data. We show that when in-domain parallel data is not available, access to document-level context enables better capturing of domain generalities compared to only having access to a single sentence. Having access to more information provides a more reliable domain estimation. We present two document-level Transformer models which are capable of using large context sizes and we compare these models against strong Transformer baselines. We obtain improvements for the two zero-resource domains we study. We additionally provide an analysis where we vary the amount of context and look at the case where in-domain data is available."
}