@inproceedings{tang-etal-2019-understanding,
title = "Understanding Neural Machine Translation by Simplification: The Case of Encoder-free Models",
author = "Tang, Gongbo and
Sennrich, Rico and
Nivre, Joakim",
editor = "Mitkov, Ruslan and
Angelova, Galia",
booktitle = "Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)",
month = sep,
year = "2019",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd.",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/R19-1136/",
doi = "10.26615/978-954-452-056-4_136",
pages = "1186--1193",
abstract = "In this paper, we try to understand neural machine translation (NMT) via simplifying NMT architectures and training encoder-free NMT models. In an encoder-free model, the sums of word embeddings and positional embeddings represent the source. The decoder is a standard Transformer or recurrent neural network that directly attends to embeddings via attention mechanisms. Experimental results show (1) that the attention mechanism in encoder-free models acts as a strong feature extractor, (2) that the word embeddings in encoder-free models are competitive to those in conventional models, (3) that non-contextualized source representations lead to a big performance drop, and (4) that encoder-free models have different effects on alignment quality for German-English and Chinese-English."
}
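The abstract describes the encoder-free architecture concretely: the source is represented only by the sum of word and positional embeddings, and a standard decoder attends directly to those embeddings. Below is a minimal PyTorch sketch of such a model as one possible reading of that description; the paper does not release this code, so the class and parameter names (`EncoderFreeNMT`, `d_model`, and so on) are hypothetical, and the Transformer decoder is just one of the two decoder variants the paper mentions.

```python
import torch
import torch.nn as nn

class EncoderFreeNMT(nn.Module):
    """Sketch of an encoder-free Transformer NMT model: the decoder
    cross-attends directly to the source word + positional embeddings;
    there are no encoder layers at all."""

    def __init__(self, src_vocab, tgt_vocab, d_model=512, nhead=8,
                 num_layers=6, max_len=512):
        super().__init__()
        self.src_embed = nn.Embedding(src_vocab, d_model)
        self.tgt_embed = nn.Embedding(tgt_vocab, d_model)
        self.pos_embed = nn.Embedding(max_len, d_model)
        layer = nn.TransformerDecoderLayer(d_model, nhead, batch_first=True)
        self.decoder = nn.TransformerDecoder(layer, num_layers)
        self.out_proj = nn.Linear(d_model, tgt_vocab)

    def _embed(self, tokens, word_embed):
        # Source/target representation = word embedding + positional
        # embedding, as in the abstract's description of the source side.
        positions = torch.arange(tokens.size(1), device=tokens.device)
        return word_embed(tokens) + self.pos_embed(positions)

    def forward(self, src_tokens, tgt_tokens):
        # The decoder's "memory" is raw, non-contextualized embeddings:
        # no encoder layers are applied to the source.
        memory = self._embed(src_tokens, self.src_embed)
        tgt = self._embed(tgt_tokens, self.tgt_embed)
        # Causal mask so each target position attends only to earlier ones.
        t = tgt_tokens.size(1)
        causal = torch.triu(
            torch.full((t, t), float("-inf"), device=tgt_tokens.device),
            diagonal=1)
        hidden = self.decoder(tgt, memory, tgt_mask=causal)
        return self.out_proj(hidden)


# Toy usage: batch of 2 sentences over a 1000-token vocabulary.
model = EncoderFreeNMT(src_vocab=1000, tgt_vocab=1000)
src = torch.randint(0, 1000, (2, 10))
tgt = torch.randint(0, 1000, (2, 12))
logits = model(src, tgt)  # shape: (2, 12, 1000)
```

Note how `memory` reaches the decoder without passing through any encoder layers; under this reading, the only contextualization of the source happens inside the decoder's cross-attention, which is the simplification the paper studies.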