@inproceedings{bahar-etal-2018-towards,
title = "Towards Two-Dimensional Sequence to Sequence Model in Neural Machine Translation",
author = "Bahar, Parnia and
Brix, Christopher and
Ney, Hermann",
editor = "Riloff, Ellen and
Chiang, David and
Hockenmaier, Julia and
Tsujii, Jun{'}ichi",
booktitle = "Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing",
month = oct # "-" # nov,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/moar-dois/D18-1335/",
doi = "10.18653/v1/D18-1335",
pages = "3009--3015",
abstract = "This work investigates an alternative model for neural machine translation (NMT) and proposes a novel architecture, where we employ a multi-dimensional long short-term memory (MDLSTM) for translation modelling. In the state-of-the-art methods, source and target sentences are treated as one-dimensional sequences over time, while we view translation as a two-dimensional (2D) mapping using an MDLSTM layer to define the correspondence between source and target words. We extend beyond the current sequence to sequence backbone NMT models to a 2D structure in which the source and target sentences are aligned with each other in a 2D grid. Our proposed topology shows consistent improvements over attention-based sequence to sequence model on two WMT 2017 tasks, German{\ensuremath{<}}-{\ensuremath{>}}English."
}
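
The abstract describes the core idea only in prose: instead of encoding source and target as two one-dimensional sequences, an MDLSTM layer is swept over a 2D grid in which position (i, j) relates target word i to source word j. As a rough illustration of that mechanism (not the authors' implementation), the sketch below runs a minimal 2D LSTM cell over such a grid in NumPy. The names (`MDLSTMCell`, `run_grid`), the gate layout with one forget gate per grid axis, the toy dimensions, and the single-layer setup are all assumptions made for this example.

```python
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

class MDLSTMCell:
    """Minimal 2D (multi-dimensional) LSTM cell -- illustrative only.

    At grid position (i, j) the cell reads one input vector plus the
    hidden/cell states of its left neighbour (i, j-1) and its top
    neighbour (i-1, j). There is one forget gate per axis.
    """

    def __init__(self, input_dim, hidden_dim, seed=0):
        rng = np.random.default_rng(seed)
        # 5 gates: input, forget (left axis), forget (top axis),
        # cell candidate, output -- all from one fused weight matrix.
        concat_dim = input_dim + 2 * hidden_dim
        self.W = rng.normal(scale=0.1, size=(5 * hidden_dim, concat_dim))
        self.b = np.zeros(5 * hidden_dim)
        self.hidden_dim = hidden_dim

    def step(self, x, h_left, c_left, h_top, c_top):
        z = self.W @ np.concatenate([x, h_left, h_top]) + self.b
        H = self.hidden_dim
        i = sigmoid(z[0 * H:1 * H])    # input gate
        f_l = sigmoid(z[1 * H:2 * H])  # forget gate, horizontal axis
        f_t = sigmoid(z[2 * H:3 * H])  # forget gate, vertical axis
        g = np.tanh(z[3 * H:4 * H])    # candidate cell state
        o = sigmoid(z[4 * H:5 * H])    # output gate
        c = f_l * c_left + f_t * c_top + i * g
        h = o * np.tanh(c)
        return h, c

def run_grid(cell, src_emb, tgt_emb):
    """Sweep the cell over the (target x source) grid.

    src_emb: (J, d) source embeddings; tgt_emb: (I, d) target
    embeddings. Position (i, j) sees the concatenation of target
    word i and source word j, mirroring the 2D alignment grid.
    """
    I, J, H = len(tgt_emb), len(src_emb), cell.hidden_dim
    h = np.zeros((I + 1, J + 1, H))  # zero states pad the grid borders
    c = np.zeros((I + 1, J + 1, H))
    for i in range(1, I + 1):
        for j in range(1, J + 1):
            x = np.concatenate([tgt_emb[i - 1], src_emb[j - 1]])
            h[i, j], c[i, j] = cell.step(
                x, h[i, j - 1], c[i, j - 1], h[i - 1, j], c[i - 1, j])
    return h[1:, 1:]  # (I, J, H) grid of hidden states

# Toy usage: 4 source words, 3 target words, embedding size 8.
rng = np.random.default_rng(1)
cell = MDLSTMCell(input_dim=16, hidden_dim=32)
states = run_grid(cell, rng.normal(size=(4, 8)), rng.normal(size=(3, 8)))
print(states.shape)  # (3, 4, 32)
```

In a full model, the hidden states along the last column of the grid (or a projection of them) would feed the prediction of each target word; this sketch stops at producing the grid of states.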