@inproceedings{omote-etal-2019-dependency,
title = "Dependency-Based Relative Positional Encoding for Transformer {NMT}",
author = "Omote, Yutaro and
Tamura, Akihiro and
Ninomiya, Takashi",
editor = "Mitkov, Ruslan and
Angelova, Galia",
booktitle = "Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2019)",
month = sep,
year = "2019",
address = "Varna, Bulgaria",
publisher = "INCOMA Ltd.",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/R19-1099/",
doi = "10.26615/978-954-452-056-4_099",
pages = "854--861",
abstract = "This paper proposes a new Transformer neural machine translation model that incorporates syntactic distances between two source words into the relative position representations of the self-attention mechanism. In particular, the proposed model encodes pair-wise relative depths on a source dependency tree, which are differences between the depths of the two source words, in the encoder`s self-attention. The experiments show that our proposed model achieves 0.5 point gain in BLEU on the Asian Scientific Paper Excerpt Corpus Japanese-to-English translation task."
}
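The abstract describes replacing sequential relative positions with pairwise depth differences on the source dependency tree, fed into the encoder's self-attention. Below is a minimal sketch of that pairwise-depth computation, assuming the tree is given as head indices; the function name, the head-index input format, and the clipping distance `max_dist` are illustrative assumptions, not details taken from the paper.

```python
import numpy as np

def relative_depth_matrix(heads, max_dist=4):
    """Pairwise relative depths on a dependency tree.

    heads[i] is the index of token i's head; the root points to itself.
    Returns a matrix R with R[i, j] = depth(j) - depth(i), clipped to
    [-max_dist, max_dist] so it can index a small embedding table of
    relative position representations (clipping distance is an assumption).
    """
    n = len(heads)
    depths = np.zeros(n, dtype=int)
    for i in range(n):
        d, node = 0, i
        while heads[node] != node:  # walk up to the root
            node = heads[node]
            d += 1
        depths[i] = d
    rel = depths[None, :] - depths[:, None]  # depth(j) - depth(i)
    return np.clip(rel, -max_dist, max_dist)

# Example: "She ate fish" with "ate" as root (head indices: 1, 1, 1)
print(relative_depth_matrix([1, 1, 1]))
```

Each entry of the resulting matrix would index a learned embedding added to the attention logits, in the spirit of Shaw et al.'s relative position representations that the paper builds on.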