@inproceedings{zhang-feng-2021-modeling-concentrated,
title = "Modeling Concentrated Cross-Attention for Neural Machine Translation with {G}aussian Mixture Model",
author = "Zhang, Shaolei and
Feng, Yang",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.findings-emnlp.121/",
doi = "10.18653/v1/2021.findings-emnlp.121",
pages = "1401--1411",
abstract = "Cross-attention is an important component of neural machine translation (NMT), which is always realized by dot-product attention in previous methods. However, dot-product attention only considers the pair-wise correlation between words, resulting in dispersion when dealing with long sentences and neglect of source neighboring relationships. Inspired by linguistics, the above issues are caused by ignoring a type of cross-attention, called concentrated attention, which focuses on several central words and then spreads around them. In this work, we apply Gaussian Mixture Model (GMM) to model the concentrated attention in cross-attention. Experiments and analyses we conducted on three datasets show that the proposed method outperforms the baseline and has significant improvement on alignment quality, N-gram accuracy, and long sentence translation."
}
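
For readers wanting a sense of the technique described in the abstract, the following is a minimal illustrative sketch (not the authors' implementation): each target word predicts the parameters of a small Gaussian mixture over source positions, and the resulting positional distribution is combined with ordinary dot-product attention so that attention concentrates around a few central source words. The function names, the interpolation with dot-product attention, and the hyperparameter `alpha` are assumptions made for illustration only.

```python
# Illustrative sketch only, not the paper's exact formulation: bias dot-product
# cross-attention with a Gaussian-mixture distribution over source positions.
import numpy as np

def softmax(x, axis=-1):
    x = x - x.max(axis=axis, keepdims=True)
    e = np.exp(x)
    return e / e.sum(axis=axis, keepdims=True)

def gmm_concentrated_attention(scores, means, log_stds, mix_logits, src_len, alpha=0.5):
    """scores: (tgt_len, src_len) dot-product attention logits.
    means, log_stds, mix_logits: (tgt_len, K) per-target-word GMM parameters,
    assumed here to be predicted from the decoder state by small linear layers.
    Returns attention weights mixing dot-product and GMM positional terms."""
    positions = np.arange(src_len)                     # source positions 0..src_len-1
    stds = np.exp(log_stds)
    weights = softmax(mix_logits, axis=-1)             # mixture weights, sum to 1 per target word
    # Gaussian density of each source position under each component: (tgt_len, K, src_len)
    diff = positions[None, None, :] - means[:, :, None]
    dens = np.exp(-0.5 * (diff / stds[:, :, None]) ** 2) / (stds[:, :, None] * np.sqrt(2 * np.pi))
    gmm = (weights[:, :, None] * dens).sum(axis=1)     # (tgt_len, src_len) mixture distribution
    gmm = gmm / gmm.sum(axis=-1, keepdims=True)        # renormalize over source positions
    # Interpolate with ordinary dot-product attention (alpha is an assumed hyperparameter).
    return alpha * softmax(scores, axis=-1) + (1 - alpha) * gmm
```

The concentration effect comes from the Gaussian densities: source words near a predicted mean receive high weight, and weight decays smoothly with distance, which is one way to encode the "central words plus their neighborhoods" behavior the abstract describes.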