@inproceedings{ferrando-costa-jussa-2021-attention-weights,
    title = "Attention Weights in Transformer {NMT} Fail Aligning Words Between Sequences but Largely Explain Model Predictions",
    author = "Ferrando, Javier and
      Costa-juss{\`a}, Marta R.",
    editor = "Moens, Marie-Francine and
      Huang, Xuanjing and
      Specia, Lucia and
      Yih, Scott Wen-tau",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
    month = nov,
    year = "2021",
    address = "Punta Cana, Dominican Republic",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.findings-emnlp.39/",
    doi = "10.18653/v1/2021.findings-emnlp.39",
    pages = "434--443"
}