@inproceedings{hao-etal-2019-towards,
  title     = {Towards Better Modeling Hierarchical Structure for {Self-Attention} with {Ordered Neurons}},
  author    = {Hao, Jie and
               Wang, Xing and
               Shi, Shuming and
               Zhang, Jinfeng and
               Tu, Zhaopeng},
  editor    = {Inui, Kentaro and
               Jiang, Jing and
               Ng, Vincent and
               Wan, Xiaojun},
  booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing ({EMNLP-IJCNLP})},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D19-1135/},
  doi       = {10.18653/v1/D19-1135},
  pages     = {1336--1341},
  abstract  = {Recent studies have shown that a hybrid of self-attention networks (SANs) and recurrent neural networks RNNs outperforms both individual architectures, while not much is known about why the hybrid models work. With the belief that modeling hierarchical structure is an essential complementary between SANs and RNNs, we propose to further enhance the strength of hybrid models with an advanced variant of RNNs {--} Ordered Neurons LSTM (ON-LSTM), which introduces a syntax-oriented inductive bias to perform tree-like composition. Experimental results on the benchmark machine translation task show that the proposed approach outperforms both individual architectures and a standard hybrid model. Further analyses on targeted linguistic evaluation and logical inference tasks demonstrate that the proposed approach indeed benefits from a better modeling of hierarchical structure.},
}
Markdown (Informal)
[Towards Better Modeling Hierarchical Structure for Self-Attention with Ordered Neurons](https://aclanthology.org/D19-1135/) (Hao et al., EMNLP-IJCNLP 2019)
ACL