@inproceedings{zhang-nie-2020-inducing,
  title     = {Inducing Grammar from {Long Short-Term Memory} Networks by {Shapley} Decomposition},
  author    = {Zhang, Yuhui and
               Nie, Allen},
  editor    = {Rijhwani, Shruti and
               Liu, Jiangming and
               Wang, Yizhong and
               Dror, Rotem},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics: Student Research Workshop},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.acl-srw.40/},
  doi       = {10.18653/v1/2020.acl-srw.40},
  pages     = {299--305},
  abstract  = {The principle of compositionality has deep roots in linguistics: the meaning of an expression is determined by its structure and the meanings of its constituents. However, modern neural network models such as long short-term memory network process expressions in a linear fashion and do not seem to incorporate more complex compositional patterns. In this work, we show that we can explicitly induce grammar by tracing the computational process of a long short-term memory network. We show: (i) the multiplicative nature of long short-term memory network allows complex interaction beyond sequential linear combination; (ii) we can generate compositional trees from the network without external linguistic knowledge; (iii) we evaluate the syntactic difference between the generated trees, randomly generated trees and gold reference trees produced by constituency parsers; (iv) we evaluate whether the generated trees contain the rich semantic information.}
}
Markdown (Informal)
[Inducing Grammar from Long Short-Term Memory Networks by Shapley Decomposition](https://aclanthology.org/2020.acl-srw.40/) (Zhang & Nie, ACL 2020)
ACL