@inproceedings{maillard-clark-2018-latent,
    title     = "Latent Tree Learning with Differentiable Parsers: Shift-Reduce Parsing and Chart Parsing",
    author    = "Maillard, Jean and
                 Clark, Stephen",
    editor    = "Dinu, Georgiana and
                 Ballesteros, Miguel and
                 Sil, Avirup and
                 Bowman, Sam and
                 Hamza, Wael and
                 S{\o}gaard, Anders and
                 Naseem, Tahira and
                 Goldberg, Yoav",
    booktitle = "Proceedings of the Workshop on the Relevance of Linguistic Structure in Neural Architectures for {NLP}",
    month     = jul,
    year      = "2018",
    address   = "Melbourne, Australia",
    publisher = "Association for Computational Linguistics",
    url       = "https://aclanthology.org/W18-2903/",
    doi       = "10.18653/v1/W18-2903",
    pages     = "13--18",
    abstract  = "Latent tree learning models represent sentences by composing their words according to an induced parse tree, all based on a downstream task. These models often outperform baselines which use (externally provided) syntax trees to drive the composition order. This work contributes (a) a new latent tree learning model based on shift-reduce parsing, with competitive downstream performance and non-trivial induced trees, and (b) an analysis of the trees learned by our shift-reduce model and by a chart-based model."
}
Markdown (Informal)
[Latent Tree Learning with Differentiable Parsers: Shift-Reduce Parsing and Chart Parsing](https://aclanthology.org/W18-2903/) (Maillard & Clark, ACL 2018)
ACL