@inproceedings{liu-lapata-2017-learning,
title = "Learning Contextually Informed Representations for Linear-Time Discourse Parsing",
author = "Liu, Yang and
Lapata, Mirella",
editor = "Palmer, Martha and
Hwa, Rebecca and
Riedel, Sebastian",
booktitle = "Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/D17-1133/",
doi = "10.18653/v1/D17-1133",
pages = "1289--1298",
abstract = "Recent advances in RST discourse parsing have focused on two modeling paradigms: (a) high order parsers which jointly predict the tree structure of the discourse and the relations it encodes; or (b) linear-time parsers which are efficient but mostly based on local features. In this work, we propose a linear-time parser with a novel way of representing discourse constituents based on neural networks which takes into account global contextual information and is able to capture long-distance dependencies. Experimental results show that our parser obtains state-of-the art performance on benchmark datasets, while being efficient (with time complexity linear in the number of sentences in the document) and requiring minimal feature engineering."
}