@article{liu-lapata-2018-learning,
title = "Learning Structured Text Representations",
author = "Liu, Yang and
Lapata, Mirella",
editor = "Lee, Lillian and
Johnson, Mark and
Toutanova, Kristina and
Roark, Brian",
journal = "Transactions of the Association for Computational Linguistics",
volume = "6",
year = "2018",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/Q18-1005/",
doi = "10.1162/tacl_a_00005",
pages = "63--75",
abstract = "In this paper, we focus on learning structure-aware document representations from data without recourse to a discourse parser or additional annotations. Drawing inspiration from recent efforts to empower neural networks with a structural bias (Cheng et al., 2016; Kim et al., 2017), we propose a model that can encode a document while automatically inducing rich structural dependencies. Specifically, we embed a differentiable non-projective parsing algorithm into a neural model and use attention mechanisms to incorporate the structural biases. Experimental evaluations across different tasks and datasets show that the proposed model achieves state-of-the-art results on document modeling tasks while inducing intermediate structures which are both interpretable and meaningful."
}
Markdown (Informal)
[Learning Structured Text Representations](https://aclanthology.org/Q18-1005/) (Liu & Lapata, TACL 2018)
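The abstract describes embedding a differentiable non-projective parsing algorithm into a neural model. As a rough illustration only (not the authors' implementation), the sketch below computes marginal probabilities of head-child arcs over all non-projective dependency trees with the Matrix-Tree Theorem (Koo et al., 2007), the standard device behind this kind of structured attention; the function name `tree_marginals` and the score conventions are assumptions made for the example.

```python
import numpy as np

def tree_marginals(edge_scores, root_scores):
    """Arc and root marginals over non-projective dependency trees,
    computed via the Matrix-Tree Theorem (illustrative sketch only).

    edge_scores : (n, n) array, edge_scores[i, j] = score of arc head i -> child j
    root_scores : (n,)   array, root_scores[i]    = score of token i being the root
    """
    n = edge_scores.shape[0]
    A = np.exp(edge_scores)
    np.fill_diagonal(A, 0.0)                     # no self-loops
    r = np.exp(root_scores)

    # Laplacian: diagonal holds column sums of A, off-diagonals hold -A.
    L = -A.copy()
    L[np.diag_indices(n)] = A.sum(axis=0)

    # Replace the first row with root weights; det(L_bar) is then the
    # partition function over all rooted non-projective trees.
    L_bar = L.copy()
    L_bar[0, :] = r
    L_inv = np.linalg.inv(L_bar)

    # Root marginals: P(token i is the root).
    root_marg = r * L_inv[:, 0]

    # Arc marginals: P(arc i -> j), following Koo et al.'s inverse-Laplacian formula.
    not_first = np.ones(n)
    not_first[0] = 0.0
    arc_marg = A * (not_first[None, :] * np.diag(L_inv)[None, :]
                    - not_first[:, None] * L_inv.T)
    return arc_marg, root_marg

# Quick sanity check with random scores for three tokens:
rng = np.random.default_rng(0)
arcs, roots = tree_marginals(rng.normal(size=(3, 3)), rng.normal(size=3))
print(roots.sum())   # ~1.0: exactly one root per tree
print(arcs.sum())    # ~2.0: each 3-token tree has n - 1 = 2 arcs
```

In a model of the kind the abstract describes, marginals like these could serve as soft attention weights over sentence pairs, with gradients flowing through the determinant/inverse so the structure is induced end to end.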