@inproceedings{wang-etal-2018-improved,
    title     = {Improved Dependency Parsing using Implicit Word Connections Learned from Unlabeled Data},
    author    = {Wang, Wenhui and
                 Chang, Baobao and
                 Mansur, Mairgup},
    editor    = {Riloff, Ellen and
                 Chiang, David and
                 Hockenmaier, Julia and
                 Tsujii, Jun{'}ichi},
    booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing},
    month     = oct # {--} # nov,
    year      = {2018},
    address   = {Brussels, Belgium},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/D18-1311/},
    doi       = {10.18653/v1/D18-1311},
    pages     = {2857--2863},
    abstract  = {Pre-trained word embeddings and language model have been shown useful in a lot of tasks. However, both of them cannot directly capture word connections in a sentence, which is important for dependency parsing given its goal is to establish dependency relations between words. In this paper, we propose to implicitly capture word connections from unlabeled data by a word ordering model with self-attention mechanism. Experiments show that these implicit word connections do improve our parsing model. Furthermore, by combining with a pre-trained language model, our model gets state-of-the-art performance on the English PTB dataset, achieving 96.35{\%} UAS and 95.25{\%} LAS.},
}
Markdown (Informal)
[Improved Dependency Parsing using Implicit Word Connections Learned from Unlabeled Data](https://aclanthology.org/D18-1311/) (Wang et al., EMNLP 2018)
ACL