@article{rotman-reichart-2019-deep,
title = "Deep Contextualized Self-training for Low Resource Dependency Parsing",
author = "Rotman, Guy and
Reichart, Roi",
editor = "Lee, Lillian and
Johnson, Mark and
Roark, Brian and
Nenkova, Ani",
journal = "Transactions of the Association for Computational Linguistics",
volume = "7",
year = "2019",
address = "Cambridge, MA",
publisher = "MIT Press",
url = "https://preview.aclanthology.org/fix-sig-urls/Q19-1044/",
doi = "10.1162/tacl_a_00294",
pages = "695--713",
abstract = "Neural dependency parsing has proven very effective, achieving state-of-the-art results on numerous domains and languages. Unfortunately, it requires large amounts of labeled data, which is costly and laborious to create. In this paper we propose a self-training algorithm that alleviates this annotation bottleneck by training a parser on its own output. Our Deep Contextualized Self-training (DCST) algorithm utilizes representation models trained on sequence labeling tasks that are derived from the parser{'}s output when applied to unlabeled data, and integrates these models with the base parser through a gating mechanism. We conduct experiments across multiple languages, both in low resource in-domain and in cross-domain setups, and demonstrate that DCST substantially outperforms traditional self-training as well as recent semi-supervised training methods.1"
}
[Deep Contextualized Self-training for Low Resource Dependency Parsing](https://aclanthology.org/Q19-1044/) (Rotman & Reichart, TACL 2019)
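The abstract describes DCST as integrating self-trained representation models with the base parser "through a gating mechanism". As a loose illustration only, the PyTorch sketch below shows one way such a gated combination of a base-parser word representation with an auxiliary, self-trained one could look; the class name, dimensions, and exact gating formula are assumptions made for illustration, not the authors' reference implementation (see the paper for the actual method).

```python
import torch
import torch.nn as nn


class GatedCombination(nn.Module):
    """Hypothetical sketch of a gating layer that mixes base-parser
    word representations with representations from an auxiliary,
    self-trained sequence-labeling encoder. Names and dimensions are
    assumptions, not the DCST reference implementation."""

    def __init__(self, dim: int):
        super().__init__()
        # The gate is computed from the concatenation of both inputs.
        self.gate = nn.Linear(2 * dim, dim)

    def forward(self, h_base: torch.Tensor, h_aux: torch.Tensor) -> torch.Tensor:
        g = torch.sigmoid(self.gate(torch.cat([h_base, h_aux], dim=-1)))
        # Per-dimension convex combination of the two representations,
        # fed to the downstream parser layers.
        return g * h_base + (1.0 - g) * h_aux


if __name__ == "__main__":
    combine = GatedCombination(dim=128)
    h_base = torch.randn(8, 20, 128)  # (batch, sentence length, dim)
    h_aux = torch.randn(8, 20, 128)
    print(combine(h_base, h_aux).shape)  # torch.Size([8, 20, 128])
```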