@inproceedings{zhang-etal-2020-empirical,
  title     = {An Empirical Exploration of Local Ordering Pre-training for Structured Prediction},
  author    = {Zhang, Zhisong and
               Kong, Xiang and
               Levin, Lori and
               Hovy, Eduard},
  editor    = {Cohn, Trevor and
               He, Yulan and
               Liu, Yang},
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2020},
  month     = nov,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.findings-emnlp.160/},
  doi       = {10.18653/v1/2020.findings-emnlp.160},
  pages     = {1770--1783},
  abstract  = {Recently, pre-training contextualized encoders with language model (LM) objectives has been shown an effective semi-supervised method for structured prediction. In this work, we empirically explore an alternative pre-training method for contextualized encoders. Instead of predicting words in LMs, we ``mask out'' and predict word order information, with a local ordering strategy and word-selecting objectives. With evaluations on three typical structured prediction tasks (dependency parsing, POS tagging, and NER) over four languages (English, Finnish, Czech, and Italian), we show that our method is consistently beneficial. We further conduct detailed error analysis, including one that examines a specific type of parsing error where the head is misidentified. The results show that pre-trained contextual encoders can bring improvements in a structured way, suggesting that they may be able to capture higher-order patterns and feature combinations from unlabeled data.},
}
Markdown (Informal)
[An Empirical Exploration of Local Ordering Pre-training for Structured Prediction](https://aclanthology.org/2020.findings-emnlp.160/) (Zhang et al., Findings 2020)
ACL