@inproceedings{mohananey-etal-2020-self,
title = "Self-Training for Unsupervised Parsing with {PRPN}",
author = "Mohananey, Anhad and
Kann, Katharina and
Bowman, Samuel R.",
booktitle = "Proceedings of the 16th International Conference on Parsing Technologies and the IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.iwpt-1.11",
doi = "10.18653/v1/2020.iwpt-1.11",
pages = "105--110",
abstract = "Neural unsupervised parsing (UP) models learn to parse without access to syntactic annotations, while being optimized for another task like language modeling. In this work, we propose self-training for neural UP models: we leverage aggregated annotations predicted by copies of our model as supervision for future copies. To be able to use our model{'}s predictions during training, we extend a recent neural UP architecture, the PRPN (Shen et al., 2018a), such that it can be trained in a semi-supervised fashion. We then add examples with parses predicted by our model to our unlabeled UP training data. Our self-trained model outperforms the PRPN by 8.1{\%} F1 and the previous state of the art by 1.6{\%} F1. In addition, we show that our architecture can also be helpful for semi-supervised parsing in ultra-low-resource settings.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="mohananey-etal-2020-self">
    <titleInfo>
      <title>Self-Training for Unsupervised Parsing with PRPN</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Anhad</namePart>
      <namePart type="family">Mohananey</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Katharina</namePart>
      <namePart type="family">Kann</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Samuel</namePart>
      <namePart type="given">R</namePart>
      <namePart type="family">Bowman</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2020-jul</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 16th International Conference on Parsing Technologies and the IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies</title>
      </titleInfo>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Neural unsupervised parsing (UP) models learn to parse without access to syntactic annotations, while being optimized for another task like language modeling. In this work, we propose self-training for neural UP models: we leverage aggregated annotations predicted by copies of our model as supervision for future copies. To be able to use our model’s predictions during training, we extend a recent neural UP architecture, the PRPN (Shen et al., 2018a), such that it can be trained in a semi-supervised fashion. We then add examples with parses predicted by our model to our unlabeled UP training data. Our self-trained model outperforms the PRPN by 8.1% F1 and the previous state of the art by 1.6% F1. In addition, we show that our architecture can also be helpful for semi-supervised parsing in ultra-low-resource settings.</abstract>
    <identifier type="citekey">mohananey-etal-2020-self</identifier>
    <identifier type="doi">10.18653/v1/2020.iwpt-1.11</identifier>
    <location>
      <url>https://aclanthology.org/2020.iwpt-1.11</url>
    </location>
    <part>
      <date>2020-jul</date>
      <extent unit="page">
        <start>105</start>
        <end>110</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T Self-Training for Unsupervised Parsing with PRPN
%A Mohananey, Anhad
%A Kann, Katharina
%A Bowman, Samuel R.
%S Proceedings of the 16th International Conference on Parsing Technologies and the IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies
%D 2020
%8 jul
%I Association for Computational Linguistics
%C Online
%F mohananey-etal-2020-self
%X Neural unsupervised parsing (UP) models learn to parse without access to syntactic annotations, while being optimized for another task like language modeling. In this work, we propose self-training for neural UP models: we leverage aggregated annotations predicted by copies of our model as supervision for future copies. To be able to use our model’s predictions during training, we extend a recent neural UP architecture, the PRPN (Shen et al., 2018a), such that it can be trained in a semi-supervised fashion. We then add examples with parses predicted by our model to our unlabeled UP training data. Our self-trained model outperforms the PRPN by 8.1% F1 and the previous state of the art by 1.6% F1. In addition, we show that our architecture can also be helpful for semi-supervised parsing in ultra-low-resource settings.
%R 10.18653/v1/2020.iwpt-1.11
%U https://aclanthology.org/2020.iwpt-1.11
%U https://doi.org/10.18653/v1/2020.iwpt-1.11
%P 105-110
Markdown (Informal)
[Self-Training for Unsupervised Parsing with PRPN](https://aclanthology.org/2020.iwpt-1.11) (Mohananey et al., IWPT 2020)
ACL
Anhad Mohananey, Katharina Kann, and Samuel R. Bowman. 2020. Self-Training for Unsupervised Parsing with PRPN. In Proceedings of the 16th International Conference on Parsing Technologies and the IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies, pages 105–110, Online. Association for Computational Linguistics.