@inproceedings{de-lhoneux-etal-2022-zero,
  title     = {Zero-Shot Dependency Parsing with Worst-Case Aware Automated Curriculum Learning},
  author    = {de Lhoneux, Miryam and
               Zhang, Sheng and
               S{\o}gaard, Anders},
  editor    = {Muresan, Smaranda and
               Nakov, Preslav and
               Villavicencio, Aline},
  booktitle = {Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)},
  month     = may,
  year      = {2022},
  address   = {Dublin, Ireland},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.acl-short.64/},
  doi       = {10.18653/v1/2022.acl-short.64},
  pages     = {578--587},
  abstract  = {Large multilingual pretrained language models such as mBERT and XLM-RoBERTa have been found to be surprisingly effective for cross-lingual transfer of syntactic parsing models Wu and Dredze (2019), but only between related languages. However, source and training languages are rarely related, when parsing truly low-resource languages. To close this gap, we adopt a method from multi-task learning, which relies on automated curriculum learning, to dynamically optimize for parsing performance on \textit{outlier} languages. We show that this approach is significantly better than uniform and size-proportional sampling in the zero-shot setting.},
}
Markdown (Informal)
[Zero-Shot Dependency Parsing with Worst-Case Aware Automated Curriculum Learning](https://aclanthology.org/2022.acl-short.64/) (de Lhoneux et al., ACL 2022)
ACL