@inproceedings{liu-etal-2022-pre,
    title     = {Pre-training to Match for Unified Low-shot Relation Extraction},
    author    = {Liu, Fangchao and
                 Lin, Hongyu and
                 Han, Xianpei and
                 Cao, Boxi and
                 Sun, Le},
    editor    = {Muresan, Smaranda and
                 Nakov, Preslav and
                 Villavicencio, Aline},
    booktitle = {Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
    month     = may,
    year      = {2022},
    address   = {Dublin, Ireland},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2022.acl-long.397/},
    doi       = {10.18653/v1/2022.acl-long.397},
    pages     = {5785--5795},
    abstract  = {Low-shot relation extraction (RE) aims to recognize novel relations with very few or even no samples, which is critical in real scenario application. Few-shot and zero-shot RE are two representative low-shot RE tasks, which seem to be with similar target but require totally different underlying abilities. In this paper, we propose Multi-Choice Matching Networks to unify low-shot relation extraction. To fill in the gap between zero-shot and few-shot RE, we propose the triplet-paraphrase meta-training, which leverages triplet paraphrase to pre-train zero-shot label matching ability and uses meta-learning paradigm to learn few-shot instance summarizing ability. Experimental results on three different low-shot RE tasks show that the proposed method outperforms strong baselines by a large margin, and achieve the best performance on few-shot RE leaderboard.},
}
Markdown (Informal)
[Pre-training to Match for Unified Low-shot Relation Extraction](https://aclanthology.org/2022.acl-long.397/) (Liu et al., ACL 2022)
ACL
- Fangchao Liu, Hongyu Lin, Xianpei Han, Boxi Cao, and Le Sun. 2022. Pre-training to Match for Unified Low-shot Relation Extraction. In Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers), pages 5785–5795, Dublin, Ireland. Association for Computational Linguistics.