@inproceedings{zhang-etal-2024-improving-continual,
title = "Improving Continual Few-shot Relation Extraction through Relational Knowledge Distillation and Prototype Augmentation",
author = "Zhang, Zhiheng and
Zeng, Daojian and
Bai, Xue",
editor = "Calzolari, Nicoletta and
Kan, Min-Yen and
Hoste, Veronique and
Lenci, Alessandro and
Sakti, Sakriani and
Xue, Nianwen",
booktitle = "Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)",
month = may,
year = "2024",
address = "Torino, Italia",
publisher = "ELRA and ICCL",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2024.lrec-main.767/",
pages = "8756--8767",
abstract = "In this paper, we focus on the challenging yet practical problem of Continual Few-shot Relation Extraction (CFRE), which involves extracting relations in the continuous and iterative arrival of new data with only a few labeled examples. The main challenges in CFRE are overfitting due to few-shot learning and catastrophic forgetting caused by continual learning. To address these problems, we propose a novel framework called RK2DA, which seamlessly integrates prototype-based data augmentation and relational knowledge distillation. Specifically, RK2DA generates pseudo data by introducing Gaussian noise to the prototype embeddings and utilizes a novel two-phase multi-teacher relational knowledge distillation method to transfer various knowledge from different embedding spaces. Experimental results on the FewRel and TACRED datasets demonstrate that our method outperforms the state-of-the-art baselines."
}
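
The abstract describes generating pseudo data by adding Gaussian noise to prototype embeddings. The sketch below is not the authors' code; it is a minimal illustration of that idea under the common assumption that a relation's prototype is the mean of its few-shot support embeddings, with the function name, pseudo-sample count, and noise scale chosen purely for illustration.

```python
# Minimal sketch (assumed details, not the paper's implementation) of
# prototype-based augmentation: take the mean of the support embeddings as
# the class prototype, then perturb it with Gaussian noise to create pseudo
# embeddings for the relation.
import torch


def prototype_augment(support_embs: torch.Tensor,
                      num_pseudo: int = 5,
                      noise_std: float = 0.1) -> torch.Tensor:
    """support_embs: (K, d) embeddings of the K labeled examples of one relation.
    Returns (num_pseudo, d) pseudo embeddings sampled around the prototype."""
    prototype = support_embs.mean(dim=0, keepdim=True)            # (1, d)
    noise = torch.randn(num_pseudo, support_embs.size(1)) * noise_std
    return prototype + noise                                       # (num_pseudo, d)


# Example: a 5-shot support set with 768-dimensional encoder outputs.
pseudo = prototype_augment(torch.randn(5, 768))
print(pseudo.shape)  # torch.Size([5, 768])
```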