@inproceedings{shao-etal-2023-class,
  title     = {Class-Incremental Learning based on Label Generation},
  author    = {Shao, Yijia and
               Guo, Yiduo and
               Zhao, Dongyan and
               Liu, Bing},
  editor    = {Rogers, Anna and
               Boyd-Graber, Jordan and
               Okazaki, Naoaki},
  booktitle = {Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)},
  month     = jul,
  year      = {2023},
  address   = {Toronto, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.acl-short.109/},
  doi       = {10.18653/v1/2023.acl-short.109},
  pages     = {1263--1276},
  abstract  = {Despite the great success of pre-trained language models, it is still a challenge to use these models for continual learning, especially for the class-incremental learning (CIL) setting due to catastrophic forgetting (CF). This paper reports our finding that if we formulate CIL as a continual label generation problem, CF is drastically reduced and the generalizable representations of pre-trained models can be better retained. We thus propose a new CIL method (VAG) that also leverages the sparsity of vocabulary to focus the generation and creates pseudo-replay samples by using label semantics. Experimental results show that VAG outperforms baselines by a large margin.},
}
Markdown (Informal)
[Class-Incremental Learning based on Label Generation](https://aclanthology.org/2023.acl-short.109/) (Shao et al., ACL 2023)
ACL
- Yijia Shao, Yiduo Guo, Dongyan Zhao, and Bing Liu. 2023. Class-Incremental Learning based on Label Generation. In Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers), pages 1263–1276, Toronto, Canada. Association for Computational Linguistics.