@inproceedings{ke-etal-2021-adapting,
title = "Adapting {BERT} for Continual Learning of a Sequence of Aspect Sentiment Classification Tasks",
author = "Ke, Zixuan and
Xu, Hu and
Liu, Bing",
editor = "Toutanova, Kristina and
Rumshisky, Anna and
Zettlemoyer, Luke and
Hakkani-Tur, Dilek and
Beltagy, Iz and
Bethard, Steven and
Cotterell, Ryan and
Chakraborty, Tanmoy and
Zhou, Yichao",
booktitle = "Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.naacl-main.378/",
doi = "10.18653/v1/2021.naacl-main.378",
pages = "4746--4755",
abstract = "This paper studies continual learning (CL) of a sequence of aspect sentiment classification (ASC) tasks. Although some CL techniques have been proposed for document sentiment classification, we are not aware of any CL work on ASC. A CL system that incrementally learns a sequence of ASC tasks should address the following two issues: (1) transfer knowledge learned from previous tasks to the new task to help it learn a better model, and (2) maintain the performance of the models for previous tasks so that they are not forgotten. This paper proposes a novel capsule network based model called B-CL to address these issues. B-CL markedly improves the ASC performance on both the new task and the old tasks via forward and backward knowledge transfer. The effectiveness of B-CL is demonstrated through extensive experiments."
}
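
As context for the two issues the abstract names (transferring knowledge to each new task, and not forgetting old ones), the sketch below illustrates the sequential-task protocol on synthetic data: one shared model is fine-tuned on a sequence of tasks and re-tested on every task seen so far, making forgetting directly visible. This is a toy illustration only; the linear model, synthetic task generator, and all dimensions are assumptions for demonstration, not the paper's capsule-based B-CL adapter.

```python
# Hypothetical sketch of a continual-learning task sequence: naive
# sequential fine-tuning of one shared model. Accuracy drops on earlier
# tasks show the "forgetting" that methods like B-CL aim to prevent.
import torch
import torch.nn as nn

torch.manual_seed(0)
NUM_TASKS, DIM, CLASSES = 4, 32, 3  # assumed toy sizes, not from the paper

def make_task(task_id, n=512):
    """Synthetic stand-in for one ASC task; each task gets its own labeling rule."""
    g = torch.Generator().manual_seed(task_id)
    rule = torch.randn(DIM, CLASSES, generator=g)
    x = torch.randn(n, DIM)
    return x, (x @ rule).argmax(1)

model = nn.Linear(DIM, CLASSES)          # one model shared across all tasks
opt = torch.optim.Adam(model.parameters(), lr=1e-2)
loss_fn = nn.CrossEntropyLoss()

seen = []
for t in range(NUM_TASKS):
    x, y = make_task(t)
    seen.append((x, y))
    for _ in range(200):                 # naive sequential fine-tuning
        opt.zero_grad()
        loss_fn(model(x), y).backward()
        opt.step()
    with torch.no_grad():                # re-test on every task seen so far
        accs = [(model(xs).argmax(1) == ys).float().mean().item()
                for xs, ys in seen]
    print(f"after task {t}:",
          " ".join(f"T{i}={a:.2f}" for i, a in enumerate(accs)))
```

Running this prints per-task accuracy after each training stage; earlier tasks degrade as later ones are learned, which is the failure mode a continual-learning method must counter while still allowing forward and backward transfer.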