@inproceedings{li-etal-2025-l4,
  title     = {{L4}: Mutual Learning Helps Lifelong Language Learning},
  author    = {Li, Jiyong and
               Azizov, Dilshod and
               Liang, Shangsong},
  editor    = {Potdar, Saloni and
               Rojas-Barahona, Lina and
               Montella, Sebastien},
  booktitle = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing: Industry Track},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-industry.89/},
  pages     = {1275--1286},
  isbn      = {979-8-89176-333-3},
  abstract  = {Adapting language models to learn continuously from data streams while retaining previous knowledge is a key challenge in artificial intelligence (AI), particularly in lifelong language learning. Existing distillation methods are based on offline techniques, limiting their ability to update in real-time and adapt to dynamic environments. To address this, we propose online dynamic mutual distillation - a novel framework that enables continuous mutual learning from task streams without relying on domain-specific teachers. To our knowledge, this is the first application of mutual learning in lifelong language learning, providing dynamic knowledge transfer without domain-specific teachers. Moreover, our extensive experiments demonstrate that the proposed method reduces catastrophic forgetting, while improving task performance on various benchmark datasets making it suitable for real-world, dynamic natural language processing (NLP) applications such as adaptive chatbots and personalized language systems. We will make our code publicly available upon acceptance.},
}
Markdown (Informal)
@comment{Informal citations from the ACL Anthology page, kept for reference:
[L4: Mutual Learning Helps Lifelong Language Learning](https://preview.aclanthology.org/ingest-emnlp/2025.emnlp-industry.89/) (Li et al., EMNLP 2025)
Jiyong Li, Dilshod Azizov, and Shangsong Liang. 2025. L4: Mutual Learning Helps Lifelong Language Learning. In Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing: Industry Track, pages 1275–1286, Suzhou, China. Association for Computational Linguistics.
}