@inproceedings{jia-zhang-2023-memory,
  title     = {Memory-Based Invariance Learning for Out-of-Domain Text Classification},
  author    = {Jia, Chen and
               Zhang, Yue},
  editor    = {Bouamor, Houda and
               Pino, Juan and
               Bali, Kalika},
  booktitle = {Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing},
  month     = dec,
  year      = {2023},
  address   = {Singapore},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.emnlp-main.101/},
  doi       = {10.18653/v1/2023.emnlp-main.101},
  pages     = {1635--1647},
  abstract  = {We investigate the task of out-of-domain (OOD) text classification with the aim of extending a classification model, trained on multiple source domains, to an unseen target domain. Recent studies have shown that learning invariant representations can enhance the performance of OOD generalization. However, the inherent disparity in data distribution across different domains poses challenges for achieving effective invariance learning. This study addresses this issue by employing memory augmentations. Specifically, we augment the original feature space using key-value memory and employ a meta-learning-based approach to enhance the quality of the invariant representations. Experimental results on sentiment analysis and natural language inference tasks show the effectiveness of memory-based method for invariance learning, leading to state-of-the-art performance on six datasets.},
}
Markdown (Informal)
[Memory-Based Invariance Learning for Out-of-Domain Text Classification](https://aclanthology.org/2023.emnlp-main.101/) (Jia & Zhang, EMNLP 2023)
ACL