@inproceedings{duan-etal-2022-barle,
title = "{BARLE}: Background-Aware Representation Learning for Background Shift Out-of-Distribution Detection",
author = "Duan, Hanyu and
Yang, Yi and
Abbasi, Ahmed and
Tam, Kar Yan",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2022",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.findings-emnlp.53/",
doi = "10.18653/v1/2022.findings-emnlp.53",
pages = "750--764",
abstract = "Machine learning models often suffer from a performance drop when they are applied to out-of-distribution (OOD) samples, i.e., those drawn far away from the training data distribution. Existing OOD detection work mostly focuses on identifying semantic-shift OOD samples, e.g., instances from unseen new classes. However, background-shift OOD detection, which identifies samples with domain or style-change, represents a more practical yet challenging task. In this paper, we propose Background-Aware Representation Learning (BARLE) for background-shift OOD detection in NLP. Specifically, we generate semantics-preserving background-shifted pseudo OOD samples from pretrained masked language models. We then contrast the in-distribution (ID) samples with their pseudo OOD counterparts. Unlike prior semantic-shift OOD detection work that often leverages an external text corpus, BARLE only uses ID data, which is more flexible and cost-efficient. In experiments across several text classification tasks, we demonstrate that BARLE is capable of improving background-shift OOD detection performance while maintaining ID classification accuracy. We further investigate the properties of the generated pseudo OOD samples, uncovering the working mechanism of BARLE."
}