@inproceedings{liang-etal-2020-alice,
title = "{ALICE}: Active Learning with Contrastive Natural Language Explanations",
author = "Liang, Weixin and
Zou, James and
Yu, Zhou",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.emnlp-main.355/",
doi = "10.18653/v1/2020.emnlp-main.355",
pages = "4380--4391",
abstract = "Training a supervised neural network classifier typically requires many annotated training samples. Collecting and annotating a large number of data points are costly and sometimes even infeasible. Traditional annotation process uses a low-bandwidth human-machine communication interface: classification labels, each of which only provides a few bits of information. We propose Active Learning with Contrastive Explanations (ALICE), an expert-in-the-loop training framework that utilizes contrastive natural language explanations to improve data efficiency in learning. AL-ICE learns to first use active learning to select the most informative pairs of label classes to elicit contrastive natural language explanations from experts. Then it extracts knowledge from these explanations using a semantic parser. Finally, it incorporates the extracted knowledge through dynamically changing the learning model`s structure. We applied ALICEin two visual recognition tasks, bird species classification and social relationship classification. We found by incorporating contrastive explanations, our models outperform baseline models that are trained with 40-100{\%} more training data. We found that adding1expla-nation leads to similar performance gain as adding 13-30 labeled training data points."
}