@inproceedings{wei-etal-2023-leveraging,
    title     = "Leveraging Multiple Teachers for Test-Time Adaptation of Language-Guided Classifiers",
    author    = "Wei, Kangda and
      Ghosh, Sayan and
      Menon, Rakesh R. and
      Srivastava, Shashank",
    editor    = "Bouamor, Houda and
      Pino, Juan and
      Bali, Kalika",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
    month     = dec,
    year      = "2023",
    address   = "Singapore",
    publisher = "Association for Computational Linguistics",
    url       = "https://aclanthology.org/2023.findings-emnlp.471/",
    doi       = "10.18653/v1/2023.findings-emnlp.471",
    pages     = "7068--7088",
    abstract  = "Recent approaches have explored language-guided classifiers capable of classifying examples from novel tasks when provided with task-specific natural language explanations, instructions or prompts (Sanh et al., 2022; R. Menon et al., 2022). While these classifiers can generalize in zero-shot settings, their task performance often varies substantially between different language explanations in unpredictable ways (Lu et al., 2022; Gonen et al., 2022). Also, current approaches fail to leverage unlabeled examples that may be available in many scenarios. Here, we introduce TALC, a framework that uses data programming to adapt a language-guided classifier for a new task during inference when provided with explanations from multiple teachers and unlabeled test examples. Our results show that TALC consistently outperforms a competitive baseline from prior work by an impressive 9.3{\%} (relative improvement). Further, we demonstrate the robustness of TALC to variations in the quality and quantity of provided explanations, highlighting its potential in scenarios where learning from multiple teachers or a crowd is involved. Our code is available at: https://github.com/WeiKangda/TALC.git."
}
Markdown (Informal)
[Leveraging Multiple Teachers for Test-Time Adaptation of Language-Guided Classifiers](https://aclanthology.org/2023.findings-emnlp.471/) (Wei et al., Findings 2023)
ACL