@inproceedings{srivastava-chiang-2025-calling,
title = "We`re Calling an Intervention: Exploring Fundamental Hurdles in Adapting Language Models to Nonstandard Text",
author = "Srivastava, Aarohi and
Chiang, David",
editor = "Bak, JinYeong and
Goot, Rob van der and
Jang, Hyeju and
Buaphet, Weerayut and
Ramponi, Alan and
Xu, Wei and
Ritter, Alan",
booktitle = "Proceedings of the Tenth Workshop on Noisy and User-generated Text",
month = may,
year = "2025",
address = "Albuquerque, New Mexico, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Ingest-2025-COMPUTEL/2025.wnut-1.6/",
pages = "45--56",
ISBN = "979-8-89176-232-9",
abstract = "We present a suite of experiments that allow us to understand the underlying challenges of language model adaptation to nonstandard text. We do so by designing interventions that approximate core features of user-generated text and their interactions with existing biases of language models. Applying our interventions during language model adaptation to nonstandard text variations, we gain important insights into when such adaptation is successful, as well as the aspects of text variation and noise that are particularly difficult for language models to handle. For instance, on text with character-level variation, out-of-the-box performance improves even with a few additional training examples but approaches a plateau, suggesting that more data is not the solution. In contrast, on text with variation involving new words or meanings, far more data is needed, but it leads to a massive breakthrough in performance. Our findings reveal that existing models lack the necessary infrastructure to handle diverse forms of nonstandard text, guiding the development of more resilient language modeling techniques. We make the code for our interventions, which can be applied to any English text data, publicly available."
}