@inproceedings{mittal-etal-2025-recast,
title = "{RECAST}: Retrieval-Augmented Contextual {ASR} via Decoder-State Keyword Spotting",
author = "Mittal, Ashish and
Sarawagi, Sunita and
Jyothi, Preethi",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2025.findings-emnlp.203/",
doi = "10.18653/v1/2025.findings-emnlp.203",
pages = "3780--3793",
isbn = "979-8-89176-335-7",
abstract = "Contextual biasing in ASR systems is critical for recognizing rare, domain-specific terms but becomes impractical with large keyword dictionaries due to prompt size and latency constraints. We present RECAST{--}a lightweight retrieval-augmented approach that repurposes decoder states of a pretrained ASR model to retrieve relevant keywords without requiring audio exemplars. RECAST introduces a contrastively trained retriever that aligns decoder-state embeddings with textual keyword representations, enabling fast token-level retrieval over large dictionaries. Retrieved keywords are ranked and formatted into a prompt to guide a downstream speech language model. Trained solely on LibriSpeech and evaluated on out-of-domain benchmarks covering up to 4,000 keywords across diverse domains, RECAST consistently outperforms full-list prompt biasing and strong phonetic/text baselines. It achieves up to 54.3{\%} relative reduction in entity WER and 41.3{\%} overall WER improvement over the baseline, along with up to 2.5x higher recall in challenging settings. Furthermore, RECAST remains effective for diverse languages such as Hindi, demonstrating its scalability, language-agnostic design, and practicality for real-world contextual ASR."
}
@comment{Markdown (Informal) citation for the entry above:
[RECAST: Retrieval-Augmented Contextual ASR via Decoder-State Keyword Spotting](https://aclanthology.org/2025.findings-emnlp.203/) (Mittal et al., Findings 2025)
ACL
}