@inproceedings{prama-2025-llms,
  title     = {{LLM}s for Low-Resource Dialect Translation Using Context-Aware Prompting: A Case Study on {S}ylheti},
  author    = {Prama, Tabia Tanzin},
  editor    = {Alam, Firoj and
               Kar, Sudipta and
               Chowdhury, Shammur Absar and
               Hassan, Naeemul and
               Prince, Enamul Hoque and
               Tasnim, Mohiuddin and
               Rony, Md Rashad Al Hasan and
               Rahman, Md Tahmid Rahman},
  booktitle = {Proceedings of the Second Workshop on Bangla Language Processing (BLP-2025)},
  month     = dec,
  year      = {2025},
  address   = {Mumbai, India},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.banglalp-1.24/},
  pages     = {292--308},
  isbn      = {979-8-89176-314-2},
  abstract  = {Large Language Models (LLMs) have demonstrated strong translation abilities through prompting, even without task-specific training. However, their effectiveness in dialectal and low-resource contexts remains underexplored. This study presents the first systematic investigation of LLM-based Machine Translation (MT) for Sylheti, a dialect of Bangla that is itself low-resource. We evaluate five advanced LLMs (GPT-4.1, GPT-4.1-mini, LLaMA 4, Grok 3, and Deepseek V3.2) across both translation directions (Bangla {\ensuremath{\leftrightarrow}} Sylheti), and find that these models struggle with dialect-specific vocabulary. To address this, we introduce Sylheti-CAP (Context-Aware Prompting), a three-step framework that embeds a linguistic rulebook, dictionary (core vocabulary and idioms), and authenticity check directly into prompts. Extensive experiments show that Sylheti-CAP consistently improves translation quality across models and prompting strategies. Both automatic metrics and human evaluations confirm its effectiveness, while qualitative analysis reveals notable reductions in hallucinations, ambiguities, and awkward phrasing{---}establishing Sylheti-CAP as a scalable solution for dialectal and low-resource MT.},
}
Markdown (Informal)
[LLMs for Low-Resource Dialect Translation Using Context-Aware Prompting: A Case Study on Sylheti](https://aclanthology.org/2025.banglalp-1.24/) (Prama, BanglaLP 2025)
ACL