@inproceedings{bhaskar-krishnamurthy-2024-yes,
title = "Yes-{MT}`s Submission to the Low-Resource {I}ndic Language Translation Shared Task in {WMT} 2024",
author = "Bhaskar, Yash and
Krishnamurthy, Parameswari",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Ninth Conference on Machine Translation",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2024.wmt-1.71/",
doi = "10.18653/v1/2024.wmt-1.71",
pages = "788--792",
abstract = "This paper presents the systems submitted by the Yes-MT team for the Low-Resource Indic Language Translation Shared Task at WMT 2024, focusing on translating between English and the Assamese, Mizo, Khasi, and Manipuri languages. The experiments explored various approaches, including fine-tuning pre-trained models like mT5 and IndicBart in both Multilingual and Monolingual settings, LoRA finetune IndicTrans2, zero-shot and few-shot prompting with large language models (LLMs) like Llama 3 and Mixtral 8x7b, LoRA Supervised Fine Tuning Llama 3, and training Transformers from scratch. The results were evaluated on the WMT23 Low-Resource Indic Language Translation Shared Task`s test data using SacreBLEU and CHRF highlighting the challenges of low-resource translation and show the potential of LLMs for these tasks, particularly with fine-tuning."
}