@inproceedings{barman-etal-2025-scratch,
title = "From Scratch to Fine-Tuned: A Comparative Study of Transformer Training Strategies for Legal Machine Translation",
author = "Barman, Amit and
Mandal, Atanu and
Naskar, Sudip Kumar",
editor = "Modi, Ashutosh and
Ghosh, Saptarshi and
Ekbal, Asif and
Goyal, Pawan and
Jain, Sarika and
Joshi, Abhinav and
Mishra, Shivani and
Datta, Debtanu and
Paul, Shounak and
Singh, Kshetrimayum Boynao and
Kumar, Sandeep",
booktitle = "Proceedings of the 1st Workshop on NLP for Empowering Justice (JUST-NLP 2025)",
month = dec,
year = "2025",
address = "Mumbai, India",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.justnlp-main.20/",
pages = "179--185",
ISBN = "979-8-89176-312-8",
abstract = "In multilingual nations like India, access to legal information is often hindered by language barriers, as much of the legal and judicial documentation remains in English. Legal Machine Translation (L-MT) offers a scalable solution to this challenge by enabling accurate and accessible translations of legal documents. This paper presents our work for the JUST-NLP 2025 Legal MT shared task, focusing on English{--}Hindi translation using Transformer-based approaches. We experiment with 2 complementary strategies, fine-tuning a pre-trained OPUS-MT model for domain-specific adaptation and training a Transformer model from scratch using the provided legal corpus. Performance is evaluated using standard MT metrics, including SacreBLEU, chrF++, TER, ROUGE, BERTScore, METEOR, and COMET. Our fine-tuned OPUS-MT model achieves a SacreBLEU score of 46.03, significantly outperforming both baseline and from-scratch models. The results highlight the effectiveness of domain adaptation in enhancing translation quality and demonstrate the potential of L-MT systems to improve access to justice and legal transparency in multilingual contexts."
}