@inproceedings{singh-etal-2025-information,
title = "An Information-Theoretic Approach to Reducing Fertility in {LLM}s for {M}anipuri Machine Translation",
author = "Singh, Telem Joyson and
Sanasam, Ranbir Singh and
Sarmah, Priyankoo",
editor = "Inui, Kentaro and
Sakti, Sakriani and
Wang, Haofen and
Wong, Derek F. and
Bhattacharyya, Pushpak and
Banerjee, Biplab and
Ekbal, Asif and
Chakraborty, Tanmoy and
Singh, Dhirendra Pratap",
booktitle = "Proceedings of the 14th International Joint Conference on Natural Language Processing and the 4th Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics",
month = dec,
year = "2025",
address = "Mumbai, India",
publisher = "The Asian Federation of Natural Language Processing and The Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.findings-ijcnlp.145/",
pages = "2394--2404",
ISBN = "979-8-89176-303-6",
abstract = "Large language models (LLMs) have transformed machine translation, yet they have a high subword fertility issue for low-resource languages, which leads to slow inference speed and increased costs. While vocabulary expansion via continual pre-training is a common solution, it often degrades translation quality and requires large target-language corpora, which are unavailable for truly low-resource languages. To address this, we investigate tokenization efficiency through an information-theoretic lens, building on the established hypothesis that word length correlates with information content. From this perspective, we characterize tokenization inefficiency as having high fertility for low-information (highly predictable) words. Guided by this principle, we introduce a novel fine-tuning strategy that systematically identifies informationally redundant words{---}those with high fertility but low information content{---}for targeted vocabulary expansion and model fine-tuning. Experiments fine-tuning BLOOM and LLaMA-3 in English-Manipuri and other two language pairs show that our proposed method significantly reduces fertility by 50{\%} and accelerates inference by more than 2 times, without compromising and often exceeding the translation quality of standard LLM baselines, providing a theoretically grounded solution for efficient LLM-based MT."
}
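As a rough illustration of the selection criterion the abstract describes, the Python sketch below flags words with high subword fertility but low information content, using unigram surprisal (-log2 p(w)) as a stand-in information measure. The function name, the thresholds, and the choice of unigram surprisal are illustrative assumptions, not the paper's actual procedure.

    import math
    from collections import Counter

    def select_redundant_words(corpus_words, tokenize,
                               fertility_min=3, surprisal_max=8.0):
        """Flag words whose fertility (subwords per word) is at least
        fertility_min while their unigram surprisal is at most
        surprisal_max bits. Thresholds are hypothetical, not from the paper."""
        counts = Counter(corpus_words)
        total = sum(counts.values())
        selected = []
        for word, count in counts.items():
            fertility = len(tokenize(word))        # subwords produced for this word
            surprisal = -math.log2(count / total)  # information content, in bits
            if fertility >= fertility_min and surprisal <= surprisal_max:
                selected.append((word, fertility, surprisal))
        # Most wasteful words first: highest fertility, then lowest surprisal.
        return sorted(selected, key=lambda t: (-t[1], t[2]))

In use, corpus_words would come from the target-language (e.g., Manipuri) side of the training data and tokenize from the base model's tokenizer; the returned words would be candidates for targeted vocabulary expansion ahead of fine-tuning.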