@inproceedings{li-etal-2024-self-augmented,
title = "Self-Augmented In-Context Learning for Unsupervised Word Translation",
author = "Li, Yaoyiran and
Korhonen, Anna and
Vuli{\'c}, Ivan",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2024.acl-short.67/",
doi = "10.18653/v1/2024.acl-short.67",
pages = "743--753",
abstract = "Recent work has shown that, while large language models (LLMs) demonstrate strong word translation or bilingual lexicon induction (BLI) capabilities in few-shot setups, they still cannot match the performance of {\textquoteleft}traditional' mapping-based approaches in the unsupervised scenario where no seed translation pairs are available, especially for lower-resource languages. To address this challenge with LLMs, we propose self-augmented in-context learning (SAIL) for unsupervised BLI: starting from a zero-shot prompt, SAIL iteratively induces a set of high-confidence word translation pairs for in-context learning (ICL) from an LLM, which it then reapplies to the same LLM in the ICL fashion. Our method shows substantial gains over zero-shot prompting of LLMs on two established BLI benchmarks spanning a wide range of language pairs, also outperforming mapping-based baselines across the board. In addition to achieving state-of-the-art unsupervised BLI performance, we also conduct comprehensive analyses on SAIL and discuss its limitations."
}
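
The abstract describes an iterative loop: induce translation pairs zero-shot, keep the high-confidence ones, and feed them back to the same LLM as in-context examples. Below is a minimal, illustrative sketch of that loop. Everything here is an assumption for illustration, not the paper's implementation: `query_llm` is a hypothetical stand-in for an actual LLM call, the prompt wording is invented, and the back-translation agreement filter is an assumed proxy for the paper's own high-confidence selection criterion.

```python
from typing import Callable


def sail_bli(
    source_words: list[str],
    query_llm: Callable[[str], str],
    n_iterations: int = 3,
    n_examples: int = 5,
) -> dict[str, str]:
    """Sketch of the SAIL loop: zero-shot induction of translation pairs,
    high-confidence filtering, then re-prompting the same LLM with those
    pairs as in-context examples, repeated for a few iterations."""
    high_confidence: dict[str, str] = {}

    for _ in range(n_iterations):
        candidates: dict[str, str] = {}
        for word in source_words:
            if high_confidence:
                # ICL: prepend previously induced high-confidence pairs.
                shots = "\n".join(
                    f"{s} -> {t}"
                    for s, t in list(high_confidence.items())[:n_examples]
                )
                prompt = f"{shots}\n{word} ->"
            else:
                # First iteration: plain zero-shot prompt.
                prompt = f"Translate into the target language: {word} ->"
            candidates[word] = query_llm(prompt).strip()

        # High-confidence filter: keep pairs whose back-translation agrees.
        # This criterion is an assumed stand-in; the paper defines its own
        # selection rule for high-confidence pairs.
        high_confidence = {
            s: t
            for s, t in candidates.items()
            if query_llm(f"Translate back: {t} ->").strip() == s
        }

    return high_confidence
```

Any callable that maps a prompt string to a completion string can be plugged in as `query_llm`, e.g. a thin wrapper around an LLM API client.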