@inproceedings{soto-martinez-etal-2024-generating-amrs,
  title     = {Generating from {AMRs} into High and Low-Resource Languages using Phylogenetic Knowledge and Hierarchical {QLoRA} Training ({HQL})},
  author    = {Soto Martinez, William and
               Parmentier, Yannick and
               Gardent, Claire},
  editor    = {Mahamood, Saad and
               Minh, Nguyen Le and
               Ippolito, Daphne},
  booktitle = {Proceedings of the 17th International Natural Language Generation Conference},
  month     = sep,
  year      = {2024},
  address   = {Tokyo, Japan},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.inlg-main.7/},
  pages     = {70--81},
  abstract  = {Multilingual generation from Abstract Meaning Representations (AMRs) verbalises AMRs into multiple languages. Previous work has focused on high- and medium-resource languages relying on large amounts of training data. In this work, we consider both high- and low-resource languages capping training data size at the lower bound set by our low-resource languages i.e. 31K. We propose a straightforward technique to enhance results on low-resource while preserving performance on high-resource languages. We iteratively refine a multilingual model to a set of monolingual models using Low-Rank Adaptation with a training curriculum based on a tree structure; this permits investigating how the languages used at each iteration impact generation performance on high and low-resource languages. We show an improvement over both mono and multilingual approaches. Comparing different ways of grouping languages at each iteration step we find two working configurations: grouping related languages which promotes transfer, or grouping distant languages which facilitates regularisation.},
}
@comment{
Markdown (Informal)
[Generating from AMRs into High and Low-Resource Languages using Phylogenetic Knowledge and Hierarchical QLoRA Training (HQL)](https://preview.aclanthology.org/Ingest-2025-COMPUTEL/2024.inlg-main.7/) (Soto Martinez et al., INLG 2024)
ACL
}