@inproceedings{goswami-etal-2024-gmu,
  title     = {{GMU} at {MLSP} 2024: Multilingual Lexical Simplification with Transformer Models},
  author    = {Goswami, Dhiman and
               North, Kai and
               Zampieri, Marcos},
  editor    = {Kochmar, Ekaterina and
               Bexte, Marie and
               Burstein, Jill and
               Horbach, Andrea and
               Laarmann-Quante, Ronja and
               Tack, Ana{\"i}s and
               Yaneva, Victoria and
               Yuan, Zheng},
  booktitle = {Proceedings of the 19th Workshop on Innovative Use of {NLP} for Building Educational Applications ({BEA} 2024)},
  month     = jun,
  year      = {2024},
  address   = {Mexico City, Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.bea-1.57/},
  pages     = {627--634},
  abstract  = {This paper presents GMU's submission to the Multilingual Lexical Simplification Pipeline (MLSP) shared task at the BEA workshop 2024. The task includes Lexical Complexity Prediction (LCP) and Lexical Simplification (LS) sub-tasks across 10 languages. Our submissions achieved rankings ranging from 1st to 5th in LCP and from 1st to 3rd in LS. Our best performing approach for LCP is a weighted ensemble based on Pearson correlation of language specific transformer models trained on all languages combined. For LS, GPT4-turbo zero-shot prompting achieved the best performance.},
}
@comment{Markdown (Informal):
[GMU at MLSP 2024: Multilingual Lexical Simplification with Transformer Models](https://aclanthology.org/2024.bea-1.57/) (Goswami et al., BEA 2024)
ACL
}