@inproceedings{zagar-robnik-sikonja-2022-slovene,
title = "{S}lovene {S}uper{GLUE} Benchmark: Translation and Evaluation",
author = "{\v{Z}}agar, Ale{\v{s}} and
Robnik-{\v{S}}ikonja, Marko",
editor = "Calzolari, Nicoletta and
B{\'e}chet, Fr{\'e}d{\'e}ric and
Blache, Philippe and
Choukri, Khalid and
Cieri, Christopher and
Declerck, Thierry and
Goggi, Sara and
Isahara, Hitoshi and
Maegaard, Bente and
Mariani, Joseph and
Mazo, H{\'e}l{\`e}ne and
Odijk, Jan and
Piperidis, Stelios",
booktitle = "Proceedings of the Thirteenth Language Resources and Evaluation Conference",
month = jun,
year = "2022",
address = "Marseille, France",
publisher = "European Language Resources Association",
url = "https://preview.aclanthology.org/fix-sig-urls/2022.lrec-1.221/",
pages = "2058--2065",
abstract = "We present SuperGLUE benchmark adapted and translated into Slovene using a combination of human and machine translation. We describe the translation process and problems arising due to differences in morphology and grammar. We evaluate the translated datasets in several modes: monolingual, cross-lingual, and multilingual, taking into account differences between machine and human translated training sets. The results show that the monolingual Slovene SloBERTa model is superior to massively multilingual and trilingual BERT models, but these also show a good cross-lingual performance on certain tasks. The performance of Slovene models still lags behind the best English models."
}
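
The abstract contrasts a monolingual Slovene model (SloBERTa) with multilingual BERT models in monolingual and cross-lingual evaluation. Below is a minimal fine-tuning sketch of such a setup, not the authors' evaluation code: it assumes the HuggingFace model id EMBEDDIA/sloberta and a BoolQ-style yes/no task with toy Slovene examples; swapping in a multilingual checkpoint such as bert-base-multilingual-cased and an English training set would approximate the cross-lingual mode described in the paper.

    # Minimal sketch (not the authors' code). Assumes the HuggingFace model id
    # "EMBEDDIA/sloberta" and a BoolQ-style yes/no task; both are assumptions
    # made for illustration, not details taken from the paper.
    import torch
    from torch.utils.data import DataLoader
    from transformers import AutoTokenizer, AutoModelForSequenceClassification

    MODEL_ID = "EMBEDDIA/sloberta"  # assumed monolingual Slovene checkpoint

    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID, num_labels=2)

    # Toy stand-in for a translated SuperGLUE (BoolQ-style) training set.
    examples = [
        {"passage": "Ljubljana je glavno mesto Slovenije.",
         "question": "Ali je Ljubljana glavno mesto Slovenije?", "label": 1},
        {"passage": "Triglav je najvisja gora v Sloveniji.",
         "question": "Ali je Triglav reka?", "label": 0},
    ]

    def collate(batch):
        # Encode passage/question pairs and attach the gold labels.
        enc = tokenizer([b["passage"] for b in batch],
                        [b["question"] for b in batch],
                        padding=True, truncation=True, return_tensors="pt")
        enc["labels"] = torch.tensor([b["label"] for b in batch])
        return enc

    loader = DataLoader(examples, batch_size=2, collate_fn=collate)
    optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

    model.train()
    for batch in loader:
        optimizer.zero_grad()
        out = model(**batch)   # cross-entropy loss over the two labels
        out.loss.backward()
        optimizer.step()
        print(f"loss: {out.loss.item():.4f}")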