@inproceedings{dannells-etal-2024-transformer,
title = "Transformer-based {S}wedish Semantic Role Labeling through Transfer Learning",
author = "Dann{\'e}lls, Dana and
Johansson, Richard and
Yang Buhr, Lucy",
editor = "Calzolari, Nicoletta and
Kan, Min-Yen and
Hoste, Veronique and
Lenci, Alessandro and
Sakti, Sakriani and
Xue, Nianwen",
booktitle = "Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)",
month = may,
year = "2024",
address = "Torino, Italia",
publisher = "ELRA and ICCL",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.lrec-main.1458/",
pages = "16762--16769",
    abstract = "Semantic Role Labeling (SRL) is a task in natural language understanding where the goal is to extract semantic roles for a given sentence. English SRL has achieved state-of-the-art performance using Transformer techniques and supervised learning. However, this technique is not a viable choice for smaller languages like Swedish due to the limited amount of training data. In this paper, we present the first effort in building a Transformer-based SRL system for Swedish by exploring multilingual and cross-lingual transfer learning methods and leveraging the Swedish FrameNet resource. We demonstrate that multilingual transfer learning outperforms two different cross-lingual transfer models. We also found some differences between frames in FrameNet that can either hinder or enhance the model's performance. The resulting end-to-end model is freely available and will be made accessible through Spr{\r{a}}kbanken Text's research infrastructure."
}