@inproceedings{li-etal-2023-lotus,
title = "Lotus at {W}ojood{NER} Shared Task: Multilingual Transformers: Unveiling Flat and Nested Entity Recognition",
author = "Li, Jiyong and
Azizov, Dilshod and
AlQuabeh, Hilal and
Liang, Shangsong",
editor = "Sawaf, Hassan and
El-Beltagy, Samhaa and
Zaghouani, Wajdi and
Magdy, Walid and
Abdelali, Ahmed and
Tomeh, Nadi and
Abu Farha, Ibrahim and
Habash, Nizar and
Khalifa, Salam and
Keleg, Amr and
Haddad, Hatem and
Zitouni, Imed and
Mrini, Khalil and
Almatham, Rawan",
booktitle = "Proceedings of ArabicNLP 2023",
month = dec,
year = "2023",
address = "Singapore (Hybrid)",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Author-page-Marten-During-lu/2023.arabicnlp-1.85/",
doi = "10.18653/v1/2023.arabicnlp-1.85",
pages = "765--770",
abstract = "We introduce our systems developed for two subtasks in the shared task {\textquotedblleft}Wojood{\textquotedblright} on Arabic NER detection, part of ArabicNLP 2023. For Subtask 1, we employ the XLM-R model to predict Flat NER labels for given tokens using a single classifier capable of categorizing all labels. For Subtask 2, we use the XLM-R encoder by building 21 individual classifiers. Each classifier corresponds to a specific label and is designed to determine the presence of its respective label. In terms of performance, our systems achieved competitive \textit{micro-F1} scores of \textbf{0.83} for Subtask 1 and \textbf{0.76} for Subtask 2, according to the leaderboard scores."
}
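
The abstract describes the Subtask 2 system as an XLM-R encoder with 21 separate classifiers, one per entity label, so that overlapping (nested) spans of different types can be predicted independently. Below is a minimal PyTorch sketch of that kind of multi-head token-classification setup, assuming the Hugging Face `transformers` library; the `xlm-roberta-base` checkpoint and the BIO-style tag set per head are illustrative assumptions, not the authors' released code.

```python
import torch
import torch.nn as nn
from transformers import XLMRobertaModel, XLMRobertaTokenizerFast

class MultiHeadNestedNER(nn.Module):
    """XLM-R encoder with one token-level classifier per entity label.

    Illustrative only: the label count (21) follows the abstract; the
    per-head BIO tagging scheme is an assumption for this sketch.
    """

    def __init__(self, num_labels: int = 21, tags_per_head: int = 3):
        super().__init__()
        self.encoder = XLMRobertaModel.from_pretrained("xlm-roberta-base")
        hidden = self.encoder.config.hidden_size
        # One small head per entity type, so spans of different types
        # can overlap (nested NER) without competing in a single softmax.
        self.heads = nn.ModuleList(
            [nn.Linear(hidden, tags_per_head) for _ in range(num_labels)]
        )

    def forward(self, input_ids, attention_mask):
        hidden_states = self.encoder(
            input_ids=input_ids, attention_mask=attention_mask
        ).last_hidden_state  # (batch, seq_len, hidden)
        # Per-label logits stacked: (num_labels, batch, seq_len, tags_per_head)
        return torch.stack([head(hidden_states) for head in self.heads])


tokenizer = XLMRobertaTokenizerFast.from_pretrained("xlm-roberta-base")
model = MultiHeadNestedNER()
batch = tokenizer(["هذا مثال"], return_tensors="pt")
logits = model(batch["input_ids"], batch["attention_mask"])
print(logits.shape)  # torch.Size([21, 1, seq_len, 3])
```

Each head is trained and decoded independently, which is one common way to realize the "one classifier per label" design the abstract mentions; the Subtask 1 (flat NER) system would instead use a single classification head over all labels.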