@inproceedings{li-etal-2023-janko,
  title     = {Janko at {SemEval}-2023 Task 2: Bidirectional {LSTM} Model Based on Pre-training for {Chinese} Named Entity Recognition},
  author    = {Li, Jiankuo and
               Guan, Zhengyi and
               Ding, Haiyan},
  editor    = {Ojha, Atul Kr. and
               Do{\u{g}}ru{\"o}z, A. Seza and
               Da San Martino, Giovanni and
               Tayyar Madabushi, Harish and
               Kumar, Ritesh and
               Sartori, Elisa},
  booktitle = {Proceedings of the 17th International Workshop on Semantic Evaluation ({SemEval}-2023)},
  month     = jul,
  year      = {2023},
  address   = {Toronto, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.semeval-1.132/},
  doi       = {10.18653/v1/2023.semeval-1.132},
  pages     = {958--962},
  abstract  = {This paper describes the method we submitted as the Janko team in the SemEval-2023 Task 2, Multilingual Complex Named Entity Recognition (MultiCoNER 2). We only participated in the Chinese track. In this paper, we implement the BERT-BiLSTM-RDrop model. We use the fine-tuned BERT models, take the output of BERT as the input of the BiLSTM network, and finally use R-Drop technology to optimize the loss function. Our submission achieved a macro-averaged F1 score of 0.579 on the testset.},
}
@comment{
Markdown (Informal)
[Janko at SemEval-2023 Task 2: Bidirectional LSTM Model Based on Pre-training for Chinese Named Entity Recognition](https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.semeval-1.132/) (Li et al., SemEval 2023)
ACL
}