@inproceedings{singh-tiwary-2023-silp,
  title     = {Silp{\_}nlp at {SemEval}-2023 Task 2: Cross-lingual Knowledge Transfer for Mono-lingual Learning},
  author    = {Singh, Sumit and
               Tiwary, Uma},
  editor    = {Ojha, Atul Kr. and
               Do{\u{g}}ru{\"o}z, A. Seza and
               Da San Martino, Giovanni and
               Tayyar Madabushi, Harish and
               Kumar, Ritesh and
               Sartori, Elisa},
  booktitle = {Proceedings of the 17th International Workshop on Semantic Evaluation ({SemEval}-2023)},
  month     = jul,
  year      = {2023},
  address   = {Toronto, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest_wac_2008/2023.semeval-1.164/},
  doi       = {10.18653/v1/2023.semeval-1.164},
  pages     = {1183--1189},
  abstract  = {Our team silp{\_}nlp participated in SemEval2023 Task 2: MultiCoNER II. Our work made systems for 11 mono-lingual tracks. For leveraging the advantage of all track knowledge we chose transformer-based pretrained models, which have strong cross-lingual transferability. Hence our model trained in two stages, the first stage for multi-lingual learning from all tracks and the second for fine-tuning individual tracks. Our work highlights that the knowledge of all tracks can be transferred to an individual track if the baseline language model has crosslingual features. Our system positioned itself in the top 10 for 4 tracks by scoring 0.7432 macro F1 score for the Hindi track ( 7th rank ) and 0.7322 macro F1 score for the Bangla track ( 9th rank ).},
}
@comment{Informal Markdown citation (from the ACL Anthology page):
[Silp_nlp at SemEval-2023 Task 2: Cross-lingual Knowledge Transfer for Mono-lingual Learning](https://preview.aclanthology.org/ingest_wac_2008/2023.semeval-1.164/) (Singh & Tiwary, SemEval 2023)
ACL}