@inproceedings{andres-santamaria-2023-ixa,
title = "{IXA} at {S}em{E}val-2023 Task 2: Baseline Xlm-Roberta-base Approach",
author = "Andres Santamaria, Edgar",
editor = {Ojha, Atul Kr. and
Do{\u{g}}ru{\"o}z, A. Seza and
Da San Martino, Giovanni and
Tayyar Madabushi, Harish and
Kumar, Ritesh and
Sartori, Elisa},
booktitle = "Proceedings of the 17th International Workshop on Semantic Evaluation (SemEval-2023)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.semeval-1.50/",
doi = "10.18653/v1/2023.semeval-1.50",
pages = "379--381",
abstract = "IXA proposes a Sequence labeling fine-tune approach, which consists of a lightweight few-shot baseline (10e), the system takes advantage of transfer learning from pre-trained Named Entity Recognition and cross-lingual knowledge from the LM checkpoint. This technique obtains a drastic reduction in the effective training costs that works as a perfect baseline, future improvements in the baseline approach could fit: 1) Domain adequation, 2) Data augmentation, and 3) Intermediate task learning."
}