@inproceedings{ezquerro-etal-2026-bringing,
  title     = {Bringing Emerging Architectures to Sequence Labeling in {NLP}},
  author    = {Ezquerro, Ana and
               G{\'o}mez-Rodr{\'\i}guez, Carlos and
               Vilares, David},
  editor    = {Demberg, Vera and
               Inui, Kentaro and
               M{\`a}rquez, Llu{\'\i}s},
  booktitle = {Proceedings of the 19th Conference of the {E}uropean Chapter of the {A}ssociation for {C}omputational {L}inguistics (Volume 1: Long Papers)},
  month     = mar,
  year      = {2026},
  address   = {Rabat, Morocco},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2026.eacl-long.227/},
  pages     = {4886--4909},
  isbn      = {979-8-89176-380-7},
  abstract  = {Pretrained Transformer encoders are the dominant approach to sequence labeling. While some alternative architectures---such as xLSTMs, structured state-space models, diffusion models, and adversarial learning---have shown promise in language modeling, few have been applied to sequence labeling, and mostly on flat or simplified tasks. We study how these architectures adapt across tagging tasks that vary in structural complexity, label space, and token dependencies, with evaluation spanning multiple languages. We find that the strong performance previously observed in simpler settings does not always generalize well across languages or datasets, nor does it extend to more complex structured tasks.},
}
Markdown (Informal)
[Bringing Emerging Architectures to Sequence Labeling in NLP](https://aclanthology.org/2026.eacl-long.227/) (Ezquerro et al., EACL 2026)
ACL
- Ana Ezquerro, Carlos Gómez-Rodríguez, and David Vilares. 2026. Bringing Emerging Architectures to Sequence Labeling in NLP. In Proceedings of the 19th Conference of the European Chapter of the Association for Computational Linguistics (Volume 1: Long Papers), pages 4886–4909, Rabat, Morocco. Association for Computational Linguistics.