@inproceedings{santos-rodriguez-graff-2025-infotec,
title = "{INFOTEC}-{NLP} at {S}em{E}val-2025 Task 11: A Case Study on Transformer-Based Models and Bag of Words",
author = "Santos - Rodriguez, Emmanuel and
Graff, Mario",
editor = "Rosenthal, Sara and
Ros{\'a}, Aiala and
Ghosh, Debanjan and
Zampieri, Marcos",
booktitle = "Proceedings of the 19th International Workshop on Semantic Evaluation (SemEval-2025)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/corrections-2025-08/2025.semeval-1.50/",
pages = "350--356",
ISBN = "979-8-89176-273-2",
abstract = "Leveraging transformer-based models as feature extractors, we introduce a hybrid architecture that integrates a bidirectional LSTM network with a multi-head attention mechanism to address the challenges of multilingual emotion detection in text. While pre-trained transformers provide robust contextual embeddings, they often struggle with capturing long-range dependencies and handling class imbalances, particularly in low-resource languages. To mitigate these issues, our approach combines sequential modeling and attention mechanisms, allowing the model to refine representations by emphasizing key emotional cues in text."
}
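The abstract describes a hybrid pipeline: a pre-trained transformer used as a feature extractor, a bidirectional LSTM over its contextual embeddings, and a multi-head attention layer that re-weights emotionally salient tokens before classification. The snippet below is a minimal, hypothetical PyTorch sketch of that design, not the authors' released system; the backbone choice (`xlm-roberta-base`), hidden sizes, number of heads, and the six-label output are illustrative assumptions.

```python
# Hypothetical sketch of the hybrid architecture outlined in the abstract:
# frozen transformer embeddings -> BiLSTM -> multi-head attention -> multi-label head.
# All names, dimensions, and the backbone are assumptions for illustration only.
import torch
import torch.nn as nn
from transformers import AutoModel, AutoTokenizer


class HybridEmotionClassifier(nn.Module):
    def __init__(self, backbone="xlm-roberta-base", hidden=256, heads=4, num_emotions=6):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(backbone)
        for p in self.encoder.parameters():   # transformer acts purely as a feature extractor
            p.requires_grad = False
        d = self.encoder.config.hidden_size
        self.bilstm = nn.LSTM(d, hidden, batch_first=True, bidirectional=True)
        self.attn = nn.MultiheadAttention(2 * hidden, num_heads=heads, batch_first=True)
        self.classifier = nn.Linear(2 * hidden, num_emotions)

    def forward(self, input_ids, attention_mask):
        with torch.no_grad():
            emb = self.encoder(input_ids=input_ids,
                               attention_mask=attention_mask).last_hidden_state
        seq, _ = self.bilstm(emb)                      # sequential modeling of long-range dependencies
        ctx, _ = self.attn(seq, seq, seq,
                           key_padding_mask=(attention_mask == 0))  # emphasize key emotional cues
        mask = attention_mask.unsqueeze(-1).float()
        pooled = (ctx * mask).sum(1) / mask.sum(1)     # mean-pool over non-padding tokens
        return self.classifier(pooled)                 # multi-label logits (pair with BCEWithLogitsLoss)


if __name__ == "__main__":
    tok = AutoTokenizer.from_pretrained("xlm-roberta-base")
    model = HybridEmotionClassifier()
    batch = tok(["I can't believe this happened!"], return_tensors="pt", padding=True)
    print(model(batch["input_ids"], batch["attention_mask"]).shape)  # torch.Size([1, 6])
```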
Markdown (Informal)
[INFOTEC-NLP at SemEval-2025 Task 11: A Case Study on Transformer-Based Models and Bag of Words](https://preview.aclanthology.org/corrections-2025-08/2025.semeval-1.50/) (Santos-Rodriguez & Graff, SemEval 2025)