@inproceedings{barikbin-2022-slpl,
    title     = "{SLPL}-Sentiment at {S}em{E}val-2022 Task 10: Making Use of Pre-Trained Model's Attention Values in Structured Sentiment Analysis",
    author    = "Barikbin, Sadrodin",
    editor    = "Emerson, Guy and
      Schluter, Natalie and
      Stanovsky, Gabriel and
      Kumar, Ritesh and
      Palmer, Alexis and
      Schneider, Nathan and
      Singh, Siddharth and
      Ratan, Shyam",
    booktitle = "Proceedings of the 16th International Workshop on Semantic Evaluation (SemEval-2022)",
    month     = jul,
    year      = "2022",
    address   = "Seattle, United States",
    publisher = "Association for Computational Linguistics",
    url       = "https://aclanthology.org/2022.semeval-1.192/",
    doi       = "10.18653/v1/2022.semeval-1.192",
    pages     = "1382--1388",
    abstract  = "Sentiment analysis is a useful problem which could serve a variety of fields from business intelligence to social studies and even health studies. Using SemEval 2022 Task 10 formulation of this problem and taking sequence labeling as our approach, we propose a model which learns the task by finetuning a pretrained transformer, introducing as few parameters ({\textasciitilde}150k) as possible and making use of precomputed attention values in the transformer. Our model improves shared task baselines on all task datasets."
}
Markdown (Informal)
[SLPL-Sentiment at SemEval-2022 Task 10: Making Use of Pre-Trained Model’s Attention Values in Structured Sentiment Analysis](https://aclanthology.org/2022.semeval-1.192/) (Barikbin, SemEval 2022)
ACL