@inproceedings{pisarevskaya-zubiaga-2022-team,
    title     = {Team dina at {SemEval}-2022 Task 8: Pre-trained Language Models as Baselines for Semantic Similarity},
    author    = {Pisarevskaya, Dina and
                 Zubiaga, Arkaitz},
    editor    = {Emerson, Guy and
                 Schluter, Natalie and
                 Stanovsky, Gabriel and
                 Kumar, Ritesh and
                 Palmer, Alexis and
                 Schneider, Nathan and
                 Singh, Siddharth and
                 Ratan, Shyam},
    booktitle = {Proceedings of the 16th International Workshop on Semantic Evaluation ({SemEval}-2022)},
    month     = jul,
    year      = {2022},
    address   = {Seattle, United States},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2022.semeval-1.169/},
    doi       = {10.18653/v1/2022.semeval-1.169},
    pages     = {1196--1201},
    abstract  = {This paper describes the participation of the team {\textquotedblleft}dina{\textquotedblright} in the Multilingual News Similarity task at SemEval 2022. To build our system for the task, we experimented with several multilingual language models which were originally pre-trained for semantic similarity but were not further fine-tuned. We use these models in combination with state-of-the-art packages for machine translation and named entity recognition with the expectation of providing valuable input to the model. Our work assesses the applicability of such {\textquotedblleft}pure{\textquotedblright} models to solve the multilingual semantic similarity task in the case of news articles. Our best model achieved a score of 0.511, but shows that there is room for improvement.},
}
Markdown (Informal)
[Team dina at SemEval-2022 Task 8: Pre-trained Language Models as Baselines for Semantic Similarity](https://aclanthology.org/2022.semeval-1.169/) (Pisarevskaya & Zubiaga, SemEval 2022)
ACL