@inproceedings{kumar-kumar-2024-scalar,
  title     = {sca{LAR} {S}em{E}val-2024 Task 1: Semantic Textual Relatednes for {E}nglish},
  author    = {Kumar, Anand and
               Kumar, Hemanth},
  editor    = {Ojha, Atul Kr. and
               Do{\u{g}}ru{\"o}z, A. Seza and
               Tayyar Madabushi, Harish and
               Da San Martino, Giovanni and
               Rosenthal, Sara and
               Ros{\'a}, Aiala},
  booktitle = {Proceedings of the 18th International Workshop on Semantic Evaluation (SemEval-2024)},
  month     = jun,
  year      = {2024},
  address   = {Mexico City, Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.semeval-1.129/},
  doi       = {10.18653/v1/2024.semeval-1.129},
  pages     = {902--906},
  abstract  = {This study investigates Semantic Textual Relatedness (STR) within Natural Language Processing (NLP) through experiments conducted on a dataset from the SemEval-2024 STR task. The dataset comprises train instances with three features (PairID, Text, and Score) and test instances with two features (PairID and Text), where sentence pairs are separated by `/n' in the Text column. Using BERT (sentence transformers pipeline), we explore two approaches: one with fine-tuning (Track A: Supervised) and another without fine-tuning (Track B: UnSupervised). Fine-tuning the BERT pipeline yielded a Spearman correlation coefficient of 0.803, while without fine-tuning, a coefficient of 0.693 was attained using cosine similarity. The study concludes by emphasizing the significance of STR in NLP tasks, highlighting the role of pre-trained language models like BERT and Sentence Transformers in enhancing semantic relatedness assessments.},
}
Markdown (Informal)
[scaLAR SemEval-2024 Task 1: Semantic Textual Relatednes for English](https://aclanthology.org/2024.semeval-1.129/) (Kumar & Kumar, SemEval 2024)
ACL