@inproceedings{b-v-patil-2025-transformer,
  title     = {Transformer-Based Analysis of Adaptive and Maladaptive Self-States in Longitudinal Social Media Data},
  author    = {B, Abhin and
               V Patil, Renukasakshi},
  editor    = {Zirikly, Ayah and
               Yates, Andrew and
               Desmet, Bart and
               Ireland, Molly and
               Bedrick, Steven and
               MacAvaney, Sean and
               Bar, Kfir and
               Ophir, Yaakov},
  booktitle = {Proceedings of the 10th Workshop on Computational Linguistics and Clinical Psychology ({CLPsych} 2025)},
  month     = may,
  year      = {2025},
  address   = {Albuquerque, New Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.clpsych-1.26/},
  pages     = {292--299},
  isbn      = {979-8-89176-226-8},
  abstract  = {The CLPsych workshop, held annually since 2014, promotes the application of computational linguistics to behavioral analysis and neurological health assessment. The CLPsych 2025 shared task, extending the framework of the 2022 iteration, leverages the MIND framework to model temporal fluctuations in mental states. This shared task comprises three sub-tasks, each presenting substantial challenges to natural language processing (NLP) systems, requiring sensitive and precise outcomes in analyzing adaptive and maladaptive behaviors. In this study, we employed a range of modeling strategies tailored to the requirements and expected outputs of each subtask. Our approach mostly utilized traditional language models like BERT, LongFormer and Pegasus diverging from the prevalent trend of prompt-tuned large language models. We achieved an overall ranking of 13th, with subtask rankings of 8th in Task 1a, 13th in Task 1b, 8th in Task 2, and 7th in Task 3. These results highlight the efficacy of our methods while underscoring areas for further refinement in handling complex behavioral data.},
}
@comment{Markdown (Informal):
[Transformer-Based Analysis of Adaptive and Maladaptive Self-States in Longitudinal Social Media Data](https://aclanthology.org/2025.clpsych-1.26/) (B & V Patil, CLPsych 2025)
ACL
}