@inproceedings{tran-matsui-2025-team,
title = "Team {ISM} at {CLP}sych 2025: Capturing Mental Health Dynamics from Social Media Timelines using A Pretrained Large Language Model with In-Context Learning",
author = "Tran, Vu and
Matsui, Tomoko",
editor = "Zirikly, Ayah and
Yates, Andrew and
Desmet, Bart and
Ireland, Molly and
Bedrick, Steven and
MacAvaney, Sean and
Bar, Kfir and
Ophir, Yaakov",
booktitle = "Proceedings of the 10th Workshop on Computational Linguistics and Clinical Psychology (CLPsych 2025)",
month = may,
year = "2025",
address = "Albuquerque, New Mexico",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/corrections-2025-06/2025.clpsych-1.25/",
doi = "10.18653/v1/2025.clpsych-1.25",
pages = "287--291",
ISBN = "979-8-89176-226-8",
abstract = "We tackle the task by using a pretrained large language model (LLM) and in-context learning with template-based instructions to guide the LLM. To improve generation quality, we employ a two-step procedure: sampling and selection. For the sampling step, we randomly sample a subset of the provided training data for the context of LLM prompting. Next, for the selection step, we map the LLM generated outputs into a vector space and employ the Gaussian kernel density estimation to select the most likely output. The results show that the approach can achieve a certain degree of performance and there is still room for improvement."
}
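The abstract describes a two-step generate-then-select procedure: sample several LLM outputs under in-context prompts built from randomly drawn training examples, embed the outputs in a vector space, and keep the one with the highest Gaussian kernel density. The sketch below illustrates only the selection step under assumed choices (the embedding model, bandwidth, and helper name `select_most_likely` are illustrative, not the authors' implementation).

```python
# Hedged sketch of KDE-based output selection: embed candidate outputs and
# keep the one lying in the densest region of the embedding space.
# The embedding model and bandwidth are assumptions for illustration only.
import numpy as np
from sklearn.neighbors import KernelDensity
from sentence_transformers import SentenceTransformer


def select_most_likely(candidates: list[str], bandwidth: float = 0.5) -> str:
    """Return the candidate whose embedding has the highest estimated density."""
    encoder = SentenceTransformer("all-MiniLM-L6-v2")  # assumed embedding model
    vectors = encoder.encode(candidates)               # shape: (n_candidates, dim)

    # Fit a Gaussian KDE over all candidate embeddings, then score each
    # candidate under that density; the highest-scoring one is selected.
    kde = KernelDensity(kernel="gaussian", bandwidth=bandwidth).fit(vectors)
    log_density = kde.score_samples(vectors)
    return candidates[int(np.argmax(log_density))]


# Example: pick the consensus-like output among several sampled generations.
outputs = ["low risk", "moderate risk", "moderate risk of self-harm"]
print(select_most_likely(outputs))
```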
Markdown (Informal)
[Team ISM at CLPsych 2025: Capturing Mental Health Dynamics from Social Media Timelines using A Pretrained Large Language Model with In-Context Learning](https://preview.aclanthology.org/corrections-2025-06/2025.clpsych-1.25/) (Tran & Matsui, CLPsych 2025)