@inproceedings{wang-etal-2024-investigating,
    title     = {Investigating the Personality Consistency in Quantized Role-Playing Dialogue Agents},
    author    = {Wang, Yixiao and
                 Fashandi, Homa and
                 Ferreira, Kevin},
    editor    = {Dernoncourt, Franck and
                 Preo{\c{t}}iuc-Pietro, Daniel and
                 Shimorina, Anastasia},
    booktitle = {Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing: Industry Track},
    month     = nov,
    year      = {2024},
    address   = {Miami, Florida, USA},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2024.emnlp-industry.19/},
    doi       = {10.18653/v1/2024.emnlp-industry.19},
    pages     = {239--255},
    abstract  = {This study explores the consistency of personality traits in quantized large language models (LLMs) for edge device role-playing scenarios. Using the Big Five personality traits model, we evaluate how stable assigned personalities are for Quantized Role-Playing Dialog Agents (QRPDA) during multi-turn interactions. We evaluate multiple LLMs with various quantization levels, combining binary indexing of personality traits, explicit self-assessments, and linguistic analysis of narratives. To address personality inconsistency, we propose a non-parametric method called Think2. Our multi-faceted evaluation framework demonstrates Think2's effectiveness in maintaining consistent personality traits for QRPDA. Moreover, we offer insights to help select the optimal model for QRPDA, improving its stability and reliability in real-world applications.},
}
@comment{Markdown (Informal)
[Investigating the Personality Consistency in Quantized Role-Playing Dialogue Agents](https://aclanthology.org/2024.emnlp-industry.19/) (Wang et al., EMNLP 2024)
ACL}