@inproceedings{lam-yang-2025-revisiting,
title = "Revisiting Pre-trained Language Models for Conversation Disentanglement",
author = "Lam, Tung-Thien and
Yang, Cheng-Zen",
editor = "Chang, Kai-Wei and
Lu, Ke-Han and
Yang, Chih-Kai and
Tam, Zhi-Rui and
Chang, Wen-Yu and
Wang, Chung-Che",
booktitle = "Proceedings of the 37th Conference on Computational Linguistics and Speech Processing (ROCLING 2025)",
month = nov,
year = "2025",
address = "National Taiwan University, Taipei City, Taiwan",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/dashboard/2025.rocling-main.31/",
pages = "296--302",
ISBN = "979-8-89176-379-1",
abstract = "Multi-party conversation is a popular form in online group chatting. However, the interweaving of utterance threads complicates the understanding of the dialogues for participants. Many conversation disentanglement models have been proposed using transformer-based pre-trained language models (PrLMs). However, advanced transformer-based PrLMs have not been extensively studied. This paper investigates the effectiveness of five advanced PrLMs: BERT, XLNet, ELECTRA, RoBERTa, and ModernBERT. The experimental results show that ELECTRA and RoBERTa are two PrLMs with outstanding performance than other PrLMs for the conversation disentanglement task."
}