@inproceedings{li-etal-2023-multi-level,
title = "Multi-level Contrastive Learning for Script-based Character Understanding",
author = "Li, Dawei and
Zhang, Hengyuan and
Li, Yanran and
Yang, Shiping",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2023.emnlp-main.366/",
doi = "10.18653/v1/2023.emnlp-main.366",
pages = "5995--6013",
abstract = "In this work, we tackle the scenario of understanding characters in scripts, which aims to learn the characters' personalities and identities from their utterances. We begin by analyzing several challenges in this scenario, and then propose a multi-level contrastive learning framework to capture characters' global information in a fine-grained manner. To validate the proposed framework, we conduct extensive experiments on three character understanding sub-tasks by comparing with strong pre-trained language models, including SpanBERT, Longformer, BigBird and ChatGPT-3.5. Experimental results demonstrate that our method improves the performances by a considerable margin. Through further in-depth analysis, we show the effectiveness of our method in addressing the challenges and provide more hints on the scenario of character understanding. We will open-source our work in this URL."
}
Markdown (Informal)
[Multi-level Contrastive Learning for Script-based Character Understanding](https://aclanthology.org/2023.emnlp-main.366/) (Li et al., EMNLP 2023)
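For readers unfamiliar with the contrastive objective the abstract refers to, the sketch below shows a generic in-batch InfoNCE-style contrastive loss in PyTorch. It is an illustration only, not the authors' released implementation of their multi-level framework; the function name `info_nce_loss`, the tensor shapes, and the temperature value are assumptions made for this example.

```python
# Minimal sketch of an in-batch InfoNCE contrastive loss (assumes PyTorch).
# NOT the paper's code: names, shapes, and the temperature are illustrative.
import torch
import torch.nn.functional as F


def info_nce_loss(anchor: torch.Tensor,
                  positive: torch.Tensor,
                  temperature: float = 0.07) -> torch.Tensor:
    """Contrast each anchor embedding against its paired positive, using the
    other positives in the batch as negatives."""
    anchor = F.normalize(anchor, dim=-1)      # (B, d) unit-norm embeddings
    positive = F.normalize(positive, dim=-1)  # (B, d)
    logits = anchor @ positive.t() / temperature        # (B, B) similarities
    targets = torch.arange(anchor.size(0), device=anchor.device)
    return F.cross_entropy(logits, targets)   # diagonal entries are positives


if __name__ == "__main__":
    # Toy usage: two noisy views of 8 hypothetical "character" embeddings.
    a = torch.randn(8, 16)
    p = a + 0.1 * torch.randn(8, 16)
    print(info_nce_loss(a, p).item())
```

In a character-understanding setting, the anchor and positive embeddings would typically come from different utterances or representations of the same character, so that the loss pulls same-character representations together and pushes different characters apart.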