@inproceedings{bao-etal-2023-exploring,
title = "Exploring Graph Pre-training for Aspect-based Sentiment Analysis",
author = "Bao, Xiaoyi and
Wang, Zhongqing and
Zhou, Guodong",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.findings-emnlp.234/",
doi = "10.18653/v1/2023.findings-emnlp.234",
pages = "3623--3634",
    abstract = "Existing studies tend to extract the sentiment elements in a generative manner in order to avoid complex modeling. Despite their effectiveness, they ignore the importance of the relationships between sentiment elements, which could be crucial, making large pre-trained generative models sub-optimal for modeling sentiment knowledge. Therefore, we introduce two pre-training paradigms to improve the generation model by exploring graph pre-training, which targets strengthening the model's ability to capture the relationships among elements. Specifically, we first employ an Element-level Graph Pre-training paradigm, which is designed to improve the structure awareness of the generative model. Then, we design a Task Decomposition Pre-training paradigm to make the generative model generalizable and robust against various irregular sentiment quadruples. Extensive experiments show the superiority of our proposed method and validate the correctness of our motivation."
}