@inproceedings{feng-etal-2021-target,
title = "Target-specified Sequence Labeling with Multi-head Self-attention for Target-oriented Opinion Words Extraction",
author = "Feng, Yuhao and
Rao, Yanghui and
Tang, Yuyao and
Wang, Ninghua and
Liu, He",
editor = "Toutanova, Kristina and
Rumshisky, Anna and
Zettlemoyer, Luke and
Hakkani-Tur, Dilek and
Beltagy, Iz and
Bethard, Steven and
Cotterell, Ryan and
Chakraborty, Tanmoy and
Zhou, Yichao",
booktitle = "Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.naacl-main.145/",
doi = "10.18653/v1/2021.naacl-main.145",
pages = "1805--1815",
abstract = "Opinion target extraction and opinion term extraction are two fundamental tasks in Aspect Based Sentiment Analysis (ABSA). Many recent works on ABSA focus on Target-oriented Opinion Words (or Terms) Extraction (TOWE), which aims at extracting the corresponding opinion words for a given opinion target. TOWE can be further applied to Aspect-Opinion Pair Extraction (AOPE) which aims at extracting aspects (i.e., opinion targets) and opinion terms in pairs. In this paper, we propose Target-Specified sequence labeling with Multi-head Self-Attention (TSMSA) for TOWE, in which any pre-trained language model with multi-head self-attention can be integrated conveniently. As a case study, we also develop a Multi-Task structure named MT-TSMSA for AOPE by combining our TSMSA with an aspect and opinion term extraction module. Experimental results indicate that TSMSA outperforms the benchmark methods on TOWE significantly; meanwhile, the performance of MT-TSMSA is similar or even better than state-of-the-art AOPE baseline models."
}
Markdown (Informal)
[Target-specified Sequence Labeling with Multi-head Self-attention for Target-oriented Opinion Words Extraction](https://aclanthology.org/2021.naacl-main.145/) (Feng et al., NAACL 2021)
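For readers skimming the abstract, a minimal sketch of the general idea of target-specified sequence labeling with multi-head self-attention may help. This is an illustration only, not the paper's architecture: the class name `TargetSpecifiedTagger`, the learned target-marker embedding, and all hyperparameters are assumptions, and the paper's actual model integrates a pre-trained language model rather than a single attention layer.

```python
# Illustrative sketch (not the paper's model): condition a self-attention
# tagger on a given opinion target by marking the target's token positions,
# then predict B/I/O tags for the target's opinion words.
import torch
import torch.nn as nn

class TargetSpecifiedTagger(nn.Module):
    """Hypothetical TOWE tagger: all names/sizes here are assumptions."""

    def __init__(self, vocab_size, d_model=128, num_heads=4, num_tags=3):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, d_model)
        # Learned marker added at target positions so attention can
        # condition every token's representation on the specified target.
        self.target_marker = nn.Parameter(torch.zeros(d_model))
        self.attn = nn.MultiheadAttention(d_model, num_heads, batch_first=True)
        self.classifier = nn.Linear(d_model, num_tags)  # B / I / O logits

    def forward(self, token_ids, target_mask):
        # token_ids:   (batch, seq_len) token ids
        # target_mask: (batch, seq_len) 1.0 where the token is part of the target
        x = self.embed(token_ids)
        x = x + target_mask.unsqueeze(-1) * self.target_marker
        x, _ = self.attn(x, x, x)      # multi-head self-attention over the sentence
        return self.classifier(x)      # (batch, seq_len, num_tags)

# Usage: for "The battery life is great" with target "battery life",
# the tagger should label "great" as an opinion word for that target.
model = TargetSpecifiedTagger(vocab_size=1000)
tokens = torch.randint(0, 1000, (1, 5))
mask = torch.tensor([[0.0, 1.0, 1.0, 0.0, 0.0]])
logits = model(tokens, mask)           # shape: (1, 5, 3)
```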