% NOTE(review): url normalised from a preview/staging Anthology build
% (preview.aclanthology.org/fix-sig-urls/...) to the canonical record;
% month range uses "--" per typographic convention; values brace-delimited.
@inproceedings{tay-etal-2018-attentive,
    title     = {Attentive Gated Lexicon Reader with Contrastive Contextual Co-Attention for Sentiment Classification},
    author    = {Tay, Yi and
                 Luu, Anh Tuan and
                 Hui, Siu Cheung and
                 Su, Jian},
    editor    = {Riloff, Ellen and
                 Chiang, David and
                 Hockenmaier, Julia and
                 Tsujii, Jun{'}ichi},
    booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing},
    month     = oct # {--} # nov,
    year      = {2018},
    address   = {Brussels, Belgium},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/D18-1381/},
    doi       = {10.18653/v1/D18-1381},
    pages     = {3443--3453},
    abstract  = {This paper proposes a new neural architecture that exploits readily available sentiment lexicon resources. The key idea is that that incorporating a word-level prior can aid in the representation learning process, eventually improving model performance. To this end, our model employs two distinctly unique components, i.e., (1) we introduce a lexicon-driven contextual attention mechanism to imbue lexicon words with long-range contextual information and (2), we introduce a contrastive co-attention mechanism that models contrasting polarities between all positive and negative words in a sentence. Via extensive experiments, we show that our approach outperforms many other neural baselines on sentiment classification tasks on multiple benchmark datasets.},
}
Markdown (Informal)
[Attentive Gated Lexicon Reader with Contrastive Contextual Co-Attention for Sentiment Classification](https://aclanthology.org/D18-1381/) (Tay et al., EMNLP 2018)
ACL