@inproceedings{liu-etal-2019-incorporating-contextual,
    title     = {Incorporating Contextual and Syntactic Structures Improves Semantic Similarity Modeling},
    author    = {Liu, Linqing and
                 Yang, Wei and
                 Rao, Jinfeng and
                 Tang, Raphael and
                 Lin, Jimmy},
    editor    = {Inui, Kentaro and
                 Jiang, Jing and
                 Ng, Vincent and
                 Wan, Xiaojun},
    booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing ({EMNLP-IJCNLP})},
    month     = nov,
    year      = {2019},
    address   = {Hong Kong, China},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/D19-1114/},
    doi       = {10.18653/v1/D19-1114},
    pages     = {1204--1209},
    abstract  = {Semantic similarity modeling is central to many NLP problems such as natural language inference and question answering. Syntactic structures interact closely with semantics in learning compositional representations and alleviating long-range dependency issues. However, such structure priors have not been well exploited in previous work for semantic modeling. To examine their effectiveness, we start with the Pairwise Word Interaction Model, one of the best models according to a recent reproducibility study, then introduce components for modeling context and structure using multi-layer BiLSTMs and TreeLSTMs. In addition, we introduce residual connections to the deep convolutional neural network component of the model. Extensive evaluations on eight benchmark datasets show that incorporating structural information contributes to consistent improvements over strong baselines.}
}
Markdown (Informal)
[Incorporating Contextual and Syntactic Structures Improves Semantic Similarity Modeling](https://aclanthology.org/D19-1114/) (Liu et al., EMNLP-IJCNLP 2019)
ACL