@inproceedings{nie-bansal-2017-shortcut,
title = "Shortcut-Stacked Sentence Encoders for Multi-Domain Inference",
author = "Nie, Yixin and
Bansal, Mohit",
editor = "Bowman, Samuel and
Goldberg, Yoav and
Hill, Felix and
Lazaridou, Angeliki and
Levy, Omer and
Reichart, Roi and
S{\o}gaard, Anders",
booktitle = "Proceedings of the 2nd Workshop on Evaluating Vector Space Representations for {NLP}",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/W17-5308/",
doi = "10.18653/v1/W17-5308",
pages = "41--45",
    abstract = "We present a simple sequential sentence encoder for multi-domain natural language inference. Our encoder is based on stacked bidirectional LSTM-RNNs with shortcut connections and fine-tuning of word embeddings. The overall supervised model uses the above encoder to encode two input sentences into two vectors, and then uses a classifier over the vector combination to label the relationship between these two sentences as that of entailment, contradiction, or neutral. Our Shortcut-Stacked sentence encoders achieve strong improvements over existing encoders on matched and mismatched multi-domain natural language inference (top single-model result in the EMNLP RepEval 2017 Shared Task (Nangia et al., 2017)). Moreover, they achieve the new state-of-the-art encoding result on the original SNLI dataset (Bowman et al., 2015)."
}
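
The abstract above outlines the architecture: stacked bidirectional LSTMs in which each higher layer also receives the word embeddings and all lower layers' outputs (the shortcut connections), a pooling step that turns each sentence into a fixed-size vector, and a classifier over the combination of the two sentence vectors. The following is a minimal PyTorch sketch of that design; the layer sizes, the max-pooling choice, and the concatenation/difference/product feature combination are illustrative assumptions, not the authors' released implementation.

```python
# Minimal sketch of a shortcut-stacked BiLSTM sentence encoder for NLI.
# Hyperparameters and design details below are illustrative only.
import torch
import torch.nn as nn


class ShortcutStackedEncoder(nn.Module):
    def __init__(self, vocab_size, emb_dim=300, hidden_sizes=(256, 256, 256)):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, emb_dim)  # fine-tuned during training
        self.layers = nn.ModuleList()
        in_dim = emb_dim
        for h in hidden_sizes:
            self.layers.append(
                nn.LSTM(in_dim, h, batch_first=True, bidirectional=True)
            )
            # Shortcut: the next layer sees the embeddings plus every
            # previous layer's bidirectional output.
            in_dim += 2 * h
        self.out_dim = 2 * hidden_sizes[-1]

    def forward(self, token_ids):
        emb = self.embedding(token_ids)          # (batch, seq, emb_dim)
        inputs = emb
        outputs = []
        for lstm in self.layers:
            out, _ = lstm(inputs)                # (batch, seq, 2*h)
            outputs.append(out)
            inputs = torch.cat([emb] + outputs, dim=-1)
        # Max pooling over time yields a fixed-size sentence vector
        # (an assumed pooling choice for this sketch).
        return outputs[-1].max(dim=1).values     # (batch, 2*h_last)


class NLIClassifier(nn.Module):
    """Labels a premise/hypothesis pair as entailment, contradiction, or neutral."""

    def __init__(self, encoder, mlp_dim=512, num_classes=3):
        super().__init__()
        self.encoder = encoder
        d = encoder.out_dim
        self.mlp = nn.Sequential(
            nn.Linear(4 * d, mlp_dim), nn.ReLU(), nn.Linear(mlp_dim, num_classes)
        )

    def forward(self, premise_ids, hypothesis_ids):
        p = self.encoder(premise_ids)
        h = self.encoder(hypothesis_ids)
        # Assumed vector combination: concatenation, absolute difference, product.
        features = torch.cat([p, h, torch.abs(p - h), p * h], dim=-1)
        return self.mlp(features)


if __name__ == "__main__":
    model = NLIClassifier(ShortcutStackedEncoder(vocab_size=1000))
    premise = torch.randint(0, 1000, (2, 12))
    hypothesis = torch.randint(0, 1000, (2, 9))
    print(model(premise, hypothesis).shape)      # torch.Size([2, 3])
```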
Markdown (Informal)
[Shortcut-Stacked Sentence Encoders for Multi-Domain Inference](https://aclanthology.org/W17-5308/) (Nie & Bansal, RepEval 2017)