@inproceedings{ebner-etal-2019-bag,
title = "Bag-of-Words Transfer: Non-Contextual Techniques for Multi-Task Learning",
author = "Ebner, Seth and
Wang, Felicity and
Van Durme, Benjamin",
editor = "Cherry, Colin and
Durrett, Greg and
Foster, George and
Haffari, Reza and
Khadivi, Shahram and
Peng, Nanyun and
Ren, Xiang and
Swayamdipta, Swabha",
booktitle = "Proceedings of the 2nd Workshop on Deep Learning Approaches for Low-Resource NLP (DeepLo 2019)",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/D19-6105/",
doi = "10.18653/v1/D19-6105",
pages = "40--46",
abstract = "Many architectures for multi-task learning (MTL) have been proposed to take advantage of transfer among tasks, often involving complex models and training procedures. In this paper, we ask if the sentence-level representations learned in previous approaches provide significant benefit beyond that provided by simply improving word-based representations. To investigate this question, we consider three techniques that ignore sequence information: a syntactically-oblivious pooling encoder, pre-trained non-contextual word embeddings, and unigram generative regularization. Compared to a state-of-the-art MTL approach to textual inference, the simple techniques we use yield similar performance on a universe of task combinations while reducing training time and model size."
}
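
As a concrete illustration of the first technique named in the abstract, below is a minimal PyTorch sketch of a syntactically-oblivious pooling encoder: non-contextual word embeddings mean-pooled into a sentence vector, so the output ignores word order entirely. This is a hypothetical sketch, not the authors' implementation; the class name, padding convention, and choice of mean pooling are assumptions.

```python
# Hypothetical sketch (not the authors' code): a syntactically-oblivious
# pooling encoder that mean-pools non-contextual word embeddings.
import torch
import torch.nn as nn

class BagOfWordsEncoder(nn.Module):
    def __init__(self, vocab_size: int, embed_dim: int):
        super().__init__()
        # Non-contextual embedding table; could be initialized from
        # pre-trained vectors such as GloVe (an assumption here).
        self.embedding = nn.Embedding(vocab_size, embed_dim, padding_idx=0)

    def forward(self, token_ids: torch.Tensor) -> torch.Tensor:
        # token_ids: (batch, seq_len); word order does not affect the output.
        mask = (token_ids != 0).unsqueeze(-1).float()   # (batch, seq_len, 1)
        embedded = self.embedding(token_ids) * mask     # zero out padding
        lengths = mask.sum(dim=1).clamp(min=1.0)        # avoid divide-by-zero
        return embedded.sum(dim=1) / lengths            # mean pool -> (batch, dim)

# Usage: two sentences of token ids, padded to the same length with 0.
encoder = BagOfWordsEncoder(vocab_size=10_000, embed_dim=300)
batch = torch.tensor([[5, 42, 7, 0], [9, 3, 11, 8]])
sentence_vectors = encoder(batch)                       # shape: (2, 300)
```

Because the pooled vector is invariant to token order, the encoder is "bag-of-words" in the paper's sense, which is what makes it cheap in both training time and model size relative to sequence-aware MTL encoders.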