@inproceedings{kiela-etal-2018-dynamic,
    title     = {Dynamic Meta-Embeddings for Improved Sentence Representations},
    author    = {Kiela, Douwe and
                 Wang, Changhan and
                 Cho, Kyunghyun},
    editor    = {Riloff, Ellen and
                 Chiang, David and
                 Hockenmaier, Julia and
                 Tsujii, Jun{'}ichi},
    booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing},
    month     = oct # "--" # nov,
    year      = {2018},
    address   = {Brussels, Belgium},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/D18-1176/},
    doi       = {10.18653/v1/D18-1176},
    pages     = {1466--1477},
    abstract  = {While one of the first steps in many NLP systems is selecting what pre-trained word embeddings to use, we argue that such a step is better left for neural networks to figure out by themselves. To that end, we introduce dynamic meta-embeddings, a simple yet effective method for the supervised learning of embedding ensembles, which leads to state-of-the-art performance within the same model class on a variety of tasks. We subsequently show how the technique can be used to shed new light on the usage of word embeddings in NLP systems.},
}
Markdown (Informal)
[Dynamic Meta-Embeddings for Improved Sentence Representations](https://aclanthology.org/D18-1176/) (Kiela et al., EMNLP 2018)
ACL