@inproceedings{zobnin-elistratova-2019-learning,
title = "Learning Word Embeddings without Context Vectors",
author = "Zobnin, Alexey and
Elistratova, Evgenia",
editor = "Augenstein, Isabelle and
Gella, Spandana and
Ruder, Sebastian and
Kann, Katharina and
Can, Burcu and
Welbl, Johannes and
Conneau, Alexis and
Ren, Xiang and
Rei, Marek",
booktitle = "Proceedings of the 4th Workshop on Representation Learning for NLP (RepL4NLP-2019)",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Add-Cong-Liu-Florida-Atlantic-University-author-id/W19-4329/",
doi = "10.18653/v1/W19-4329",
pages = "244--249",
abstract = "Most word embedding algorithms such as word2vec or fastText construct two sort of vectors: for words and for contexts. Naive use of vectors of only one sort leads to poor results. We suggest using indefinite inner product in skip-gram negative sampling algorithm. This allows us to use only one sort of vectors without loss of quality. Our {\textquotedblleft}context-free{\textquotedblright} cf algorithm performs on par with SGNS on word similarity datasets"
}
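The abstract describes the core idea: keep a single embedding matrix and replace the ordinary dot product in SGNS with an indefinite inner product, so the same vectors serve as both word and context representations. Below is a minimal numpy sketch of that idea, not the authors' implementation: the ±1 diagonal signature, the toy sizes, and all names (`E`, `signature`, `sgns_step`) are illustrative assumptions, and the signature split is an arbitrary choice for the sketch.

```python
import numpy as np

rng = np.random.default_rng(0)

# Hypothetical toy sizes; the paper does not prescribe these.
vocab_size, dim, n_neg = 1000, 50, 5

# One shared embedding matrix: the same rows act as both
# "word" and "context" vectors (no separate context matrix).
E = rng.normal(scale=0.1, size=(vocab_size, dim))

# Indefinite signature: a fixed diagonal D of +1/-1 entries turns
# the dot product into an indefinite inner product <x, y> = x^T D y.
# The half/half split here is an assumption, not the paper's choice.
signature = np.where(np.arange(dim) < dim // 2, 1.0, -1.0)

def score(w, c):
    """Indefinite inner product between word w and context c."""
    return np.dot(E[w] * signature, E[c])

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sgns_step(w, c, lr=0.025):
    """One SGNS gradient step on a (word, context) pair plus negatives."""
    negatives = rng.integers(vocab_size, size=n_neg)
    for ctx, label in [(c, 1.0)] + [(int(n), 0.0) for n in negatives]:
        # Gradient of the log-sigmoid SGNS loss w.r.t. the score.
        g = lr * (label - sigmoid(score(w, ctx)))
        # Both gradients pass through the signature matrix D.
        grad_w = g * signature * E[ctx]
        grad_c = g * signature * E[w]
        E[w] += grad_w
        E[ctx] += grad_c
```

Because the score is symmetric in `w` and `c` under the fixed diagonal `D`, the two updates are mirror images over the same matrix `E`, which is what lets the model drop context vectors entirely.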
Markdown (Informal)
[Learning Word Embeddings without Context Vectors](https://aclanthology.org/W19-4329/) (Zobnin & Elistratova, RepL4NLP 2019)
ACL
Alexey Zobnin and Evgenia Elistratova. 2019. Learning Word Embeddings without Context Vectors. In Proceedings of the 4th Workshop on Representation Learning for NLP (RepL4NLP-2019), pages 244–249, Florence, Italy. Association for Computational Linguistics.