@inproceedings{kaji-kobayashi-2017-incremental,
title = "Incremental {Skip-gram} Model with Negative Sampling",
author = "Kaji, Nobuhiro and
Kobayashi, Hayato",
editor = "Palmer, Martha and
Hwa, Rebecca and
Riedel, Sebastian",
booktitle = "Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D17-1037/",
doi = "10.18653/v1/D17-1037",
pages = "363--371",
abstract = "This paper explores an incremental training strategy for the skip-gram model with negative sampling (SGNS) from both empirical and theoretical perspectives. Existing methods of neural word embeddings, including SGNS, are multi-pass algorithms and thus cannot perform incremental model update. To address this problem, we present a simple incremental extension of SGNS and provide a thorough theoretical analysis to demonstrate its validity. Empirical experiments demonstrated the correctness of the theoretical analysis as well as the practical usefulness of the incremental algorithm."
}
Markdown (Informal)
[Incremental Skip-gram Model with Negative Sampling](https://aclanthology.org/D17-1037/) (Kaji & Kobayashi, EMNLP 2017)
ACL
- Nobuhiro Kaji and Hayato Kobayashi. 2017. Incremental Skip-gram Model with Negative Sampling. In Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing, pages 363–371, Copenhagen, Denmark. Association for Computational Linguistics.