@article{arora-etal-2020-learning,
    title     = {Learning Lexical Subspaces in a Distributional Vector Space},
    author    = {Arora, Kushal and
                 Chakraborty, Aishik and
                 Cheung, Jackie C. K.},
    editor    = {Johnson, Mark and
                 Roark, Brian and
                 Nenkova, Ani},
    journal   = {Transactions of the Association for Computational Linguistics},
    volume    = {8},
    year      = {2020},
    address   = {Cambridge, MA},
    publisher = {MIT Press},
    url       = {https://aclanthology.org/2020.tacl-1.21/},
    doi       = {10.1162/tacl_a_00316},
    pages     = {311--329},
    abstract  = {In this paper, we propose LexSub, a novel approach towards unifying lexical and distributional semantics. We inject knowledge about lexical-semantic relations into distributional word embeddings by defining subspaces of the distributional vector space in which a lexical relation should hold. Our framework can handle symmetric attract and repel relations (e.g., synonymy and antonymy, respectively), as well as asymmetric relations (e.g., hypernymy and meronomy). In a suite of intrinsic benchmarks, we show that our model outperforms previous approaches on relatedness tasks and on hypernymy classification and detection, while being competitive on word similarity tasks. It also outperforms previous systems on extrinsic classification tasks that benefit from exploiting lexical relational cues. We perform a series of analyses to understand the behaviors of our model. Code available at \url{https://github.com/aishikchakraborty/LexSub}.},
}
Markdown (Informal)
[Learning Lexical Subspaces in a Distributional Vector Space](https://aclanthology.org/2020.tacl-1.21/) (Arora et al., TACL 2020)
ACL