@inproceedings{dasigi-etal-2017-ontology,
title = "Ontology-Aware Token Embeddings for Prepositional Phrase Attachment",
author = "Dasigi, Pradeep and
Ammar, Waleed and
Dyer, Chris and
Hovy, Eduard",
editor = "Barzilay, Regina and
Kan, Min-Yen",
booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2017",
address = "Vancouver, Canada",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/P17-1191/",
doi = "10.18653/v1/P17-1191",
pages = "2089--2098",
abstract = "Type-level word embeddings use the same set of parameters to represent all instances of a word regardless of its context, ignoring the inherent lexical ambiguity in language. Instead, we embed semantic concepts (or synsets) as defined in WordNet and represent a word token in a particular context by estimating a distribution over relevant semantic concepts. We use the new, context-sensitive embeddings in a model for predicting prepositional phrase (PP) attachments and jointly learn the concept embeddings and model parameters. We show that using context-sensitive embeddings improves the accuracy of the PP attachment model by 5.4{\%} absolute points, which amounts to a 34.4{\%} relative reduction in errors."
}
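
The abstract describes representing a word token as a context-dependent distribution over its WordNet synsets and embedding the token as the resulting mixture of learned synset vectors. Below is a minimal sketch of that idea, not the authors' implementation: it assumes PyTorch and NLTK's WordNet interface (requires `nltk.download('wordnet')`), uses a simple bilinear attention in place of the paper's exact scoring function, and all class and variable names here are illustrative.

```python
import torch
import torch.nn as nn
from nltk.corpus import wordnet as wn

class OntologyAwareEmbedding(nn.Module):
    """Embed a token as an attention-weighted mixture of synset vectors."""

    def __init__(self, synset_ids, dim=50):
        super().__init__()
        # One learned vector per synset; index 0 is a fallback for
        # words with no WordNet senses.
        self.index = {s: i + 1 for i, s in enumerate(synset_ids)}
        self.synset_emb = nn.Embedding(len(synset_ids) + 1, dim)
        # Bilinear score for each (context, synset) pair; a stand-in
        # for the paper's attention function, not taken from it.
        self.attn = nn.Bilinear(dim, dim, 1)

    def forward(self, word, context_vec):
        ids = [self.index[s.name()] for s in wn.synsets(word)
               if s.name() in self.index] or [0]
        senses = self.synset_emb(torch.tensor(ids))              # (k, dim)
        ctx = context_vec.expand(len(ids), -1).contiguous()      # (k, dim)
        weights = torch.softmax(self.attn(ctx, senses), dim=0)   # distribution over senses
        return (weights * senses).sum(dim=0)                     # context-sensitive token vector

# Usage: embed the ambiguous word "bank" against a (here random) context vector.
synsets = sorted({s.name() for w in ["bank", "river"] for s in wn.synsets(w)})
model = OntologyAwareEmbedding(synsets)
token_vec = model("bank", torch.randn(50))
print(token_vec.shape)  # torch.Size([50])
```

In the paper these concept embeddings and the attention parameters are learned jointly with the downstream PP-attachment model, so gradients from the attachment loss would flow into `synset_emb` and `attn` above.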