@inproceedings{church-etal-2022-training,
title = "Training on Lexical Resources",
author = "Church, Kenneth and
Cai, Xingyu and
Bian, Yuchen",
editor = "Calzolari, Nicoletta and
B{\'e}chet, Fr{\'e}d{\'e}ric and
Blache, Philippe and
Choukri, Khalid and
Cieri, Christopher and
Declerck, Thierry and
Goggi, Sara and
Isahara, Hitoshi and
Maegaard, Bente and
Mariani, Joseph and
Mazo, H{\'e}l{\`e}ne and
Odijk, Jan and
Piperidis, Stelios",
booktitle = "Proceedings of the Thirteenth Language Resources and Evaluation Conference",
month = jun,
year = "2022",
address = "Marseille, France",
publisher = "European Language Resources Association",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.lrec-1.676/",
pages = "6290--6299",
abstract = "We propose using lexical resources (thesaurus, VAD) to fine-tune pretrained deep nets such as BERT and ERNIE. Then at inference time, these nets can be used to distinguish synonyms from antonyms, as well as VAD distances. The inference method can be applied to words as well as texts such as multiword expressions (MWEs), out of vocabulary words (OOVs), morphological variants and more. Code and data are posted on \url{https://github.com/kwchurch/syn_ant}."
}
Markdown (Informal):
[Training on Lexical Resources](https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.lrec-1.676/) (Church et al., LREC 2022)

ACL:
Kenneth Church, Xingyu Cai, and Yuchen Bian. 2022. Training on Lexical Resources. In Proceedings of the Thirteenth Language Resources and Evaluation Conference, pages 6290–6299, Marseille, France. European Language Resources Association.
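The abstract describes fine-tuning a pretrained net such as BERT on lexical resources so that, at inference time, it can distinguish synonyms from antonyms. The sketch below is not the authors' released code (that is posted at https://github.com/kwchurch/syn_ant); it is a minimal illustration of that kind of setup, assuming HuggingFace Transformers, a toy list of thesaurus-derived word pairs, and an invented labeling convention (1 = synonym, 0 = antonym).

```python
# Minimal sketch (assumed setup, not the authors' code): fine-tune BERT to
# classify word pairs as synonyms vs. antonyms, encoding each pair as a
# sentence pair. Pair list and labels here are toy examples for illustration.
import torch
from torch.utils.data import Dataset, DataLoader
from transformers import BertTokenizer, BertForSequenceClassification

class PairDataset(Dataset):
    """Word pairs labeled 1 (synonym) or 0 (antonym)."""
    def __init__(self, pairs, labels, tokenizer, max_len=16):
        self.enc = tokenizer([a for a, b in pairs], [b for a, b in pairs],
                             padding="max_length", truncation=True,
                             max_length=max_len, return_tensors="pt")
        self.labels = torch.tensor(labels)

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, i):
        item = {k: v[i] for k, v in self.enc.items()}
        item["labels"] = self.labels[i]
        return item

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = BertForSequenceClassification.from_pretrained("bert-base-uncased",
                                                      num_labels=2)

# Toy pairs; a real run would train on a large thesaurus-derived pair list.
pairs = [("happy", "glad"), ("happy", "sad"), ("big", "large"), ("big", "small")]
labels = [1, 0, 1, 0]
loader = DataLoader(PairDataset(pairs, labels, tokenizer),
                    batch_size=2, shuffle=True)

optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
model.train()
for epoch in range(3):
    for batch in loader:
        optimizer.zero_grad()
        out = model(**batch)   # cross-entropy loss over the two classes
        out.loss.backward()
        optimizer.step()

# Inference on an unseen pair. Because BERT tokenizes into subwords, the same
# call also handles MWEs, OOVs, and morphological variants.
model.eval()
enc = tokenizer("content", "discontent", return_tensors="pt")
with torch.no_grad():
    pred = model(**enc).logits.argmax(-1).item()
print("synonym" if pred == 1 else "antonym")
```

A VAD-style variant would swap the two-class head for a small regression head and train it to predict valence/arousal/dominance scores from the same encoder; the data loading and training loop stay essentially the same.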