@inproceedings{zhang-etal-2020-enhancing-transformer,
title = "Enhancing Transformer with Sememe Knowledge",
author = "Zhang, Yuhui and
Yang, Chenghao and
Zhou, Zhengping and
Liu, Zhiyuan",
editor = "Gella, Spandana and
Welbl, Johannes and
Rei, Marek and
Petroni, Fabio and
Lewis, Patrick and
Strubell, Emma and
Seo, Minjoon and
Hajishirzi, Hannaneh",
booktitle = "Proceedings of the 5th Workshop on Representation Learning for NLP",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2020.repl4nlp-1.21/",
doi = "10.18653/v1/2020.repl4nlp-1.21",
pages = "177--184",
abstract = "While large-scale pretraining has achieved great success in many NLP tasks, it has not been fully studied whether external linguistic knowledge can improve data-driven models. In this work, we introduce sememe knowledge into Transformer and propose three sememe-enhanced Transformer models. Sememes, by linguistic definition, are the minimum semantic units of language, which can well represent implicit semantic meanings behind words. Our experiments demonstrate that introducing sememe knowledge into Transformer can consistently improve language modeling and downstream tasks. The adversarial test further demonstrates that sememe knowledge can substantially improve model robustness."
}