@inproceedings{huang-wang-2017-deep,
title = "Deep Residual Learning for Weakly-Supervised Relation Extraction",
author = "Huang, Yi Yao and
Wang, William Yang",
editor = "Palmer, Martha and
Hwa, Rebecca and
Riedel, Sebastian",
booktitle = "Proceedings of the 2017 Conference on Empirical Methods in Natural Language Processing",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/D17-1191/",
doi = "10.18653/v1/D17-1191",
pages = "1803--1807",
abstract = "Deep residual learning (ResNet) is a new method for training very deep neural networks using identity mapping for shortcut connections. ResNet has won the ImageNet ILSVRC 2015 classification task, and achieved state-of-the-art performances in many computer vision tasks. However, the effect of residual learning on noisy natural language processing tasks is still not well understood. In this paper, we design a novel convolutional neural network (CNN) with residual learning, and investigate its impacts on the task of distantly supervised noisy relation extraction. In contradictory to popular beliefs that ResNet only works well for very deep networks, we found that even with 9 layers of CNNs, using identity mapping could significantly improve the performance for distantly-supervised relation extraction."
}
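
The abstract describes adding identity-mapping shortcut connections to a word-level CNN for relation extraction. Below is a minimal PyTorch sketch of one such residual convolution block, assuming same-width convolutions with length-preserving padding so the identity shortcut needs no projection; the layer sizes and names are illustrative, not the paper's exact configuration.

```python
import torch
import torch.nn as nn


class ResidualConvBlock(nn.Module):
    """Two 1-D convolutions over token representations with an identity shortcut.

    Channel count is held constant and padding preserves sequence length, so the
    input x can be added back unchanged (illustrative sizes, not the paper's
    exact configuration).
    """

    def __init__(self, channels: int = 230, kernel_size: int = 3):
        super().__init__()
        padding = kernel_size // 2
        self.conv1 = nn.Conv1d(channels, channels, kernel_size, padding=padding)
        self.conv2 = nn.Conv1d(channels, channels, kernel_size, padding=padding)
        self.relu = nn.ReLU()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, channels, seq_len)
        out = self.relu(self.conv1(x))
        out = self.conv2(out)
        # Identity mapping: the shortcut adds the block's input back unchanged,
        # so gradients have a direct path around the convolutions.
        return self.relu(out + x)


if __name__ == "__main__":
    block = ResidualConvBlock()
    x = torch.randn(8, 230, 100)  # batch of 8 sentences, 100 tokens each
    print(block(x).shape)         # torch.Size([8, 230, 100])
```

Stacking several such blocks yields a deeper text CNN (the paper reports gains at 9 convolutional layers) while the identity shortcuts keep the added depth trainable.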