@inproceedings{liu-etal-2020-fine,
  author    = {Liu, Zhenghao and Xiong, Chenyan and Sun, Maosong and Liu, Zhiyuan},
  editor    = {Jurafsky, Dan and Chai, Joyce and Schluter, Natalie and Tetreault, Joel},
  title     = {Fine-grained Fact Verification with {Kernel Graph Attention Network}},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  pages     = {7342--7351},
  doi       = {10.18653/v1/2020.acl-main.655},
  url       = {https://aclanthology.org/2020.acl-main.655/},
  abstract  = {Fact Verification requires fine-grained natural language inference capability that finds subtle clues to identify the syntactical and semantically correct but not well-supported claims. This paper presents Kernel Graph Attention Network (KGAT), which conducts more fine-grained fact verification with kernel-based attentions. Given a claim and a set of potential evidence sentences that form an evidence graph, KGAT introduces node kernels, which better measure the importance of the evidence node, and edge kernels, which conduct fine-grained evidence propagation in the graph, into Graph Attention Networks for more accurate fact verification. KGAT achieves a 70.38{\%} FEVER score and significantly outperforms existing fact verification models on FEVER, a large-scale benchmark for fact verification. Our analyses illustrate that, compared to dot-product attentions, the kernel-based attention concentrates more on relevant evidence sentences and meaningful clues in the evidence graph, which is the main source of KGAT{'}s effectiveness. All source codes of this work are available at \url{https://github.com/thunlp/KernelGAT}.},
}
@comment{Markdown (Informal):
[Fine-grained Fact Verification with Kernel Graph Attention Network](https://aclanthology.org/2020.acl-main.655/) (Liu et al., ACL 2020)
ACL
}