@inproceedings{laube-eliasmith-2024-qavsa,
title = "{QAVSA}: Question Answering using Vector Symbolic Algebras",
author = "Laube, Ryan and
Eliasmith, Chris",
editor = "Zhao, Chen and
Mosbach, Marius and
Atanasova, Pepa and
Goldfarb-Tarrant, Seraphina and
Hase, Peter and
Hosseini, Arian and
Elbayad, Maha and
Pezzelle, Sandro and
Mozes, Maximilian",
booktitle = "Proceedings of the 9th Workshop on Representation Learning for NLP (RepL4NLP-2024)",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.repl4nlp-1.14/",
pages = "191--202",
abstract = "With the advancement of large pretrained language models (PLMs), many question answering (QA) benchmarks have been developed in order to evaluate the reasoning capabilities of these models. Augmenting PLMs with external knowledge in the form of Knowledge Graphs (KGs) has been a popular method to improve their reasoning capabilities, and a common method to reason over KGs is to use Graph Neural Networks (GNNs). As an alternative to GNNs to augment PLMs, we propose a novel graph reasoning module using Vector Symbolic Algebra (VSA) graph representations and a k-layer MLP. We demonstrate that our VSA-based model performs as well as QA-GNN, a model combining a PLM and a GNN-module, on 3 multiple-choice question answering (MCQA) datasets. Our model has a simpler architecture than QA-GNN and also converges 39{\%} faster during training."
}
[QAVSA: Question Answering using Vector Symbolic Algebras](https://aclanthology.org/2024.repl4nlp-1.14/) (Laube & Eliasmith, RepL4NLP 2024)
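The abstract's "VSA graph representations" can be made concrete with Holographic Reduced Representations (HRRs), the VSA family associated with much of Eliasmith's work, in which binding is circular convolution and bundling is vector addition. The sketch below is a minimal illustration under those assumptions; the dimensionality, the triple encoding, and the toy vocabulary are hypothetical choices for exposition, not the paper's actual model.

```python
# Minimal HRR-style VSA sketch (assumed encoding, not the paper's exact one):
# a small knowledge graph is bundled into a single vector, then queried.
import numpy as np

rng = np.random.default_rng(0)
D = 1024  # vector dimensionality (illustrative choice, not from the paper)

def random_vector(d=D):
    """Unit-norm random vector, a standard HRR initialization."""
    v = rng.normal(0.0, 1.0 / np.sqrt(d), d)
    return v / np.linalg.norm(v)

def bind(a, b):
    """HRR binding: circular convolution, computed via FFT."""
    return np.fft.irfft(np.fft.rfft(a) * np.fft.rfft(b), n=len(a))

def unbind(s, a):
    """Approximate unbinding: bind s with the involution of a."""
    a_inv = np.concatenate(([a[0]], a[:0:-1]))  # a*[i] = a[-i mod D]
    return bind(s, a_inv)

# Bundle two bound (subject, relation, object) triples into one graph vector.
vocab = {w: random_vector() for w in ["cat", "mammal", "fur", "is_a", "has"]}
graph = (bind(bind(vocab["cat"], vocab["is_a"]), vocab["mammal"])
         + bind(bind(vocab["cat"], vocab["has"]), vocab["fur"]))

# Query the graph: unbinding "cat" and "is_a" yields a noisy "mammal",
# recovered by nearest-neighbor cleanup against the vocabulary.
query = unbind(unbind(graph, vocab["cat"]), vocab["is_a"])
print(max(vocab, key=lambda w: float(vocab[w] @ query)))  # -> "mammal"
```

In the paper's setting, a graph vector like this, built from a question-specific KG subgraph, would be passed alongside the PLM's output into a small MLP; the dot-product cleanup above is simply the most direct way to read a symbol back out of the bundled representation.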