@inproceedings{wu-etal-2024-kmatrix,
  title     = {{KMatrix}: A Flexible Heterogeneous Knowledge Enhancement Toolkit for Large Language Model},
  author    = {Wu, Shun and
               Wu, Di and
               Luo, Kun and
               Zhang, XueYou and
               Zhao, Jun and
               Liu, Kang},
  editor    = {Hernandez Farias, Delia Irazu and
               Hope, Tom and
               Li, Manling},
  booktitle = {Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing: System Demonstrations},
  month     = nov,
  year      = {2024},
  address   = {Miami, Florida, USA},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.emnlp-demo.29/},
  doi       = {10.18653/v1/2024.emnlp-demo.29},
  pages     = {280--290},
  abstract  = {Knowledge-Enhanced Large Language Models (K-LLMs) system enhances Large Language Models (LLMs) abilities using external knowledge. Existing K-LLMs toolkits mainly focus on free-textual knowledge, lacking support for heterogeneous knowledge like tables and knowledge graphs, and fall short in comprehensive datasets, models, and user-friendly experience. To address this gap, we introduce KMatrix: a flexible heterogeneous knowledge enhancement toolkit for LLMs including verbalizing-retrieval and parsing-query methods. Our modularity and control-logic flow diagram design flexibly supports the entire lifecycle of various complex K-LLMs systems, including training, evaluation, and deployment. To assist K-LLMs system research, a series of related knowledge, datasets, and models are integrated into our toolkit, along with performance analyses of K-LLMs systems enhanced by different types of knowledge. Using our toolkit, developers can rapidly build, evaluate, and deploy their own K-LLMs systems.},
}
@comment{
  Markdown (Informal):
  [KMatrix: A Flexible Heterogeneous Knowledge Enhancement Toolkit for Large Language Model](https://aclanthology.org/2024.emnlp-demo.29/) (Wu et al., EMNLP 2024)
  Source: ACL Anthology
}