@inproceedings{khan-bhattacharyya-2025-mod,
    title = "{MOD}-{KG}: {M}ulti{O}rgan Diagnosis Knowledge Graph",
    author = "Khan, Anas Anwarul Haq and
      Bhattacharyya, Pushpak",
    editor = "Krishnamurthy, Parameswari and
      Mujadia, Vandan and
      Misra Sharma, Dipti and
      Mary Thomas, Hannah",
    booktitle = "NLP-AI4Health",
    month = dec,
    year = "2025",
    address = "Mumbai, India",
    publisher = "Association for Computational Linguistics",
    url = "https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.nlpai4health-main.2/",
    pages = "9--15",
    isbn = "979-8-89176-315-9",
    abstract = "The human body is highly interconnected, where a diagnosis in one organ can influence conditions in others. In medical research, graphs (such as Knowledge Graphs and Causal Graphs) have proven useful for capturing these relationships, but constructing them manually with expert input is both costly and time-intensive, especially given the continuous flow of new findings. To address this, we leverage the extraction capabilities of large language models (LLMs) to build the MultiOrgan Diagnosis Knowledge Graph (MOD-KG). MOD-KG contains over 21,200 knowledge triples, derived from both textbooks ({\textasciitilde}13{\%}) and carefully selected research papers (with an average of 444 citations each). The graph focuses primarily on the heart, lungs, kidneys, liver, pancreas, and brain, which are central to much of today{'}s multimodal imaging research. The extraction quality of the LLM was benchmarked against baselines over 1000 samples, demonstrating reliability. We will make our dataset public upon acceptance."
}
@comment{
  Markdown (Informal):
  [MOD-KG: MultiOrgan Diagnosis Knowledge Graph](https://preview.aclanthology.org/ingest-ijcnlp-aacl/2025.nlpai4health-main.2/) (Khan & Bhattacharyya, NLP-AI4Health 2025)
  ACL
}