@inproceedings{gul-etal-2025-mucos,
title = "{M}u{C}o{S}: Efficient Drug{--}Target Discovery via Multi-Context-Aware Sampling in Knowledge Graphs",
author = "Gul, Haji and
Naim, Abdul and
Bhat, Ajaz",
editor = "Demner-Fushman, Dina and
Ananiadou, Sophia and
Miwa, Makoto and
Tsujii, Junichi",
booktitle = "Proceedings of the 24th Workshop on Biomedical Language Processing",
month = aug,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/acl25-workshop-ingestion/2025.bionlp-1.27/",
pages = "319--327",
isbn = "979-8-89176-275-6",
abstract = "Accurate prediction of drug{--}target interactions is critical for accelerating drug discovery. In this work, we frame drug{--}target prediction as a link prediction task on heterogeneous biomedical knowledge graphs (KG) that integrate drugs, proteins, diseases, pathways, and other relevant entities. Conventional KG embedding methods such as TransE and ComplEx-SE are hindered by their reliance on computationally intensive negative sampling and their limited generalization to unseen drug{--}target pairs. To address these challenges, we propose Multi-Context-Aware Sampling (MuCoS), a novel framework that prioritizes high-density neighbours to capture salient structural patterns and integrates these with contextual embeddings derived from BERT. By unifying structural and textual modalities and selectively sampling highly informative patterns, MuCoS circumvents the need for negative sampling, significantly reducing computational overhead while enhancing predictive accuracy for novel drug{--}target associations and drug targets. Extensive experiments on the KEGG50k and PharmKG-8k datasets demonstrate that MuCoS outperforms baselines, achieving up to a 13{\%} improvement in MRR for general relation prediction on KEGG50k, a 22{\%} improvement on PharmKG-8k, and a 6{\%} gain in dedicated drug{--}target relation prediction on KEGG50k."
}
Markdown (Informal)
[MuCoS: Efficient Drug–Target Discovery via Multi-Context-Aware Sampling in Knowledge Graphs](https://preview.aclanthology.org/acl25-workshop-ingestion/2025.bionlp-1.27/) (Gul et al., BioNLP 2025)
ACL