@inproceedings{azam-etal-2026-normalizing,
  title     = {Normalizing Health Concepts with Biomedical Embedding and {LLM}s},
  author    = {Azam, Iram and
               Jiang, Keyuan and
               Bernard, Gordon},
  editor    = {Danilova, Vera and
               Kurfal{\i}, Murathan and
               S{\"o}derfeldt, Ylva and
               Reed, Julia and
               Burchell, Andrew},
  booktitle = {Proceedings of the 1st Workshop on Linguistic Analysis for Health ({HeaLing} 2026)},
  month     = mar,
  year      = {2026},
  address   = {Rabat, Morocco},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2026.healing-1.15/},
  pages     = {180--190},
  isbn      = {979-8-89176-367-8},
  abstract  = {Accurate normalization of health-related expressions to standardized biomedical concepts is crucial for both healthcare and biomedical research. However, traditional string-based matching methods are limited by lexical variations. In this study, we propose a neural embedding-based normalization framework that utilizes an embedding model trained on biomedical terminology, generating over 3.59 million embeddings corresponding to UMLS terms and Concept Unique Identifiers (CUIs). For clinical data, CUIs were retrieved via semantic matching, while Twitter phrases were first processed using a large language model (LLM) to generate preferred terms prior to embedding-based CUI retrieval. Our approach substantially outperforms exact string matching and MetaMap Lite. For clinical data (3,144 phrases), normalization accuracy improved from 0.679 (string match) and 0.574 (MetaMap Lite) to 0.858. For Twitter data (102 phrases), accuracy increased from 0.235 (string match) and 0.118 (MetaMap Lite) to a range of 0.882 (Gemini 2.5 Flash) to 0.980 (GPT-4o mini). These findings highlight both the effectiveness of embedding-based semantic retrieval and the ability of LLMs to generate preferred terms, enhancing robustness in health concept normalization across diverse text sources.},
}

@comment{Scraped citation-box residue from the ACL Anthology page, preserved for reference:
Markdown (Informal)
[Normalizing Health Concepts with Biomedical Embedding and LLMs](https://aclanthology.org/2026.healing-1.15/) (Azam et al., HeaLing 2026)
ACL}