@inproceedings{ibrahim-etal-2026-knowledge,
  title     = {Knowledge Augmentation Enhances Token Classification for Recipe Understanding},
  author    = {Ibrahim, Nuhu and
               Stevens, Robert and
               Batista-Navarro, Riza},
  editor    = {Demberg, Vera and
               Inui, Kentaro and
               Marquez, Llu{\'\i}s},
  booktitle = {Proceedings of the 19th Conference of the {E}uropean Chapter of the {A}ssociation for {C}omputational {L}inguistics (Volume 1: Long Papers)},
  month     = mar,
  year      = {2026},
  address   = {Rabat, Morocco},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-eacl/2026.eacl-long.127/},
  pages     = {2776--2788},
  isbn      = {979-8-89176-380-7},
  internal-note = {NOTE(review): url is an ACL Anthology ingest/preview link -- replace with the canonical aclanthology.org URL (and add a doi if assigned) once the volume is formally published},
  abstract  = {In this work, we propose an entity type-specific and knowledge-augmented token classification framework designed to improve encoder models' performance on recipe texts. Our empirical analysis shows that this approach achieves state-of-the-art (SOTA) results on 5 out of 7 benchmark recipe datasets, significantly outperforming traditional token classification methods. We introduce a novel methodology leveraging curated domain-specific knowledge contexts to guide encoder models such as BERT and RoBERTa, which we refer to as RecipeBERT-KA and RecipeRoBERTa-KA. Additionally, we release a newly reprocessed entity type-specific and knowledge-enriched dataset that merges seven widely used food datasets, making it the largest annotated food-related dataset to date. Comparative analysis with SOTA large language models (GPT-4o, Mistral-7B, LLaMA 3-13B and LLaMA 3-70B) highlights the practical advantages of our smaller and specialised models. Finally, we analyse the impact of the different knowledge contexts, our models' potential for transfer learning, the effect of combining the datasets and scenarios where traditional token classification may still perform competitively, offering nuanced insight into method selection.},
}

Markdown (Informal)
[Knowledge Augmentation Enhances Token Classification for Recipe Understanding](https://preview.aclanthology.org/ingest-eacl/2026.eacl-long.127/) (Ibrahim et al., EACL 2026)
ACL