@inproceedings{wei-etal-2025-teach,
title = "{TEACH}: A Contrastive Knowledge Adaptive Distillation Framework for Classical {C}hinese Understanding",
author = "Wei, Yuting and
Meng, Qi and
Xu, Yuanxing and
Wu, Bin",
editor = "Che, Wanxiang and
Nabende, Joyce and
Shutova, Ekaterina and
Pilehvar, Mohammad Taher",
booktitle = "Proceedings of the 63rd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = jul,
year = "2025",
address = "Vienna, Austria",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/landing_page/2025.acl-long.178/",
pages = "3537--3550",
ISBN = "979-8-89176-251-0",
abstract = "Traditional methods for processing classical Chinese typically segment language understanding into discrete tasks, which overlook crucial background information and reduce user engagement. Large language models (LLMs) provide integrated solutions, yet they entail high computational costs and risks of generating inaccurate historical information. To tackle these challenges, we propose a novel framework, TEACH (conTrastive knowlEdge Adaptive distillation with enhanCed Historical interpretability), which focuses on classical Chinese understanding by integrating word sense disambiguation with sentence translation. This integration leverages a confidence-annotated knowledge base and a step-by-step Chain-of-Thought prompting mechanism to minimize hallucinations and improve semantic analysis. Moreover, TEACH employs contrastive distillation learning to efficiently transfer capabilities from larger models to smaller ones (e.g., Qwen2-1.5B), addressing overly liberal translations. Additionally, we introduce an innovative generation evaluation metric using iterative word alignment, enhancing LLM performance assessments by distinguishing additional information and addressing excessive translation issues. Experiments conducted on real-world datasets validate TEACH{'}s efficacy in classical Chinese educational scenarios."
}