@inproceedings{wang-2025-ccl25,
    title     = "{CCL}25-Eval 任务5系统报告:基于千问大模型的古诗词理解与推理研究",
    author    = "Wang, Jue",
    editor    = "Lin, Hongfei and
      Li, Bin and
      Tan, Hongye",
    booktitle = "Proceedings of the 24th {C}hina National Conference on Computational Linguistics ({CCL} 2025)",
    month     = aug,
    year      = "2025",
    address   = "Jinan, China",
    publisher = "Chinese Information Processing Society of China",
    url       = "https://aclanthology.org/2025.ccl-2.25/",
    pages     = "206--211",
    abstract  = "中国古典诗词语言凝练、意境深远,对自然语言处理系统提出了严峻挑战。本次评测聚焦于古诗词理解与推理,包括词语释义、句子翻译和情感分析三项子任务。本文基于Qwen2.5-14B-Instruct 模型,在LLaMA Factory 框架下采用监督微调(SFT)与LoRA 参数高效微调策略,提升模型在few-shot 条件下的表现。训练数据来自官方发布的多类别JSON 格式语料,经整合与指令格式转换后用于模型训练。实验表明,LoRA 微调显著优于zero-shot 基线。本研究验证了参数高效微调方法在有限数据场景下的有效性。"
}
Markdown (Informal)
[CCL25-Eval 任务5系统报告:基于千问大模型的古诗词理解与推理研究](https://aclanthology.org/2025.ccl-2.25/) (Wang, CCL 2025)
ACL
- Jue Wang. 2025. CCL25-Eval 任务5系统报告:基于千问大模型的古诗词理解与推理研究. In Proceedings of the 24th China National Conference on Computational Linguistics (CCL 2025), pages 206–211, Jinan, China. Chinese Information Processing Society of China.