@inproceedings{wang-etal-2025-crystalicl,
  title     = {{CrystalICL}: Enabling In-Context Learning for Crystal Generation},
  author    = {Wang, Ruobing and
               Tan, Qiaoyu and
               Wang, Yili and
               Wang, Ying and
               Wang, Xin},
  editor    = {Christodoulopoulos, Christos and
               Chakraborty, Tanmoy and
               Rose, Carolyn and
               Peng, Violet},
  booktitle = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2025},
  address   = {Suzhou, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/ingest-luhme/2025.emnlp-main.929/},
  doi       = {10.18653/v1/2025.emnlp-main.929},
  pages     = {18440--18455},
  isbn      = {979-8-89176-332-6},
  abstract  = {Designing crystal materials with desired physicochemical properties remains a fundamental challenge in materials science. While large language models (LLMs) have demonstrated strong in-context learning (ICL) capabilities, existing LLM-based crystal generation approaches are limited to zero-shot scenarios and are unable to benefit from few-shot scenarios. In contrast, human experts typically design new materials by modifying relevant known structures which aligns closely with the few-shot ICL paradigm. Motivated by this, we propose CrystalICL, a novel model designed for few-shot crystal generation. Specifically, we introduce a space-group based crystal tokenization method, which effectively reduces the complexity of modeling crystal symmetry in LLMs. We further introduce a condition-structure aware hybrid instruction tuning framework and a multi-task instruction tuning strategy, enabling the model to better exploit ICL by capturing structure-property relationships from limited data. Extensive experiments on four crystal generation benchmarks demonstrate the superiority of CrystalICL over the leading baseline methods on conditional and unconditional generation tasks.},
}
Markdown (Informal)
[CrystalICL: Enabling In-Context Learning for Crystal Generation](https://preview.aclanthology.org/ingest-luhme/2025.emnlp-main.929/) (Wang et al., EMNLP 2025)
ACL