@inproceedings{zheng-etal-2025-e,
  title     = {{E-Gen}: Leveraging {E-Graphs} to Improve Continuous Representations of Symbolic Expressions},
  author    = {Zheng, Hongbo and
               Wang, Suyuan and
               Gangwar, Neeraj and
               Kani, Nickvash},
  editor    = {Chiruzzo, Luis and
               Ritter, Alan and
               Wang, Lu},
  booktitle = {Proceedings of the 2025 Conference of the Nations of the Americas Chapter of the Association for Computational Linguistics: Human Language Technologies (Volume 1: Long Papers)},
  month     = apr,
  year      = {2025},
  address   = {Albuquerque, New Mexico},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.naacl-long.590/},
  pages     = {11772--11788},
  isbn      = {979-8-89176-189-6},
  abstract  = {Vector representations have been pivotal in advancing natural language processing (NLP), with prior research focusing on embedding techniques for mathematical expressions using mathematically equivalent formulations. While effective, these approaches are constrained by the size and diversity of training data. In this work, we address these limitations by introducing E-Gen, a novel e-graph-based dataset generation scheme that synthesizes large and diverse mathematical expression datasets, surpassing prior methods in size and operator variety. Leveraging this dataset, we train embedding models using two strategies: (1) generating mathematically equivalent expressions, and (2) contrastive learning to explicitly group equivalent expressions. We evaluate these embeddings on both in-distribution and out-of-distribution mathematical language processing tasks, comparing them against prior methods. Finally, we demonstrate that our embedding-based approach outperforms state-of-the-art large language models (LLMs) on several tasks, underscoring the necessity of optimizing embedding methods for the mathematical data modality. The source code and datasets are available at https://github.com/MLPgroup/E-Gen.},
}
@comment{
  Markdown (Informal):
  [E-Gen: Leveraging E-Graphs to Improve Continuous Representations of Symbolic Expressions](https://aclanthology.org/2025.naacl-long.590/) (Zheng et al., NAACL 2025)
  ACL
}