@inproceedings{gonzalez-gutierrez-quattoni-2025-domain,
title = "Domain Pre-training Impact on Representations",
author = "Gonzalez-Gutierrez, Cesar and
Quattoni, Ariadna",
editor = "Christodoulopoulos, Christos and
Chakraborty, Tanmoy and
Rose, Carolyn and
Peng, Violet",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-yu-wang-polytechnic/2025.findings-emnlp.1201/",
doi = "10.18653/v1/2025.findings-emnlp.1201",
pages = "22033--22049",
ISBN = "979-8-89176-335-7",
abstract = "This empirical study analyzes how the choice of pre-training corpus affects the quality of learned transformer representations. We focus specifically on the representation quality achieved through pre-training alone. Our experiments demonstrate that pre-training on a small, specialized corpus can produce effective representations, and that the effectiveness of combining a generic and a specialized corpora depends on the distributional similarity between the target task and the specialized corpus."
}