@inproceedings{min-etal-2025-punctuation,
title = "Punctuation Restoration Improves Structure Understanding without Supervision",
author = "Min, Junghyun and
Lee, Minho and
Lee, Woochul and
Lee, Yeonsoo",
editor = "Adlakha, Vaibhav and
Chronopoulou, Alexandra and
Li, Xiang Lorraine and
Majumder, Bodhisattwa Prasad and
Shi, Freda and
Vernikos, Giorgos",
booktitle = "Proceedings of the 10th Workshop on Representation Learning for NLP (RepL4NLP-2025)",
month = may,
year = "2025",
address = "Albuquerque, NM",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2025.repl4nlp-1.10/",
pages = "120--130",
ISBN = "979-8-89176-245-9",
abstract = "Unsupervised learning objectives like autoregressive and masked language modeling constitute a significant part in producing pre-trained representations that perform various downstream applications from natural language understanding to conversational tasks. However, despite impressive generative capabilities of recent large language models, their abilities to capture syntactic or semantic structure within text lag behind. We hypothesize that the mismatch between linguistic performance and competence in machines is attributable to insufficient learning of linguistic structure knowledge via currently popular pre-training objectives. Working with English, we show that punctuation restoration as a learning objective improves performance on structure-related tasks like named entity recognition, open information extraction, chunking, and part-of-speech tagging. Punctuation restoration results in ▲{\ensuremath{\geq}}2{\%}p improvement in 16 out of 18 experiments, across 6 out of 7 tasks. Our results show that punctuation restoration is an effective learning objective that can improve structure understanding and yield a more robust structure-aware representations of natural language in base-sized models."
}