@inproceedings{gautam-2026-training,
title = "Training in Step-by-Step Formal Reasoning Improves Pronominal Reasoning in Language Models",
author = "Gautam, Vagrant",
editor = "Demberg, Vera and
Inui, Kentaro and
  M{\`a}rquez, Llu{\'i}s",
booktitle = "Proceedings of the 19th Conference of the {E}uropean Chapter of the {A}ssociation for {C}omputational {L}inguistics (Volume 2: Short Papers)",
month = mar,
year = "2026",
address = "Rabat, Morocco",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/ingest-eacl/2026.eacl-short.7/",
pages = "121--135",
ISBN = "979-8-89176-381-4",
abstract = "Large reasoning models are trained to solve problems by decomposing them into steps. While they show impressive progress on reasoning tasks, ``reasoning'' here is typically limited to formal reasoning, i.e., math, code, and logic. An open question is whether these abilities transfer to \emph{pronominal reasoning}, where step-by-step thinking in non-reasoning models worsens performance, but code pre-training may help. I answer this question by evaluating six pairs of original and DeepSeek-distilled models (1.5B--70B parameters) on six challenging datasets for English pronoun resolution (identifying whom a pronoun refers to) and pronoun fidelity (learning and applying a pronoun mapping correctly). Performance improves statistically significantly on all datasets (31{\%} relative increase), indicating that distilling step-by-step formal reasoning does in fact help with pronominal reasoning, in part by improving instruction-following. With a qualitative evaluation of 720 generations, I show that improvements occur across granular error types, and come from plausible-looking reasoning chains employing a variety of reasoning strategies. However, the gains put models just above random performance on these datasets, leaving plenty of room for improvement."
}