@inproceedings{yedetore-kim-2024-semantic,
  title     = {Semantic Training Signals Promote Hierarchical Syntactic Generalization in {Transformers}},
  author    = {Yedetore, Aditya and Kim, Najoung},
  editor    = {Al-Onaizan, Yaser and Bansal, Mohit and Chen, Yun-Nung},
  booktitle = {Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2024},
  address   = {Miami, Florida, USA},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.emnlp-main.235/},
  doi       = {10.18653/v1/2024.emnlp-main.235},
  pages     = {4059--4073},
  abstract  = {Neural networks without hierarchical biases often struggle to learn linguistic rules that come naturally to humans. However, neural networks are trained primarily on form alone, while children acquiring language additionally receive data about meaning. Would neural networks generalize more like humans when trained on both form and meaning? We investigate this by examining if Transformers{---}neural networks without a hierarchical bias{---}better achieve hierarchical generalization when trained on both form and meaning compared to when trained on form alone. Our results show that Transformers trained on form and meaning do favor the hierarchical generalization more than those trained on form alone, suggesting that statistical learners without hierarchical biases can leverage semantic training signals to bootstrap hierarchical syntactic generalization.},
}
Markdown (Informal)
[Semantic Training Signals Promote Hierarchical Syntactic Generalization in Transformers](https://aclanthology.org/2024.emnlp-main.235/) (Yedetore & Kim, EMNLP 2024)
ACL