@inproceedings{bradbury-socher-2017-towards,
    title     = {Towards Neural Machine Translation with Latent Tree Attention},
    author    = {Bradbury, James and
                 Socher, Richard},
    editor    = {Chang, Kai-Wei and
                 Chang, Ming-Wei and
                 Srikumar, Vivek and
                 Rush, Alexander M.},
    booktitle = {Proceedings of the 2nd Workshop on Structured Prediction for Natural Language Processing},
    month     = sep,
    year      = {2017},
    address   = {Copenhagen, Denmark},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/W17-4303/},
    doi       = {10.18653/v1/W17-4303},
    pages     = {12--16},
    abstract  = {Building models that take advantage of the hierarchical structure of language without a priori annotation is a longstanding goal in natural language processing. We introduce such a model for the task of machine translation, pairing a recurrent neural network grammar encoder with a novel attentional RNNG decoder and applying policy gradient reinforcement learning to induce unsupervised tree structures on both the source and target. When trained on character-level datasets with no explicit segmentation or parse annotation, the model learns a plausible segmentation and shallow parse, obtaining performance close to an attentional baseline.},
}
Markdown (Informal)
[Towards Neural Machine Translation with Latent Tree Attention](https://aclanthology.org/W17-4303/) (Bradbury & Socher, 2017)
ACL