@inproceedings{chang-rafferty-2020-encodings,
title = "Encodings of Source Syntax: Similarities in {NMT} Representations Across Target Languages",
author = "Chang, Tyler A. and
Rafferty, Anna",
editor = "Gella, Spandana and
Welbl, Johannes and
Rei, Marek and
Petroni, Fabio and
Lewis, Patrick and
Strubell, Emma and
Seo, Minjoon and
Hajishirzi, Hannaneh",
booktitle = "Proceedings of the 5th Workshop on Representation Learning for NLP",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.repl4nlp-1.2",
doi = "10.18653/v1/2020.repl4nlp-1.2",
pages = "7--16",
abstract = "We train neural machine translation (NMT) models from English to six target languages, using NMT encoder representations to predict ancestor constituent labels of source language words. We find that NMT encoders learn similar source syntax regardless of NMT target language, relying on explicit morphosyntactic cues to extract syntactic features from source sentences. Furthermore, the NMT encoders outperform RNNs trained directly on several of the constituent label prediction tasks, suggesting that NMT encoder representations can be used effectively for natural language tasks involving syntax. However, both the NMT encoders and the directly-trained RNNs learn substantially different syntactic information from a probabilistic context-free grammar (PCFG) parser. Despite lower overall accuracy scores, the PCFG often performs well on sentences for which the RNN-based models perform poorly, suggesting that RNN architectures are constrained in the types of syntax they can learn.",
}