@inproceedings{yu-etal-2018-approximate,
title = "Approximate Dynamic Oracle for Dependency Parsing with Reinforcement Learning",
author = "Yu, Xiang and
Vu, Ngoc Thang and
Kuhn, Jonas",
editor = "de Marneffe, Marie-Catherine and
Lynn, Teresa and
Schuster, Sebastian",
booktitle = "Proceedings of the Second Workshop on Universal Dependencies ({UDW} 2018)",
month = nov,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/W18-6021/",
doi = "10.18653/v1/W18-6021",
pages = "183--191",
abstract = "We present a general approach with reinforcement learning (RL) to approximate dynamic oracles for transition systems where exact dynamic oracles are difficult to derive. We treat oracle parsing as a reinforcement learning problem, design the reward function inspired by the classical dynamic oracle, and use Deep Q-Learning (DQN) techniques to train the oracle with gold trees as features. The combination of a priori knowledge and data-driven methods enables an efficient dynamic oracle, which improves the parser performance over static oracles in several transition systems."
}
Markdown (Informal)
[Approximate Dynamic Oracle for Dependency Parsing with Reinforcement Learning](https://preview.aclanthology.org/fix-sig-urls/W18-6021/) (Yu et al., UDW 2018)
ACL
Xiang Yu, Ngoc Thang Vu, and Jonas Kuhn. 2018. Approximate Dynamic Oracle for Dependency Parsing with Reinforcement Learning. In Proceedings of the Second Workshop on Universal Dependencies (UDW 2018), pages 183–191, Brussels, Belgium. Association for Computational Linguistics.
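
The abstract describes the recipe at a high level: treat oracle parsing as a reinforcement learning problem, shape the reward after the arc loss of a classical dynamic oracle, and train the oracle with Q-learning on features that see the gold tree. The sketch below is only an illustration of that recipe, not the authors' implementation: the toy arc-standard environment, the tiny gold-tree feature template, the exact reward shaping, and all hyperparameters are assumptions made up for the example.

```python
# Minimal sketch: a DQN-style approximate oracle for a toy arc-standard
# transition system, rewarded in the spirit of a dynamic oracle's arc loss.
# All details below are illustrative assumptions, not the paper's setup.

import random
import torch
import torch.nn as nn

ACTIONS = ["SHIFT", "LEFT_ARC", "RIGHT_ARC"]

class ArcStandardEnv:
    """Toy arc-standard parser driven by a gold head array (0 = ROOT)."""

    def __init__(self, gold_heads):
        self.gold = gold_heads                      # gold_heads[i] = head of token i+1
        self.n = len(gold_heads)
        self.stack = [0]                            # 0 is the artificial ROOT
        self.buffer = list(range(1, self.n + 1))
        self.arcs = {}                              # dependent -> predicted head

    def valid(self, a):
        if a == "SHIFT":
            return len(self.buffer) > 0
        if a == "LEFT_ARC":
            return len(self.stack) >= 2 and self.stack[-2] != 0
        return len(self.stack) >= 2                 # RIGHT_ARC

    def features(self):
        """Gold-tree features: does a gold arc hold between the top of the
        stack, the second stack item, and the front of the buffer?"""
        s1 = self.stack[-1]
        s2 = self.stack[-2] if len(self.stack) > 1 else -1
        b1 = self.buffer[0] if self.buffer else -1
        gold_head = lambda d: self.gold[d - 1] if d >= 1 else -2
        f = [
            float(gold_head(s2) == s1),             # s1 is gold head of s2
            float(gold_head(s1) == s2),             # s2 is gold head of s1
            float(gold_head(s1) == b1),             # b1 is gold head of s1
            float(any(gold_head(d) == s1 for d in self.buffer)),  # s1 has gold deps in buffer
            float(len(self.buffer) > 0),
            float(len(self.stack) > 1),
        ]
        return torch.tensor(f)

    def step(self, a):
        """Apply action a; reward +1 for building a gold arc, -1 for a wrong
        arc, and -1 per gold arc the action makes unreachable."""
        reward = 0.0
        if a == "SHIFT":
            self.stack.append(self.buffer.pop(0))
        else:
            dep = self.stack.pop(-2) if a == "LEFT_ARC" else self.stack.pop(-1)
            head = self.stack[-1]
            self.arcs[dep] = head
            reward += 1.0 if self.gold[dep - 1] == head else -1.0
            # dep can no longer acquire dependents: penalize its lost gold children
            reward -= sum(1.0 for d in self.stack + self.buffer
                          if d >= 1 and self.gold[d - 1] == dep)
        done = len(self.buffer) == 0 and len(self.stack) == 1
        return self.features(), reward, done

# Small Q-network over the gold-tree features.
qnet = nn.Sequential(nn.Linear(6, 32), nn.ReLU(), nn.Linear(32, len(ACTIONS)))
opt = torch.optim.Adam(qnet.parameters(), lr=1e-3)
gamma, eps = 0.9, 0.2

def train_episode(gold_heads):
    env = ArcStandardEnv(gold_heads)
    state = env.features()
    while True:
        valid = [i for i, a in enumerate(ACTIONS) if env.valid(a)]
        if random.random() < eps:                   # epsilon-greedy exploration
            a = random.choice(valid)
        else:
            q = qnet(state).detach()
            a = max(valid, key=lambda i: q[i].item())
        next_state, r, done = env.step(ACTIONS[a])
        target = r if done else r + gamma * qnet(next_state).detach().max().item()
        loss = (qnet(state)[a] - target) ** 2       # one-step Q-learning loss
        opt.zero_grad(); loss.backward(); opt.step()
        state = next_state
        if done:
            break

# e.g. a 3-token sentence whose gold heads are [2, 0, 2] (token 2 is the root)
for _ in range(200):
    train_episode([2, 0, 2])
```

In this sketch the oracle, once trained, would be queried during parser training to score transitions from arbitrary (possibly erroneous) configurations, which is the role a dynamic oracle plays; the paper's actual feature set, reward, and DQN variants should be taken from the full text rather than from this example.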