@inproceedings{fonseca-martins-2020-revisiting,
  title     = {Revisiting Higher-Order Dependency Parsers},
  author    = {Fonseca, Erick and
               Martins, Andr{\'e} F. T.},
  editor    = {Jurafsky, Dan and
               Chai, Joyce and
               Schluter, Natalie and
               Tetreault, Joel},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.acl-main.776/},
  doi       = {10.18653/v1/2020.acl-main.776},
  pages     = {8795--8800},
  abstract  = {Neural encoders have allowed dependency parsers to shift from higher-order structured models to simpler first-order ones, making decoding faster and still achieving better accuracy than non-neural parsers. This has led to a belief that neural encoders can implicitly encode structural constraints, such as siblings and grandparents in a tree. We tested this hypothesis and found that neural parsers may benefit from higher-order features, even when employing a powerful pre-trained encoder, such as BERT. While the gains of higher-order features are small in the presence of a powerful encoder, they are consistent for long-range dependencies and long sentences. In particular, higher-order models are more accurate on full sentence parses and on the exact match of modifier lists, indicating that they deal better with larger, more complex structures.},
}
Markdown (Informal)
[Revisiting Higher-Order Dependency Parsers](https://aclanthology.org/2020.acl-main.776/) (Fonseca & Martins, ACL 2020)
ACL
- Erick Fonseca and André F. T. Martins. 2020. Revisiting Higher-Order Dependency Parsers. In Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics, pages 8795–8800, Online. Association for Computational Linguistics.