@inproceedings{liu-etal-2019-discourse-representation,
  author    = {Liu, Jiangming and
               Cohen, Shay B. and
               Lapata, Mirella},
  title     = {Discourse Representation Structure Parsing with Recurrent Neural Networks and the {Transformer} Model},
  booktitle = {Proceedings of the {IWCS} Shared Task on Semantic Parsing},
  month     = may,
  year      = {2019},
  address   = {Gothenburg, Sweden},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/W19-1203},
  doi       = {10.18653/v1/W19-1203},
  abstract  = {We describe the systems we developed for Discourse Representation Structure (DRS) parsing as part of the IWCS-2019 Shared Task of DRS Parsing. Our systems are based on sequence-to-sequence modeling. To implement our model, we use the open-source neural machine translation system implemented in PyTorch, OpenNMT-py. We experimented with a variety of encoder-decoder models based on recurrent neural networks and the Transformer model. We conduct experiments on the standard benchmark of the Parallel Meaning Bank (PMB 2.2). Our best system achieves a score of 84.8{\%} F1 in the DRS parsing shared task.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="liu-etal-2019-discourse-representation">
<titleInfo>
<title>Discourse Representation Structure Parsing with Recurrent Neural Networks and the Transformer Model</title>
</titleInfo>
<name type="personal">
<namePart type="given">Jiangming</namePart>
<namePart type="family">Liu</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Shay</namePart>
<namePart type="given">B</namePart>
<namePart type="family">Cohen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mirella</namePart>
<namePart type="family">Lapata</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-05</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the IWCS Shared Task on Semantic Parsing</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Gothenburg, Sweden</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We describe the systems we developed for Discourse Representation Structure (DRS) parsing as part of the IWCS-2019 Shared Task of DRS Parsing. Our systems are based on sequence-to-sequence modeling. To implement our model, we use the open-source neural machine translation system implemented in PyTorch, OpenNMT-py. We experimented with a variety of encoder-decoder models based on recurrent neural networks and the Transformer model. We conduct experiments on the standard benchmark of the Parallel Meaning Bank (PMB 2.2). Our best system achieves a score of 84.8% F1 in the DRS parsing shared task.</abstract>
<identifier type="citekey">liu-etal-2019-discourse-representation</identifier>
<identifier type="doi">10.18653/v1/W19-1203</identifier>
<location>
<url>https://aclanthology.org/W19-1203</url>
</location>
<part>
<date>2019-05</date>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Discourse Representation Structure Parsing with Recurrent Neural Networks and the Transformer Model
%A Liu, Jiangming
%A Cohen, Shay B.
%A Lapata, Mirella
%S Proceedings of the IWCS Shared Task on Semantic Parsing
%D 2019
%8 may
%I Association for Computational Linguistics
%C Gothenburg, Sweden
%F liu-etal-2019-discourse-representation
%X We describe the systems we developed for Discourse Representation Structure (DRS) parsing as part of the IWCS-2019 Shared Task of DRS Parsing. Our systems are based on sequence-to-sequence modeling. To implement our model, we use the open-source neural machine translation system implemented in PyTorch, OpenNMT-py. We experimented with a variety of encoder-decoder models based on recurrent neural networks and the Transformer model. We conduct experiments on the standard benchmark of the Parallel Meaning Bank (PMB 2.2). Our best system achieves a score of 84.8% F1 in the DRS parsing shared task.
%R 10.18653/v1/W19-1203
%U https://aclanthology.org/W19-1203
%U https://doi.org/10.18653/v1/W19-1203
Markdown (Informal)
[Discourse Representation Structure Parsing with Recurrent Neural Networks and the Transformer Model](https://aclanthology.org/W19-1203) (Liu et al., 2019)
ACL