@inproceedings{shin-lee-2018-multi,
title = "Multi-encoder Transformer Network for Automatic Post-Editing",
author = "Shin, Jaehun and
Lee, Jong-Hyeok",
booktitle = "Proceedings of the Third Conference on Machine Translation: Shared Task Papers",
month = oct,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W18-6470",
doi = "10.18653/v1/W18-6470",
pages = "840--845",
abstract = "This paper describes POSTECH{'}s submission to the WMT 2018 shared task on Automatic Post-Editing (APE). We propose a new neural end-to-end post-editing model based on the transformer network. We modify the encoder-decoder attention to reflect the relation among the machine translation output, the source, and the post-edited translation in the APE problem. Experiments on the WMT17 English-German APE data set show an improvement in both TER and BLEU scores over the best result of the WMT17 APE shared task. Our primary submission achieves -4.52 TER and +6.81 BLEU on the PBSMT task, and -0.13 TER and +0.40 BLEU on the NMT task, compared to the baseline.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="shin-lee-2018-multi">
<titleInfo>
<title>Multi-encoder Transformer Network for Automatic Post-Editing</title>
</titleInfo>
<name type="personal">
<namePart type="given">Jaehun</namePart>
<namePart type="family">Shin</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Jong-Hyeok</namePart>
<namePart type="family">Lee</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Third Conference on Machine Translation: Shared Task Papers</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Brussels, Belgium</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper describes POSTECH’s submission to the WMT 2018 shared task on Automatic Post-Editing (APE). We propose a new neural end-to-end post-editing model based on the transformer network. We modify the encoder-decoder attention to reflect the relation among the machine translation output, the source, and the post-edited translation in the APE problem. Experiments on the WMT17 English-German APE data set show an improvement in both TER and BLEU scores over the best result of the WMT17 APE shared task. Our primary submission achieves -4.52 TER and +6.81 BLEU on the PBSMT task, and -0.13 TER and +0.40 BLEU on the NMT task, compared to the baseline.</abstract>
<identifier type="citekey">shin-lee-2018-multi</identifier>
<identifier type="doi">10.18653/v1/W18-6470</identifier>
<location>
<url>https://aclanthology.org/W18-6470</url>
</location>
<part>
<date>2018-10</date>
<extent unit="page">
<start>840</start>
<end>845</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Multi-encoder Transformer Network for Automatic Post-Editing
%A Shin, Jaehun
%A Lee, Jong-Hyeok
%S Proceedings of the Third Conference on Machine Translation: Shared Task Papers
%D 2018
%8 oct
%I Association for Computational Linguistics
%C Brussels, Belgium
%F shin-lee-2018-multi
%X This paper describes POSTECH’s submission to the WMT 2018 shared task on Automatic Post-Editing (APE). We propose a new neural end-to-end post-editing model based on the transformer network. We modify the encoder-decoder attention to reflect the relation among the machine translation output, the source, and the post-edited translation in the APE problem. Experiments on the WMT17 English-German APE data set show an improvement in both TER and BLEU scores over the best result of the WMT17 APE shared task. Our primary submission achieves -4.52 TER and +6.81 BLEU on the PBSMT task, and -0.13 TER and +0.40 BLEU on the NMT task, compared to the baseline.
%R 10.18653/v1/W18-6470
%U https://aclanthology.org/W18-6470
%U https://doi.org/10.18653/v1/W18-6470
%P 840-845
Markdown (Informal)
[Multi-encoder Transformer Network for Automatic Post-Editing](https://aclanthology.org/W18-6470) (Shin & Lee, 2018)
ACL
Jaehun Shin and Jong-Hyeok Lee. 2018. [Multi-encoder Transformer Network for Automatic Post-Editing](https://aclanthology.org/W18-6470). In *Proceedings of the Third Conference on Machine Translation: Shared Task Papers*, pages 840–845, Brussels, Belgium. Association for Computational Linguistics.
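The abstract's central idea is a decoder whose encoder-decoder attention consults two encoder memories: one over the source sentence and one over the raw MT output. Below is a minimal PyTorch sketch of such a multi-source attention block. It is not the authors' implementation; the sequential (stacked) combination of the two attention results, and all class, parameter, and variable names, are assumptions made for illustration.

```python
import torch
import torch.nn as nn

class MultiEncoderAttention(nn.Module):
    """Hypothetical sketch: the decoder attends to two encoder memories in turn.

    `src_mem` is the encoding of the source sentence and `mt_mem` the encoding
    of the machine-translation output. Shin & Lee (2018) may combine the two
    attentions differently; this version simply stacks them.
    """
    def __init__(self, d_model: int, n_heads: int):
        super().__init__()
        self.attn_src = nn.MultiheadAttention(d_model, n_heads, batch_first=True)
        self.attn_mt = nn.MultiheadAttention(d_model, n_heads, batch_first=True)
        self.norm1 = nn.LayerNorm(d_model)
        self.norm2 = nn.LayerNorm(d_model)

    def forward(self, dec_states, src_mem, mt_mem):
        # First attend to the source encoder's representation ...
        ctx_src, _ = self.attn_src(dec_states, src_mem, src_mem)
        h = self.norm1(dec_states + ctx_src)
        # ... then to the MT-output encoder's representation, so the decoder
        # can relate the source to the raw translation when post-editing.
        ctx_mt, _ = self.attn_mt(h, mt_mem, mt_mem)
        return self.norm2(h + ctx_mt)

# Toy usage: batch of 2, source length 7, MT-output length 9, target length 5.
d_model, n_heads = 16, 4
layer = MultiEncoderAttention(d_model, n_heads)
out = layer(torch.randn(2, 5, d_model),   # decoder states
            torch.randn(2, 7, d_model),   # source encoder memory
            torch.randn(2, 9, d_model))   # MT-output encoder memory
print(out.shape)  # torch.Size([2, 5, 16])
```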