@inproceedings{pal-etal-2018-transformer,
    title     = {A {Transformer}-Based Multi-Source Automatic Post-Editing System},
    author    = {Pal, Santanu and
                 Herbig, Nico and
                 Kr{\"u}ger, Antonio and
                 van Genabith, Josef},
    booktitle = {Proceedings of the Third Conference on Machine Translation: Shared Task Papers},
    month     = oct,
    year      = {2018},
    address   = {Brussels, Belgium},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/W18-6468},
    doi       = {10.18653/v1/W18-6468},
    pages     = {827--835},
    abstract  = {This paper presents our English{--}German Automatic Post-Editing (APE) system submitted to the APE Task organized at WMT 2018 (Chatterjee et al., 2018). The proposed model is an extension of the transformer architecture: two separate self-attention-based encoders encode the machine translation output (mt) and the source (src), followed by a joint encoder that attends over a combination of these two encoded sequences (encsrc and encmt) for generating the post-edited sentence. We compare this multi-source architecture (i.e., {src, mt} $\rightarrow$ pe) to a monolingual transformer (i.e., mt $\rightarrow$ pe) model and an ensemble combining the multi-source {src, mt} $\rightarrow$ pe and single-source mt $\rightarrow$ pe models. For both the PBSMT and the NMT task, the ensemble yields the best results, followed by the multi-source model and last the single-source approach. Our best model, the ensemble, achieves a BLEU score of 66.16 and 74.22 for the PBSMT and NMT task, respectively.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="pal-etal-2018-transformer">
<titleInfo>
<title>A Transformer-Based Multi-Source Automatic Post-Editing System</title>
</titleInfo>
<name type="personal">
<namePart type="given">Santanu</namePart>
<namePart type="family">Pal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Nico</namePart>
<namePart type="family">Herbig</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Antonio</namePart>
<namePart type="family">Krüger</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Josef</namePart>
<namePart type="family">van Genabith</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-10</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Third Conference on Machine Translation: Shared Task Papers</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Brussels, Belgium</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper presents our English–German Automatic Post-Editing (APE) system submitted to the APE Task organized at WMT 2018 (Chatterjee et al., 2018). The proposed model is an extension of the transformer architecture: two separate self-attention-based encoders encode the machine translation output (mt) and the source (src), followed by a joint encoder that attends over a combination of these two encoded sequences (encsrc and encmt) for generating the post-edited sentence. We compare this multi-source architecture (i.e, src, mt → pe) to a monolingual transformer (i.e., mt → pe) model and an ensemble combining the multi-source src, mt → pe and single-source mt → pe models. For both the PBSMT and the NMT task, the ensemble yields the best results, followed by the multi-source model and last the single-source approach. Our best model, the ensemble, achieves a BLEU score of 66.16 and 74.22 for the PBSMT and NMT task, respectively.</abstract>
<identifier type="citekey">pal-etal-2018-transformer</identifier>
<identifier type="doi">10.18653/v1/W18-6468</identifier>
<location>
<url>https://aclanthology.org/W18-6468</url>
</location>
<part>
<date>2018-10</date>
<extent unit="page">
<start>827</start>
<end>835</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T A Transformer-Based Multi-Source Automatic Post-Editing System
%A Pal, Santanu
%A Herbig, Nico
%A Krüger, Antonio
%A van Genabith, Josef
%S Proceedings of the Third Conference on Machine Translation: Shared Task Papers
%D 2018
%8 oct
%I Association for Computational Linguistics
%C Brussels, Belgium
%F pal-etal-2018-transformer
%X This paper presents our English–German Automatic Post-Editing (APE) system submitted to the APE Task organized at WMT 2018 (Chatterjee et al., 2018). The proposed model is an extension of the transformer architecture: two separate self-attention-based encoders encode the machine translation output (mt) and the source (src), followed by a joint encoder that attends over a combination of these two encoded sequences (encsrc and encmt) for generating the post-edited sentence. We compare this multi-source architecture (i.e, src, mt → pe) to a monolingual transformer (i.e., mt → pe) model and an ensemble combining the multi-source src, mt → pe and single-source mt → pe models. For both the PBSMT and the NMT task, the ensemble yields the best results, followed by the multi-source model and last the single-source approach. Our best model, the ensemble, achieves a BLEU score of 66.16 and 74.22 for the PBSMT and NMT task, respectively.
%R 10.18653/v1/W18-6468
%U https://aclanthology.org/W18-6468
%U https://doi.org/10.18653/v1/W18-6468
%P 827-835
Markdown (Informal)
[A Transformer-Based Multi-Source Automatic Post-Editing System](https://aclanthology.org/W18-6468) (Pal et al., 2018)
ACL