@inproceedings{kim-etal-2019-qe,
title = "{QE} {BERT}: Bilingual {BERT} Using Multi-task Learning for Neural Quality Estimation",
author = "Kim, Hyun and
Lim, Joon-Ho and
Kim, Hyun-Ki and
Na, Seung-Hoon",
booktitle = "Proceedings of the Fourth Conference on Machine Translation (Volume 3: Shared Task Papers, Day 2)",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W19-5407",
doi = "10.18653/v1/W19-5407",
pages = "85--89",
abstract = "For translation quality estimation at word and sentence levels, this paper presents a novel approach based on BERT that recently has achieved impressive results on various natural language processing tasks. Our proposed model is re-purposed BERT for the translation quality estimation and uses multi-task learning for the sentence-level task and word-level subtasks (i.e., source word, target word, and target gap). Experimental results on Quality Estimation shared task of WMT19 show that our systems show competitive results and provide significant improvements over the baseline.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="kim-etal-2019-qe">
<titleInfo>
<title>QE BERT: Bilingual BERT Using Multi-task Learning for Neural Quality Estimation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Hyun</namePart>
<namePart type="family">Kim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Joon-Ho</namePart>
<namePart type="family">Lim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hyun-Ki</namePart>
<namePart type="family">Kim</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Seung-Hoon</namePart>
<namePart type="family">Na</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fourth Conference on Machine Translation (Volume 3: Shared Task Papers, Day 2)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Florence, Italy</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>For translation quality estimation at word and sentence levels, this paper presents a novel approach based on BERT that recently has achieved impressive results on various natural language processing tasks. Our proposed model is re-purposed BERT for the translation quality estimation and uses multi-task learning for the sentence-level task and word-level subtasks (i.e., source word, target word, and target gap). Experimental results on Quality Estimation shared task of WMT19 show that our systems show competitive results and provide significant improvements over the baseline.</abstract>
<identifier type="citekey">kim-etal-2019-qe</identifier>
<identifier type="doi">10.18653/v1/W19-5407</identifier>
<location>
<url>https://aclanthology.org/W19-5407</url>
</location>
<part>
<date>2019-08</date>
<extent unit="page">
<start>85</start>
<end>89</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T QE BERT: Bilingual BERT Using Multi-task Learning for Neural Quality Estimation
%A Kim, Hyun
%A Lim, Joon-Ho
%A Kim, Hyun-Ki
%A Na, Seung-Hoon
%S Proceedings of the Fourth Conference on Machine Translation (Volume 3: Shared Task Papers, Day 2)
%D 2019
%8 aug
%I Association for Computational Linguistics
%C Florence, Italy
%F kim-etal-2019-qe
%X For translation quality estimation at word and sentence levels, this paper presents a novel approach based on BERT that recently has achieved impressive results on various natural language processing tasks. Our proposed model is re-purposed BERT for the translation quality estimation and uses multi-task learning for the sentence-level task and word-level subtasks (i.e., source word, target word, and target gap). Experimental results on Quality Estimation shared task of WMT19 show that our systems show competitive results and provide significant improvements over the baseline.
%R 10.18653/v1/W19-5407
%U https://aclanthology.org/W19-5407
%U https://doi.org/10.18653/v1/W19-5407
%P 85-89
Markdown (Informal)
[QE BERT: Bilingual BERT Using Multi-task Learning for Neural Quality Estimation](https://aclanthology.org/W19-5407) (Kim et al., 2019)
ACL