@inproceedings{mohammed-etal-2020-just,
title = "{JUST} System for {WMT}20 Chat Translation Task",
author = "Mohammed, Roweida and
Al-Ayyoub, Mahmoud and
Abdullah, Malak",
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.59",
pages = "479--482",
abstract = "Machine Translation (MT) is a sub-field of Artificial Intelligence and Natural Language Processing that investigates and studies the ways of automatically translating a text from one language to another. In this paper, we present the details of our submission to the WMT20 Chat Translation Task, which consists of two language directions, English {--}{\textgreater} German and German {--}{\textgreater} English. The major feature of our system is applying a pre-trained BERT embedding with a bidirectional recurrent neural network. Our system ensembles three models, each with different hyperparameters. Despite being trained on a very small corpus, our model produces surprisingly good results.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="mohammed-etal-2020-just">
<titleInfo>
<title>JUST System for WMT20 Chat Translation Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Roweida</namePart>
<namePart type="family">Mohammed</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mahmoud</namePart>
<namePart type="family">Al-Ayyoub</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Malak</namePart>
<namePart type="family">Abdullah</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fifth Conference on Machine Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Machine Translation (MT) is a sub-field of Artificial Intelligence and Natural Language Processing that investigates and studies the ways of automatically translating a text from one language to another. In this paper, we present the details of our submission to the WMT20 Chat Translation Task, which consists of two language directions, English –&gt; German and German –&gt; English. The major feature of our system is applying a pre-trained BERT embedding with a bidirectional recurrent neural network. Our system ensembles three models, each with different hyperparameters. Despite being trained on a very small corpus, our model produces surprisingly good results.</abstract>
<identifier type="citekey">mohammed-etal-2020-just</identifier>
<location>
<url>https://aclanthology.org/2020.wmt-1.59</url>
</location>
<part>
<date>2020-11</date>
<extent unit="page">
<start>479</start>
<end>482</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T JUST System for WMT20 Chat Translation Task
%A Mohammed, Roweida
%A Al-Ayyoub, Mahmoud
%A Abdullah, Malak
%S Proceedings of the Fifth Conference on Machine Translation
%D 2020
%8 nov
%I Association for Computational Linguistics
%C Online
%F mohammed-etal-2020-just
%X Machine Translation (MT) is a sub-field of Artificial Intelligence and Natural Language Processing that investigates and studies the ways of automatically translating a text from one language to another. In this paper, we present the details of our submission to the WMT20 Chat Translation Task, which consists of two language directions, English –> German and German –> English. The major feature of our system is applying a pre-trained BERT embedding with a bidirectional recurrent neural network. Our system ensembles three models, each with different hyperparameters. Despite being trained on a very small corpus, our model produces surprisingly good results.
%U https://aclanthology.org/2020.wmt-1.59
%P 479-482
Markdown (Informal)
[JUST System for WMT20 Chat Translation Task](https://aclanthology.org/2020.wmt-1.59) (Mohammed et al., WMT 2020)
ACL
- Roweida Mohammed, Mahmoud Al-Ayyoub, and Malak Abdullah. 2020. JUST System for WMT20 Chat Translation Task. In Proceedings of the Fifth Conference on Machine Translation, pages 479–482, Online. Association for Computational Linguistics.