@inproceedings{imankulova-etal-2019-japanese,
  title     = {{J}apanese-{R}ussian {TMU} Neural Machine Translation System using Multilingual Model for {WAT} 2019},
  author    = {Imankulova, Aizhan and Kaneko, Masahiro and Komachi, Mamoru},
  booktitle = {Proceedings of the 6th Workshop on Asian Translation},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D19-5221},
  doi       = {10.18653/v1/D19-5221},
  pages     = {165--170},
  abstract  = {We introduce our system that is submitted to the News Commentary task (Japanese{\textless}-{\textgreater}Russian) of the 6th Workshop on Asian Translation. The goal of this shared task is to study extremely low resource situations for distant language pairs. It is known that using parallel corpora of different language pair as training data is effective for multilingual neural machine translation model in extremely low resource scenarios. Therefore, to improve the translation quality of Japanese{\textless}-{\textgreater}Russian language pair, our method leverages other in-domain Japanese-English and English-Russian parallel corpora as additional training data for our multilingual NMT model.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="imankulova-etal-2019-japanese">
<titleInfo>
<title>Japanese-Russian TMU Neural Machine Translation System using Multilingual Model for WAT 2019</title>
</titleInfo>
<name type="personal">
<namePart type="given">Aizhan</namePart>
<namePart type="family">Imankulova</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Masahiro</namePart>
<namePart type="family">Kaneko</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Mamoru</namePart>
<namePart type="family">Komachi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 6th Workshop on Asian Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Hong Kong, China</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>We introduce our system that is submitted to the News Commentary task (Japanese&lt;-&gt;Russian) of the 6th Workshop on Asian Translation. The goal of this shared task is to study extremely low resource situations for distant language pairs. It is known that using parallel corpora of different language pair as training data is effective for multilingual neural machine translation model in extremely low resource scenarios. Therefore, to improve the translation quality of Japanese&lt;-&gt;Russian language pair, our method leverages other in-domain Japanese-English and English-Russian parallel corpora as additional training data for our multilingual NMT model.</abstract>
<identifier type="citekey">imankulova-etal-2019-japanese</identifier>
<identifier type="doi">10.18653/v1/D19-5221</identifier>
<location>
<url>https://aclanthology.org/D19-5221</url>
</location>
<part>
<date>2019-11</date>
<extent unit="page">
<start>165</start>
<end>170</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Japanese-Russian TMU Neural Machine Translation System using Multilingual Model for WAT 2019
%A Imankulova, Aizhan
%A Kaneko, Masahiro
%A Komachi, Mamoru
%S Proceedings of the 6th Workshop on Asian Translation
%D 2019
%8 nov
%I Association for Computational Linguistics
%C Hong Kong, China
%F imankulova-etal-2019-japanese
%X We introduce our system that is submitted to the News Commentary task (Japanese<->Russian) of the 6th Workshop on Asian Translation. The goal of this shared task is to study extremely low resource situations for distant language pairs. It is known that using parallel corpora of different language pair as training data is effective for multilingual neural machine translation model in extremely low resource scenarios. Therefore, to improve the translation quality of Japanese<->Russian language pair, our method leverages other in-domain Japanese-English and English-Russian parallel corpora as additional training data for our multilingual NMT model.
%R 10.18653/v1/D19-5221
%U https://aclanthology.org/D19-5221
%U https://doi.org/10.18653/v1/D19-5221
%P 165-170
Markdown (Informal)
[Japanese-Russian TMU Neural Machine Translation System using Multilingual Model for WAT 2019](https://aclanthology.org/D19-5221) (Imankulova et al., WAT 2019)
ACL