@inproceedings{dabre-sumita-2019-nicts-supervised,
title = "{NICT}{'}s Supervised Neural Machine Translation Systems for the {WMT}19 Translation Robustness Task",
author = "Dabre, Raj and
Sumita, Eiichiro",
booktitle = "Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/W19-5362",
doi = "10.18653/v1/W19-5362",
pages = "533--536",
abstract = "In this paper we describe our neural machine translation (NMT) systems for Japanese↔English translation which we submitted to the translation robustness task. We focused on leveraging transfer learning via fine tuning to improve translation quality. We used a fairly well established domain adaptation technique called Mixed Fine Tuning (MFT) (Chu et. al., 2017) to improve translation quality for Japanese↔English. We also trained bi-directional NMT models instead of uni-directional ones as the former are known to be quite robust, especially in low-resource scenarios. However, given the noisy nature of the in-domain training data, the improvements we obtained are rather modest.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="dabre-sumita-2019-nicts-supervised">
<titleInfo>
<title>NICT’s Supervised Neural Machine Translation Systems for the WMT19 Translation Robustness Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Raj</namePart>
<namePart type="family">Dabre</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Eiichiro</namePart>
<namePart type="family">Sumita</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Florence, Italy</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>In this paper we describe our neural machine translation (NMT) systems for Japanese↔English translation which we submitted to the translation robustness task. We focused on leveraging transfer learning via fine tuning to improve translation quality. We used a fairly well established domain adaptation technique called Mixed Fine Tuning (MFT) (Chu et. al., 2017) to improve translation quality for Japanese↔English. We also trained bi-directional NMT models instead of uni-directional ones as the former are known to be quite robust, especially in low-resource scenarios. However, given the noisy nature of the in-domain training data, the improvements we obtained are rather modest.</abstract>
<identifier type="citekey">dabre-sumita-2019-nicts-supervised</identifier>
<identifier type="doi">10.18653/v1/W19-5362</identifier>
<location>
<url>https://aclanthology.org/W19-5362</url>
</location>
<part>
<date>2019-08</date>
<extent unit="page">
<start>533</start>
<end>536</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T NICT’s Supervised Neural Machine Translation Systems for the WMT19 Translation Robustness Task
%A Dabre, Raj
%A Sumita, Eiichiro
%S Proceedings of the Fourth Conference on Machine Translation (Volume 2: Shared Task Papers, Day 1)
%D 2019
%8 aug
%I Association for Computational Linguistics
%C Florence, Italy
%F dabre-sumita-2019-nicts-supervised
%X In this paper we describe our neural machine translation (NMT) systems for Japanese↔English translation which we submitted to the translation robustness task. We focused on leveraging transfer learning via fine tuning to improve translation quality. We used a fairly well established domain adaptation technique called Mixed Fine Tuning (MFT) (Chu et. al., 2017) to improve translation quality for Japanese↔English. We also trained bi-directional NMT models instead of uni-directional ones as the former are known to be quite robust, especially in low-resource scenarios. However, given the noisy nature of the in-domain training data, the improvements we obtained are rather modest.
%R 10.18653/v1/W19-5362
%U https://aclanthology.org/W19-5362
%U https://doi.org/10.18653/v1/W19-5362
%P 533-536
Markdown (Informal)
[NICT’s Supervised Neural Machine Translation Systems for the WMT19 Translation Robustness Task](https://aclanthology.org/W19-5362) (Dabre & Sumita, 2019)
ACL