@inproceedings{escolano-etal-2019-bilingual,
title = "From Bilingual to Multilingual Neural Machine Translation by Incremental Training",
author = "Escolano, Carlos and
Costa-juss{\`a}, Marta R. and
Fonollosa, Jos{\'e} A. R.",
booktitle = "Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics: Student Research Workshop",
month = jul,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/P19-2033",
doi = "10.18653/v1/P19-2033",
pages = "236--242",
abstract = "Multilingual Neural Machine Translation approaches are based on the use of task specific models and the addition of one more language can only be done by retraining the whole system. In this work, we propose a new training schedule that allows the system to scale to more languages without modification of the previous components based on joint training and language-independent encoder/decoder modules allowing for zero-shot translation. This work in progress shows close results to state-of-the-art in the WMT task.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="escolano-etal-2019-bilingual">
<titleInfo>
<title>From Bilingual to Multilingual Neural Machine Translation by Incremental Training</title>
</titleInfo>
<name type="personal">
<namePart type="given">Carlos</namePart>
<namePart type="family">Escolano</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Marta</namePart>
<namePart type="given">R</namePart>
<namePart type="family">Costa-jussà</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">José</namePart>
<namePart type="given">A</namePart>
<namePart type="given">R</namePart>
<namePart type="family">Fonollosa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2019-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics: Student Research Workshop</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Florence, Italy</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Multilingual Neural Machine Translation approaches are based on the use of task specific models and the addition of one more language can only be done by retraining the whole system. In this work, we propose a new training schedule that allows the system to scale to more languages without modification of the previous components based on joint training and language-independent encoder/decoder modules allowing for zero-shot translation. This work in progress shows close results to state-of-the-art in the WMT task.</abstract>
<identifier type="citekey">escolano-etal-2019-bilingual</identifier>
<identifier type="doi">10.18653/v1/P19-2033</identifier>
<location>
<url>https://aclanthology.org/P19-2033</url>
</location>
<part>
<date>2019-07</date>
<extent unit="page">
<start>236</start>
<end>242</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T From Bilingual to Multilingual Neural Machine Translation by Incremental Training
%A Escolano, Carlos
%A Costa-jussà, Marta R.
%A Fonollosa, José A. R.
%S Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics: Student Research Workshop
%D 2019
%8 July
%I Association for Computational Linguistics
%C Florence, Italy
%F escolano-etal-2019-bilingual
%X Multilingual Neural Machine Translation approaches are based on the use of task specific models and the addition of one more language can only be done by retraining the whole system. In this work, we propose a new training schedule that allows the system to scale to more languages without modification of the previous components based on joint training and language-independent encoder/decoder modules allowing for zero-shot translation. This work in progress shows close results to state-of-the-art in the WMT task.
%R 10.18653/v1/P19-2033
%U https://aclanthology.org/P19-2033
%U https://doi.org/10.18653/v1/P19-2033
%P 236-242
Markdown (Informal)
[From Bilingual to Multilingual Neural Machine Translation by Incremental Training](https://aclanthology.org/P19-2033) (Escolano et al., ACL 2019)
ACL