@inproceedings{schneider-waibel-2020-towards,
title = "Towards Stream Translation: Adaptive Computation Time for Simultaneous Machine Translation",
author = "Schneider, Felix and
Waibel, Alexander",
booktitle = "Proceedings of the 17th International Conference on Spoken Language Translation",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.iwslt-1.28",
doi = "10.18653/v1/2020.iwslt-1.28",
pages = "228--236",
abstract = "Simultaneous machine translation systems rely on a policy to schedule read and write operations in order to begin translating a source sentence before it is complete. In this paper, we demonstrate the use of Adaptive Computation Time (ACT) as an adaptive, learned policy for simultaneous machine translation using the transformer model and as a more numerically stable alternative to Monotonic Infinite Lookback Attention (MILk). We achieve state-of-the-art results in terms of latency-quality tradeoffs. We also propose a method to use our model on unsegmented input, i.e. without sentence boundaries, simulating the condition of translating output from automatic speech recognition. We present first benchmark results on this task.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="schneider-waibel-2020-towards">
<titleInfo>
<title>Towards Stream Translation: Adaptive Computation Time for Simultaneous Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Felix</namePart>
<namePart type="family">Schneider</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Alexander</namePart>
<namePart type="family">Waibel</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued encoding="w3cdtf">2020-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 17th International Conference on Spoken Language Translation</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Simultaneous machine translation systems rely on a policy to schedule read and write operations in order to begin translating a source sentence before it is complete. In this paper, we demonstrate the use of Adaptive Computation Time (ACT) as an adaptive, learned policy for simultaneous machine translation using the transformer model and as a more numerically stable alternative to Monotonic Infinite Lookback Attention (MILk). We achieve state-of-the-art results in terms of latency-quality tradeoffs. We also propose a method to use our model on unsegmented input, i.e. without sentence boundaries, simulating the condition of translating output from automatic speech recognition. We present first benchmark results on this task.</abstract>
<identifier type="citekey">schneider-waibel-2020-towards</identifier>
<identifier type="doi">10.18653/v1/2020.iwslt-1.28</identifier>
<location>
<url>https://aclanthology.org/2020.iwslt-1.28</url>
</location>
<part>
<date>2020-07</date>
<extent unit="page">
<start>228</start>
<end>236</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Towards Stream Translation: Adaptive Computation Time for Simultaneous Machine Translation
%A Schneider, Felix
%A Waibel, Alexander
%S Proceedings of the 17th International Conference on Spoken Language Translation
%D 2020
%8 jul
%I Association for Computational Linguistics
%C Online
%F schneider-waibel-2020-towards
%X Simultaneous machine translation systems rely on a policy to schedule read and write operations in order to begin translating a source sentence before it is complete. In this paper, we demonstrate the use of Adaptive Computation Time (ACT) as an adaptive, learned policy for simultaneous machine translation using the transformer model and as a more numerically stable alternative to Monotonic Infinite Lookback Attention (MILk). We achieve state-of-the-art results in terms of latency-quality tradeoffs. We also propose a method to use our model on unsegmented input, i.e. without sentence boundaries, simulating the condition of translating output from automatic speech recognition. We present first benchmark results on this task.
%R 10.18653/v1/2020.iwslt-1.28
%U https://aclanthology.org/2020.iwslt-1.28
%U https://doi.org/10.18653/v1/2020.iwslt-1.28
%P 228-236
Markdown (Informal)
[Towards Stream Translation: Adaptive Computation Time for Simultaneous Machine Translation](https://aclanthology.org/2020.iwslt-1.28) (Schneider & Waibel, IWSLT 2020)
ACL