@inproceedings{potapczyk-przybysz-2020-srpols,
  title     = {{SRPOL}'s System for the {IWSLT} 2020 End-to-End Speech Translation Task},
  author    = {Potapczyk, Tomasz and
               Przybysz, Pawel},
  editor    = {Federico, Marcello and
               Waibel, Alex and
               Knight, Kevin and
               Nakamura, Satoshi and
               Ney, Hermann and
               Niehues, Jan and
               St{\"u}ker, Sebastian and
               Wu, Dekai and
               Mariani, Joseph and
               Yvon, Fran{\c{c}}ois},
  booktitle = {Proceedings of the 17th International Conference on Spoken Language Translation},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.iwslt-1.9/},
  doi       = {10.18653/v1/2020.iwslt-1.9},
  pages     = {89--94},
  abstract  = {We took part in the offline End-to-End English to German TED lectures translation task. We based our solution on our last year's submission. We used a slightly altered Transformer architecture with ResNet-like convolutional layer preparing the audio input to Transformer encoder. To improve the model's quality of translation we introduced two regularization techniques and trained on machine translated Librispeech corpus in addition to iwslt-corpus, TEDLIUM2 and Must{\_}C corpora. Our best model scored almost 3 BLEU higher than last year's model. To segment 2020 test set we used exactly the same procedure as last year.}
}
Markdown (Informal)
[SRPOL’s System for the IWSLT 2020 End-to-End Speech Translation Task](https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.iwslt-1.9/) (Potapczyk & Przybysz, IWSLT 2020)
ACL