@inproceedings{miftahutdinov-etal-2020-kfu,
title = "{KFU} {NLP} Team at {SMM}4{H} 2020 Tasks: Cross-lingual Transfer Learning with Pretrained Language Models for Drug Reactions",
author = "Miftahutdinov, Zulfat and
Sakhovskiy, Andrey and
Tutubalina, Elena",
editor = "Gonzalez-Hernandez, Graciela and
Klein, Ari Z. and
Flores, Ivan and
Weissenbacher, Davy and
Magge, Arjun and
O'Connor, Karen and
Sarker, Abeed and
Minard, Anne-Lyse and
Tutubalina, Elena and
Miftahutdinov, Zulfat and
Alimova, Ilseyar",
booktitle = "Proceedings of the Fifth Social Media Mining for Health Applications Workshop {\&} Shared Task",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2020.smm4h-1.8/",
pages = "51--56",
abstract = "This paper describes neural models developed for the Social Media Mining for Health (SMM4H) 2020 shared tasks. Specifically, we participated in two tasks. We investigate the use of a language representation model BERT pretrained on a large-scale corpus of 5 million health-related user reviews in English and Russian. The ensemble of neural networks for extraction and normalization of adverse drug reactions ranked first among 7 teams at the SMM4H 2020 Task 3 and obtained a relaxed F1 of 46{\%}. The BERT-based multilingual model for classification of English and Russian tweets that report adverse reactions ranked second among 16 and 7 teams at two first subtasks of the SMM4H 2019 Task 2 and obtained a relaxed F1 of 58{\%} on English tweets and 51{\%} on Russian tweets."
}