@inproceedings{valdes-etal-2021-uach,
    title = "{UACH}-{INAOE} at {SMM}4{H}: a {BERT} based approach for classification of {COVID}-19 {T}witter posts",
    author = "Valdes, Alberto and
      Lopez, Jesus and
      Montes, Manuel",
    editor = "Magge, Arjun and
      Klein, Ari and
      Miranda-Escalada, Antonio and
      Al-garadi, Mohammed Ali and
      Alimova, Ilseyar and
      Miftahutdinov, Zulfat and
      Farre-Maduell, Eulalia and
      Lopez, Salvador Lima and
      Flores, Ivan and
      O'Connor, Karen and
      Weissenbacher, Davy and
      Tutubalina, Elena and
      Sarker, Abeed and
      Banda, Juan M. and
      Krallinger, Martin and
      Gonzalez-Hernandez, Graciela",
    booktitle = "Proceedings of the Sixth Social Media Mining for Health ({\#}SMM4H) Workshop and Shared Task",
    month = jun,
    year = "2021",
    address = "Mexico City, Mexico",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.smm4h-1.10/",
    doi = "10.18653/v1/2021.smm4h-1.10",
    pages = "65--68",
    abstract = "This work describes the participation of the Universidad Aut{\'o}noma de Chihuahua - Instituto Nacional de Astrof{\'i}sica, {\'O}ptica y Electr{\'o}nica team at the Social Media Mining for Health Applications (SMM4H) 2021 shared task. Our team participated in task 5 and 6, both focused on the automatic classification of Twitter posts related to COVID-19. Task 5 was oriented on solving a binary classification problem, trying to identify self-reporting tweets of potential cases of COVID-19. Task 6 objective was to classify tweets containing COVID-19 symptoms. For both tasks we used models based on bidirectional encoder representations from transformers (BERT). Our objective was to determine if a model pretrained on a corpus in the domain of interest can outperform one trained on a much larger general domain corpus. Our F1 results were encouraging, 0.77 and 0.95 for task 5 and 6 respectively, having achieved the highest score among all the participants in the latter."
}
Markdown (Informal)
[UACH-INAOE at SMM4H: a BERT based approach for classification of COVID-19 Twitter posts](https://aclanthology.org/2021.smm4h-1.10/) (Valdes et al., SMM4H 2021)
ACL