@inproceedings{almeida-matos-2021-benchmarking,
title = "Benchmarking a transformer-{FREE} model for ad-hoc retrieval",
author = "Almeida, Tiago and
Matos, S{\'e}rgio",
editor = "Merlo, Paola and
Tiedemann, J{\"o}rg and
Tsarfaty, Reut",
booktitle = "Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Main Volume",
month = apr,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2021.eacl-main.293/",
doi = "10.18653/v1/2021.eacl-main.293",
pages = "3343--3353",
abstract = "Transformer-based {\textquotedblleft}behemoths{\textquotedblright} have grown in popularity, as well as structurally, shattering multiple NLP benchmarks along the way. However, their real-world usability remains a question. In this work, we empirically assess the feasibility of applying transformer-based models in real-world ad-hoc retrieval applications by comparison to a {\textquotedblleft}greener and more sustainable{\textquotedblright} alternative, comprising only 620 trainable parameters. We present an analysis of their efficacy and efficiency and show that considering limited computational resources, the lighter model running on the CPU achieves a 3 to 20 times speedup in training and 7 to 47 times in inference while maintaining a comparable retrieval performance. Code to reproduce the efficiency experiments is available on {\textquotedblleft}\url{https://github.com/bioinformatics-ua/EACL2021-reproducibility/}{\textquotedblleft}."
}
Markdown (Informal)
[Benchmarking a transformer-FREE model for ad-hoc retrieval](https://aclanthology.org/2021.eacl-main.293/) (Almeida & Matos, EACL 2021)
ACL
Tiago Almeida and Sérgio Matos. 2021. Benchmarking a transformer-FREE model for ad-hoc retrieval. In Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Main Volume, pages 3343–3353, Online. Association for Computational Linguistics.