@inproceedings{gillin-2022-encoder,
  title     = {Is {Encoder}-{Decoder} {Transformer} the Shiny Hammer?},
  author    = {Gillin, Nat},
  editor    = {Scherrer, Yves and
               Jauhiainen, Tommi and
               Ljube{\v{s}}i{\'c}, Nikola and
               Nakov, Preslav and
               Tiedemann, J{\"o}rg and
               Zampieri, Marcos},
  booktitle = {Proceedings of the Ninth Workshop on {NLP} for Similar Languages, Varieties and Dialects},
  month     = oct,
  year      = {2022},
  address   = {Gyeongju, Republic of Korea},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.vardial-1.9/},
  pages     = {80--85},
  abstract  = {We present an approach to multi-class classification using an encoder-decoder transformer model. We trained a network to identify French varieties using the same scripts we use to train an encoder-decoder machine translation model. With some slight modification to the data preparation and inference parameters, we showed that the same tools used for machine translation can be easily re-used to achieve competitive performance for classification. On the French Dialectal Identification (FDI) task, we scored 32.4 on weighted F1, but this is far from a simple naive bayes classifier that outperforms a neural encoder-decoder model at 41.27 weighted F1.},
}
Markdown (Informal)
[Is Encoder-Decoder Transformer the Shiny Hammer?](https://aclanthology.org/2022.vardial-1.9/) (Gillin, VarDial 2022)
ACL
- Nat Gillin. 2022. Is Encoder-Decoder Transformer the Shiny Hammer? In Proceedings of the Ninth Workshop on NLP for Similar Languages, Varieties and Dialects, pages 80–85, Gyeongju, Republic of Korea. Association for Computational Linguistics.