@inproceedings{simoulin-crabbe-2021-many,
title = "How Many Layers and Why? {A}n Analysis of the Model Depth in Transformers",
author = "Simoulin, Antoine and
Crabb{\'e}, Benoit",
editor = "Kabbara, Jad and
Lin, Haitao and
Paullada, Amandalynne and
Vamvas, Jannis",
booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.acl-srw.23/",
doi = "10.18653/v1/2021.acl-srw.23",
pages = "221--228",
abstract = "In this study, we investigate the role of the multiple layers in deep transformer models. We design a variant of Albert that dynamically adapts the number of layers for each token of the input. The key specificity of Albert is that weights are tied across layers. Therefore, the stack of encoder layers iteratively repeats the application of the same transformation function on the input. We interpret the repetition of this application as an iterative process where the token contextualized representations are progressively refined. We analyze this process at the token level during pre-training, fine-tuning, and inference. We show that tokens do not require the same amount of iterations and that difficult or crucial tokens for the task are subject to more iterations."
}
Markdown (Informal)
[How Many Layers and Why? An Analysis of the Model Depth in Transformers](https://aclanthology.org/2021.acl-srw.23/) (Simoulin & Crabbé, ACL-IJCNLP 2021)
ACL
Antoine Simoulin and Benoit Crabbé. 2021. How Many Layers and Why? An Analysis of the Model Depth in Transformers. In Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop, pages 221–228, Online. Association for Computational Linguistics.
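
The abstract describes an ALBERT-style encoder whose single weight-tied layer is applied repeatedly, with the number of iterations chosen per token. The sketch below is a hedged illustration of that general idea, not the paper's actual implementation: it reuses one `nn.TransformerEncoderLayer` at every depth and adds an ACT-style halting score so that each token stops being refined once its cumulative halting mass crosses a threshold. The class name, `max_iters`, and `halt_threshold` are illustrative assumptions.

```python
# Minimal sketch (assumed, not the paper's code): a weight-tied encoder layer
# applied iteratively, with per-token adaptive depth via a halting score.
import torch
import torch.nn as nn


class SharedDepthEncoder(nn.Module):
    def __init__(self, d_model=64, n_heads=4, max_iters=12, halt_threshold=0.99):
        super().__init__()
        # One transformer layer whose weights are reused at every depth,
        # mirroring ALBERT's cross-layer parameter tying.
        self.layer = nn.TransformerEncoderLayer(
            d_model, n_heads, dim_feedforward=4 * d_model, batch_first=True
        )
        self.halt = nn.Linear(d_model, 1)  # per-token halting probability
        self.max_iters = max_iters
        self.halt_threshold = halt_threshold

    def forward(self, x):
        # x: (batch, seq_len, d_model)
        batch, seq_len, _ = x.shape
        cumulative = x.new_zeros(batch, seq_len)      # accumulated halting mass
        still_running = torch.ones_like(cumulative)   # 1 while a token keeps iterating
        n_updates = torch.zeros_like(cumulative)      # iterations used per token

        for _ in range(self.max_iters):
            p = torch.sigmoid(self.halt(x)).squeeze(-1)  # (batch, seq_len)
            cumulative = cumulative + p * still_running
            # Tokens whose cumulative halting mass crosses the threshold stop updating.
            still_running = (cumulative < self.halt_threshold).float() * still_running
            n_updates = n_updates + still_running

            refined = self.layer(x)
            # Only tokens that are still running receive the refined representation.
            mask = still_running.unsqueeze(-1)
            x = mask * refined + (1.0 - mask) * x

            if still_running.sum() == 0:
                break
        return x, n_updates  # n_updates shows how many iterations each token received


if __name__ == "__main__":
    enc = SharedDepthEncoder()
    tokens = torch.randn(2, 7, 64)
    out, iters_per_token = enc(tokens)
    print(out.shape, iters_per_token)
```

The per-token `n_updates` output corresponds to the kind of quantity the paper analyzes (how many refinement iterations a given token receives); the halting mechanism itself is one possible choice and may differ from the authors' design.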