@inproceedings{tanti-etal-2021-language,
title = "On the Language-specificity of Multilingual {BERT} and the Impact of Fine-tuning",
author = "Tanti, Marc and
van der Plas, Lonneke and
Borg, Claudia and
Gatt, Albert",
editor = "Bastings, Jasmijn and
Belinkov, Yonatan and
Dupoux, Emmanuel and
Giulianelli, Mario and
Hupkes, Dieuwke and
Pinter, Yuval and
Sajjad, Hassan",
booktitle = "Proceedings of the Fourth BlackboxNLP Workshop on Analyzing and Interpreting Neural Networks for NLP",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Author-page-Marten-During-lu/2021.blackboxnlp-1.15/",
doi = "10.18653/v1/2021.blackboxnlp-1.15",
pages = "214--227",
abstract = "Recent work has shown evidence that the knowledge acquired by multilingual BERT (mBERT) has two components: a language-specific and a language-neutral one. This paper analyses the relationship between them, in the context of fine-tuning on two tasks {--} POS tagging and natural language inference {--} which require the model to bring to bear different degrees of language-specific knowledge. Visualisations reveal that mBERT loses the ability to cluster representations by language after fine-tuning, a result that is supported by evidence from language identification experiments. However, further experiments on {\textquoteleft}unlearning' language-specific representations using gradient reversal and iterative adversarial learning are shown not to add further improvement to the language-independent component over and above the effect of fine-tuning. The results presented here suggest that the process of fine-tuning causes a reorganisation of the model`s limited representational capacity, enhancing language-independent representations at the expense of language-specific ones."
}