@inproceedings{stefanik-etal-2022-adaptor,
title = "Adaptor: Objective-Centric Adaptation Framework for Language Models",
author = "{\v{S}}tef{\'a}nik, Michal and
Novotn{\'y}, V{\'i}t and
Groverov{\'a}, Nikola and
Sojka, Petr",
editor = "Basile, Valerio and
Kozareva, Zornitsa and
Stajner, Sanja",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics: System Demonstrations",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2022.acl-demo.26/",
doi = "10.18653/v1/2022.acl-demo.26",
pages = "261--269",
    abstract = "This paper introduces the Adaptor library, which transposes the traditional model-centric approach composed of pre-training + fine-tuning steps into an objective-centric approach, composing the training process from applications of selected objectives. We survey research directions that can benefit from enhanced objective-centric experimentation in multitask training, custom objectives development, dynamic training curricula, or domain adaptation. Adaptor aims to ease the reproducibility of these research directions in practice. Finally, we demonstrate the practical applicability of Adaptor in selected unsupervised domain adaptation scenarios."
}
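The abstract's core idea, composing training from a set of objectives rather than a fixed pre-train + fine-tune pipeline, can be illustrated with a minimal sketch. The code below is not the Adaptor library's API; the names (`Objective`, `parallel_schedule`, the toy linear backbone) are illustrative assumptions written in plain PyTorch to show the pattern: each objective owns its data stream and loss, all objectives share a backbone, and a schedule interleaves them during training.

```python
# Illustrative sketch of objective-centric training (NOT the Adaptor API).
import torch
from torch import nn
from itertools import cycle


class Objective:
    """One training objective: owns its data and loss, shares the backbone."""
    def __init__(self, name, backbone, head, batches):
        self.name = name
        self.model = nn.Sequential(backbone, head)  # shared backbone + own head
        self.batches = cycle(batches)               # objective-specific data stream

    def compute_loss(self):
        x, y = next(self.batches)
        return nn.functional.mse_loss(self.model(x), y)


def parallel_schedule(objectives, steps):
    """Round-robin over objectives, mimicking a parallel multi-task curriculum."""
    for step in range(steps):
        yield objectives[step % len(objectives)]


if __name__ == "__main__":
    backbone = nn.Linear(8, 8)  # stands in for a shared language model
    objectives = [
        Objective("unsupervised_domain", backbone, nn.Linear(8, 8),
                  [(torch.randn(4, 8), torch.randn(4, 8))]),
        Objective("supervised_task", backbone, nn.Linear(8, 1),
                  [(torch.randn(4, 8), torch.randn(4, 1))]),
    ]
    # Deduplicate shared backbone parameters before handing them to the optimizer.
    params = list({id(p): p for o in objectives for p in o.model.parameters()}.values())
    optim = torch.optim.AdamW(params, lr=1e-3)

    for objective in parallel_schedule(objectives, steps=10):
        loss = objective.compute_loss()
        optim.zero_grad()
        loss.backward()
        optim.step()
```

Swapping `parallel_schedule` for a different iteration order is what would realize the dynamic curricula and domain-adaptation scenarios the abstract mentions; the sketch only demonstrates the compositional structure.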
Markdown (Informal)
[Adaptor: Objective-Centric Adaptation Framework for Language Models](https://aclanthology.org/2022.acl-demo.26/) (Štefánik et al., ACL 2022)