@inproceedings{miao-etal-2020-right,
title = "Do you have the right scissors? Tailoring Pre-trained Language Models via {M}onte-{C}arlo Methods",
author = "Miao, Ning and
Song, Yuxuan and
Zhou, Hao and
Li, Lei",
editor = "Jurafsky, Dan and
Chai, Joyce and
Schluter, Natalie and
Tetreault, Joel",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.acl-main.314/",
doi = "10.18653/v1/2020.acl-main.314",
pages = "3436--3441",
abstract = "It has been a common approach to pre-train a language model on a large corpus and fine-tune it on task-specific data. In practice, we observe that fine-tuning a pre-trained model on a small dataset may lead to over- and/or under-estimate problem. In this paper, we propose MC-Tailor, a novel method to alleviate the above issue in text generation tasks by truncating and transferring the probability mass from over-estimated regions to under-estimated ones. Experiments on a variety of text generation datasets show that MC-Tailor consistently and significantly outperforms the fine-tuning approach."
}
Markdown (Informal)
[Do you have the right scissors? Tailoring Pre-trained Language Models via Monte-Carlo Methods](https://aclanthology.org/2020.acl-main.314/) (Miao et al., ACL 2020)
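The abstract describes MC-Tailor only at a high level: moving probability mass from regions the fine-tuned model over-estimates to regions it under-estimates. As a rough intuition for that idea, the sketch below implements plain rejection sampling against an estimated density ratio. It is an assumption-laden illustration, not the paper's algorithm: `sample_fn`, `log_p_data_est`, and the ratio bound `log_m` are hypothetical inputs, and the paper's actual samplers and efficiency variants are not reproduced here.

```python
import math
import random

def tailor_sample(sample_fn, log_p_model, log_p_data_est, log_m):
    """Rejection-sampling sketch of mass reshaping (illustrative only).

    Draw x from the fine-tuned model and accept it with probability
    proportional to the estimated ratio p_data(x) / p_model(x), so
    over-estimated samples are truncated and the accepted samples
    follow the estimated data distribution.

    sample_fn        -- draws one sample x from the fine-tuned model
    log_p_model      -- model log-probability of x
    log_p_data_est   -- estimated data log-probability of x (hypothetical,
                        e.g. from a density-ratio classifier)
    log_m            -- log of a bound M with p_data(x) <= M * p_model(x)
    """
    while True:
        x = sample_fn()
        # Acceptance probability r(x) / M, computed in log space and
        # clipped at 1 to stay a valid probability.
        log_accept = log_p_data_est(x) - log_p_model(x) - log_m
        if random.random() < math.exp(min(0.0, log_accept)):
            return x

if __name__ == "__main__":
    # Toy usage: the "model" over-estimates token "a"; the rejection
    # step restores the assumed 1:3 odds between "a" and "b".
    model_p = {"a": 0.5, "b": 0.5}
    target_p = {"a": 0.25, "b": 0.75}
    draw = lambda: random.choices(list(model_p), weights=model_p.values())[0]
    print(tailor_sample(
        sample_fn=draw,
        log_p_model=lambda x: math.log(model_p[x]),
        log_p_data_est=lambda x: math.log(target_p[x]),
        log_m=math.log(1.5),  # max ratio here is 0.75 / 0.5 = 1.5
    ))
```

In the toy example, "a" is accepted with probability (0.5/1.5) = 1/3 and "b" always, so the accepted samples follow the 0.25/0.75 target exactly; the cost of this simplicity is wasted rejected samples, which is the kind of inefficiency the paper's Monte-Carlo variants are designed to reduce.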