@inproceedings{fei-etal-2020-retrofitting,
title = "Retrofitting Structure-aware Transformer Language Model for End Tasks",
author = "Fei, Hao and
Ren, Yafeng and
Ji, Donghong",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.emnlp-main.168/",
doi = "10.18653/v1/2020.emnlp-main.168",
pages = "2151--2161",
    abstract = "We consider retrofitting a structure-aware Transformer language model for facilitating end tasks by proposing to exploit syntactic distance to encode both the phrasal constituency and dependency connection into the language model. A middle-layer structural learning strategy is leveraged for structure integration, accomplished with main semantic task training under a multi-task learning scheme. Experimental results show that the retrofitted structure-aware Transformer language model achieves improved perplexity while inducing accurate syntactic phrases. By performing structure-aware fine-tuning, our model achieves significant improvements for both semantic- and syntactic-dependent tasks."
}
Markdown (Informal)
[Retrofitting Structure-aware Transformer Language Model for End Tasks](https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.emnlp-main.168/) (Fei et al., EMNLP 2020)
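As a rough illustration of the multi-task setup the abstract describes (a main semantic objective trained jointly with syntactic-distance supervision attached to a middle Transformer layer), here is a minimal PyTorch sketch. All module names, hyperparameters, the choice of middle layer, and the loss weighting are illustrative assumptions, not the authors' implementation.

```python
import torch
import torch.nn as nn


class StructureAwareLM(nn.Module):
    """Transformer LM whose middle-layer states feed a syntactic-distance head (illustrative sketch)."""

    def __init__(self, vocab_size=10000, d_model=256, n_heads=4,
                 n_layers=6, structure_layer=3):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, d_model)
        self.layers = nn.ModuleList([
            nn.TransformerEncoderLayer(d_model, n_heads,
                                       dim_feedforward=4 * d_model,
                                       batch_first=True)
            for _ in range(n_layers)
        ])
        self.structure_layer = structure_layer         # middle layer used for structure learning
        self.distance_head = nn.Linear(d_model, 1)     # predicts one syntactic distance per token
        self.lm_head = nn.Linear(d_model, vocab_size)  # main semantic (language-model) objective

    def forward(self, tokens):
        h = self.embed(tokens)
        middle = h
        for i, layer in enumerate(self.layers):
            h = layer(h)
            if i == self.structure_layer:
                middle = h                              # keep the middle-layer representations
        distances = self.distance_head(middle).squeeze(-1)
        logits = self.lm_head(h)
        return logits, distances


def multitask_loss(logits, distances, targets, gold_distances, alpha=0.5):
    """Main-task loss plus a weighted syntactic-distance regression loss."""
    lm_loss = nn.functional.cross_entropy(
        logits.reshape(-1, logits.size(-1)), targets.reshape(-1))
    struct_loss = nn.functional.mse_loss(distances, gold_distances)
    return lm_loss + alpha * struct_loss


# Illustrative usage with random data (shapes only; real training would use
# tokenized text and syntactic distances derived from gold parse trees).
model = StructureAwareLM()
tokens = torch.randint(0, 10000, (2, 16))
gold_dist = torch.rand(2, 16)
logits, distances = model(tokens)
loss = multitask_loss(logits, distances, tokens, gold_dist)
loss.backward()
```

The only structural point the sketch tries to capture is where the auxiliary supervision attaches: the distance head reads the middle-layer states, while the main head reads the final layer, and the two losses are combined under a single multi-task objective.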