@inproceedings{wang-komachi-2020-tmu,
    title = "{TMU}-{NLP} System Using {BERT}-based Pre-trained Model to the {NLP}-{TEA} {CGED} Shared Task 2020",
    author = "Wang, Hongfei  and
      Komachi, Mamoru",
    editor = "YANG, Erhong  and
      XUN, Endong  and
      ZHANG, Baolin  and
      RAO, Gaoqi",
    booktitle = "Proceedings of the 6th Workshop on Natural Language Processing Techniques for Educational Applications",
    month = dec,
    year = "2020",
    address = "Suzhou, China",
    publisher = "Association for Computational Linguistics",
    url = "https://preview.aclanthology.org/ingest-emnlp/2020.nlptea-1.11/",
    doi = "10.18653/v1/2020.nlptea-1.11",
    pages = "87--90",
    abstract = "In this paper, we introduce our system for NLPTEA 2020 shared task of Chinese Grammatical Error Diagnosis (CGED). In recent years, pre-trained models have been extensively studied, and several downstream tasks have benefited from their utilization. In this study, we treat the grammar error diagnosis (GED) task as a grammatical error correction (GEC) problem and propose a method that incorporates a pre-trained model into an encoder-decoder model to solve this problem."
}