@inproceedings{mutlu-etal-2019-team,
    title = "Team {H}oward {B}eale at {S}em{E}val-2019 Task 4: Hyperpartisan News Detection with {BERT}",
    author = "Mutlu, Osman  and
      Can, Ozan Arkan  and
      Dayanik, Erenay",
    editor = "May, Jonathan  and
      Shutova, Ekaterina  and
      Herbelot, Aurelie  and
      Zhu, Xiaodan  and
      Apidianaki, Marianna  and
      Mohammad, Saif M.",
    booktitle = "Proceedings of the 13th International Workshop on Semantic Evaluation",
    month = jun,
    year = "2019",
    address = "Minneapolis, Minnesota, USA",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/S19-2175/",
    doi = "10.18653/v1/S19-2175",
    pages = "1007--1011",
    abstract = "This paper describes our system for SemEval-2019 Task 4: Hyperpartisan News Detection (Kiesel et al., 2019). We use the pretrained BERT (Devlin et al., 2018) architecture and investigate the effect of different fine-tuning regimes on the final classification task. We show that additional pretraining on the news domain improves performance on the Hyperpartisan News Detection task. Our system ranked 8th out of 42 teams with 78.3{\%} accuracy on the held-out test dataset."
}