@inproceedings{xu-etal-2020-dombert,
    title     = {{DomBERT}: Domain-oriented Language Model for Aspect-based Sentiment Analysis},
    author    = {Xu, Hu and
                 Liu, Bing and
                 Shu, Lei and
                 Yu, Philip},
    editor    = {Cohn, Trevor and
                 He, Yulan and
                 Liu, Yang},
    booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2020},
    month     = nov,
    year      = {2020},
    address   = {Online},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2020.findings-emnlp.156/},
    doi       = {10.18653/v1/2020.findings-emnlp.156},
    pages     = {1725--1731},
    abstract  = {This paper focuses on learning domain-oriented language models driven by end tasks, which aims to combine the worlds of both general-purpose language models (such as ELMo and BERT) and domain-specific language understanding. We propose DomBERT, an extension of BERT to learn from both in-domain corpus and relevant domain corpora. This helps in learning domain language models with low-resources. Experiments are conducted on an assortment of tasks in aspect-based sentiment analysis (ABSA), demonstrating promising results.}
}
Markdown (Informal)
[DomBERT: Domain-oriented Language Model for Aspect-based Sentiment Analysis](https://aclanthology.org/2020.findings-emnlp.156/) (Xu et al., Findings 2020)
ACL