@inproceedings{aharoni-goldberg-2020-unsupervised,
title = "Unsupervised Domain Clusters in Pretrained Language Models",
author = "Aharoni, Roee and
Goldberg, Yoav",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.acl-main.692",
doi = "10.18653/v1/2020.acl-main.692",
pages = "7747--7763",
abstract = "The notion of {``}in-domain data{''} in NLP is often over-simplistic and vague, as textual data varies in many nuanced linguistic aspects such as topic, style or level of formality. In addition, domain labels are many times unavailable, making it challenging to build domain-specific systems. We show that massive pre-trained language models implicitly learn sentence representations that cluster by domains without supervision {--} suggesting a simple data-driven definition of domains in textual data. We harness this property and propose domain data selection methods based on such models, which require only a small set of in-domain monolingual data. We evaluate our data selection methods for neural machine translation across five diverse domains, where they outperform an established approach as measured by both BLEU and precision and recall with respect to an oracle selection.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="aharoni-goldberg-2020-unsupervised">
<titleInfo>
<title>Unsupervised Domain Clusters in Pretrained Language Models</title>
</titleInfo>
<name type="personal">
<namePart type="given">Roee</namePart>
<namePart type="family">Aharoni</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Yoav</namePart>
<namePart type="family">Goldberg</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-07</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Online</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>The notion of “in-domain data” in NLP is often over-simplistic and vague, as textual data varies in many nuanced linguistic aspects such as topic, style or level of formality. In addition, domain labels are many times unavailable, making it challenging to build domain-specific systems. We show that massive pre-trained language models implicitly learn sentence representations that cluster by domains without supervision – suggesting a simple data-driven definition of domains in textual data. We harness this property and propose domain data selection methods based on such models, which require only a small set of in-domain monolingual data. We evaluate our data selection methods for neural machine translation across five diverse domains, where they outperform an established approach as measured by both BLEU and precision and recall with respect to an oracle selection.</abstract>
<identifier type="citekey">aharoni-goldberg-2020-unsupervised</identifier>
<identifier type="doi">10.18653/v1/2020.acl-main.692</identifier>
<location>
<url>https://aclanthology.org/2020.acl-main.692</url>
</location>
<part>
<date>2020-07</date>
<extent unit="page">
<start>7747</start>
<end>7763</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Unsupervised Domain Clusters in Pretrained Language Models
%A Aharoni, Roee
%A Goldberg, Yoav
%S Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics
%D 2020
%8 July
%I Association for Computational Linguistics
%C Online
%F aharoni-goldberg-2020-unsupervised
%X The notion of “in-domain data” in NLP is often over-simplistic and vague, as textual data varies in many nuanced linguistic aspects such as topic, style or level of formality. In addition, domain labels are many times unavailable, making it challenging to build domain-specific systems. We show that massive pre-trained language models implicitly learn sentence representations that cluster by domains without supervision – suggesting a simple data-driven definition of domains in textual data. We harness this property and propose domain data selection methods based on such models, which require only a small set of in-domain monolingual data. We evaluate our data selection methods for neural machine translation across five diverse domains, where they outperform an established approach as measured by both BLEU and precision and recall with respect to an oracle selection.
%R 10.18653/v1/2020.acl-main.692
%U https://aclanthology.org/2020.acl-main.692
%U https://doi.org/10.18653/v1/2020.acl-main.692
%P 7747-7763
Markdown (Informal)
[Unsupervised Domain Clusters in Pretrained Language Models](https://aclanthology.org/2020.acl-main.692) (Aharoni & Goldberg, ACL 2020)
ACL