@inproceedings{holgate-erk-2021-politeness,
  author    = {Holgate, Eric and
               Erk, Katrin},
  title     = {{``}Politeness, you simpleton!{''} retorted [{MASK}]: Masked prediction of literary characters},
  booktitle = {Proceedings of the 14th International Conference on Computational Semantics (IWCS)},
  month     = jun,
  year      = {2021},
  address   = {Groningen, The Netherlands (online)},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.iwcs-1.19},
  pages     = {202--211},
  abstract  = {What is the best way to learn embeddings for entities, and what can be learned from them? We consider this question for the case of literary characters. We address the highly challenging task of guessing, from a sentence in the novel, which character is being talked about, and we probe the embeddings to see what information they encode about their literary characters. We find that when continuously trained, entity embeddings do well at the masked entity prediction task, and that they encode considerable information about the traits and characteristics of the entities.},
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="holgate-erk-2021-politeness">
<titleInfo>
<title>“Politeness, you simpleton!” retorted [MASK]: Masked prediction of literary characters</title>
</titleInfo>
<name type="personal">
<namePart type="given">Eric</namePart>
<namePart type="family">Holgate</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Katrin</namePart>
<namePart type="family">Erk</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2021-06</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 14th International Conference on Computational Semantics (IWCS)</title>
</titleInfo>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Groningen, The Netherlands (online)</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>What is the best way to learn embeddings for entities, and what can be learned from them? We consider this question for the case of literary characters. We address the highly challenging task of guessing, from a sentence in the novel, which character is being talked about, and we probe the embeddings to see what information they encode about their literary characters. We find that when continuously trained, entity embeddings do well at the masked entity prediction task, and that they encode considerable information about the traits and characteristics of the entities.</abstract>
<identifier type="citekey">holgate-erk-2021-politeness</identifier>
<location>
<url>https://aclanthology.org/2021.iwcs-1.19</url>
</location>
<part>
<date>2021-06</date>
<extent unit="page">
<start>202</start>
<end>211</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T “Politeness, you simpleton!” retorted [MASK]: Masked prediction of literary characters
%A Holgate, Eric
%A Erk, Katrin
%S Proceedings of the 14th International Conference on Computational Semantics (IWCS)
%D 2021
%8 jun
%I Association for Computational Linguistics
%C Groningen, The Netherlands (online)
%F holgate-erk-2021-politeness
%X What is the best way to learn embeddings for entities, and what can be learned from them? We consider this question for the case of literary characters. We address the highly challenging task of guessing, from a sentence in the novel, which character is being talked about, and we probe the embeddings to see what information they encode about their literary characters. We find that when continuously trained, entity embeddings do well at the masked entity prediction task, and that they encode considerable information about the traits and characteristics of the entities.
%U https://aclanthology.org/2021.iwcs-1.19
%P 202-211
Markdown (Informal)
[“Politeness, you simpleton!” retorted \[MASK\]: Masked prediction of literary characters](https://aclanthology.org/2021.iwcs-1.19) (Holgate &amp; Erk, IWCS 2021)
ACL