@inproceedings{szanto-berend-2020-prosperamnet,
title = "{P}rosper{AM}net at {F}in{C}ausal 2020, Task 1 {\&} 2: Modeling causality in financial texts using multi-headed transformers",
author = "Sz{\'a}nt{\'o}, Zsolt and
Berend, G{\'a}bor",
booktitle = "Proceedings of the 1st Joint Workshop on Financial Narrative Processing and MultiLing Financial Summarisation",
month = dec,
year = "2020",
address = "Barcelona, Spain (Online)",
publisher = "COLING",
url = "https://aclanthology.org/2020.fnp-1.13",
pages = "80--84",
abstract = "This paper introduces our efforts at the FinCasual shared task for modeling causality in financial utterances. Our approach uses the commonly and successfully applied strategy of fine-tuning a transformer-based language model with a twist, i.e. we modified the training and inference mechanism such that our model produces multiple predictions for the same instance. By designing such a model that returns k{\textgreater}1 predictions at the same time, we not only obtain a more resource efficient training (as opposed to fine-tuning some pre-trained language model k independent times), but our results indicate that we are also capable of obtaining comparable or even better evaluation scores that way. We compare multiple strategies for combining the k predictions of our model. Our submissions got ranked third on both subtasks of the shared task.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="szanto-berend-2020-prosperamnet">
<titleInfo>
<title>ProsperAMnet at FinCausal 2020, Task 1 &amp; 2: Modeling causality in financial texts using multi-headed transformers</title>
</titleInfo>
<name type="personal">
<namePart type="given">Zsolt</namePart>
<namePart type="family">Szántó</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Gábor</namePart>
<namePart type="family">Berend</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2020-dec</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 1st Joint Workshop on Financial Narrative Processing and MultiLing Financial Summarisation</title>
</titleInfo>
<originInfo>
<publisher>COLING</publisher>
<place>
<placeTerm type="text">Barcelona, Spain (Online)</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>This paper introduces our efforts at the FinCausal shared task for modeling causality in financial utterances. Our approach uses the commonly and successfully applied strategy of fine-tuning a transformer-based language model, with a twist: we modified the training and inference mechanism such that our model produces multiple predictions for the same instance. By designing a model that returns k&gt;1 predictions at the same time, we not only obtain more resource-efficient training (as opposed to fine-tuning some pre-trained language model k independent times), but our results indicate that we can also obtain comparable or even better evaluation scores that way. We compare multiple strategies for combining the k predictions of our model. Our submissions were ranked third on both subtasks of the shared task.</abstract>
<identifier type="citekey">szanto-berend-2020-prosperamnet</identifier>
<location>
<url>https://aclanthology.org/2020.fnp-1.13</url>
</location>
<part>
<date>2020-dec</date>
<extent unit="page">
<start>80</start>
<end>84</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T ProsperAMnet at FinCausal 2020, Task 1 & 2: Modeling causality in financial texts using multi-headed transformers
%A Szántó, Zsolt
%A Berend, Gábor
%S Proceedings of the 1st Joint Workshop on Financial Narrative Processing and MultiLing Financial Summarisation
%D 2020
%8 dec
%I COLING
%C Barcelona, Spain (Online)
%F szanto-berend-2020-prosperamnet
%X This paper introduces our efforts at the FinCausal shared task for modeling causality in financial utterances. Our approach uses the commonly and successfully applied strategy of fine-tuning a transformer-based language model, with a twist: we modified the training and inference mechanism such that our model produces multiple predictions for the same instance. By designing a model that returns k>1 predictions at the same time, we not only obtain more resource-efficient training (as opposed to fine-tuning some pre-trained language model k independent times), but our results indicate that we can also obtain comparable or even better evaluation scores that way. We compare multiple strategies for combining the k predictions of our model. Our submissions were ranked third on both subtasks of the shared task.
%U https://aclanthology.org/2020.fnp-1.13
%P 80-84
Markdown (Informal)
[ProsperAMnet at FinCausal 2020, Task 1 & 2: Modeling causality in financial texts using multi-headed transformers](https://aclanthology.org/2020.fnp-1.13) (Szántó & Berend, FNP 2020)
ACL
Zsolt Szántó and Gábor Berend. 2020. [ProsperAMnet at FinCausal 2020, Task 1 & 2: Modeling causality in financial texts using multi-headed transformers](https://aclanthology.org/2020.fnp-1.13). In Proceedings of the 1st Joint Workshop on Financial Narrative Processing and MultiLing Financial Summarisation, pages 80–84, Barcelona, Spain (Online). COLING.