@inproceedings{sazid-mercer-2022-unified,
title = "A Unified Representation and a Decoupled Deep Learning Architecture for Argumentation Mining of Students' Persuasive Essays",
author = "Sazid, Muhammad Tawsif and
Mercer, Robert E.",
editor = "Lapesa, Gabriella and
Schneider, Jodi and
Jo, Yohan and
Saha, Sougata",
booktitle = "Proceedings of the 9th Workshop on Argument Mining",
month = oct,
year = "2022",
address = "Online and in Gyeongju, Republic of Korea",
publisher = "International Conference on Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.argmining-1.6/",
pages = "74--83",
abstract = "We develop a novel unified representation for the argumentation mining task facilitating the extracting from text and the labelling of the non-argumentative units and argumentation components{---}premises, claims, and major claims{---}and the argumentative relations{---}premise to claim or premise in a support or attack relation, and claim to major-claim in a for or against relation{---}in an end-to-end machine learning pipeline. This tightly integrated representation combines the component and relation identification sub-problems and enables a unitary solution for detecting argumentation structures. This new representation together with a new deep learning architecture composed of a mixed embedding method, a multi-head attention layer, two biLSTM layers, and a final linear layer obtain state-of-the-art accuracy on the Persuasive Essays dataset. Also, we have introduced a decoupled solution to identify the entities and relations first, and on top of that, a second model is used to detect distance between the detected related components. An augmentation of the corpus (paragraph version) by including copies of major claims has further increased the performance."
}
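
The abstract describes the tagging architecture only at a high level (mixed embeddings feeding a multi-head attention layer, two biLSTM layers, and a final linear layer). The following is a minimal PyTorch sketch of that stack, not the authors' implementation: all class and parameter names, the hyperparameters, and the reading of "mixed embedding" as a frozen pretrained table concatenated with a trainable one are illustrative assumptions.

```python
import torch
import torch.nn as nn

class ArgMiningTagger(nn.Module):
    """Hypothetical sketch of the architecture named in the abstract:
    mixed embeddings -> multi-head attention -> two biLSTM layers ->
    linear layer producing per-token label logits."""

    def __init__(self, vocab_size, num_labels,
                 word_dim=300, extra_dim=100, hidden_dim=256, num_heads=4):
        super().__init__()
        # "Mixed" embedding, assumed here to be a frozen pretrained table
        # concatenated with a trainable one (an assumption, not the paper's spec).
        self.pretrained = nn.Embedding(vocab_size, word_dim)
        self.pretrained.weight.requires_grad = False
        self.trainable = nn.Embedding(vocab_size, extra_dim)
        embed_dim = word_dim + extra_dim

        # Single multi-head self-attention layer over the token sequence.
        self.attention = nn.MultiheadAttention(embed_dim, num_heads,
                                               batch_first=True)
        # Two stacked bidirectional LSTM layers.
        self.bilstm = nn.LSTM(embed_dim, hidden_dim, num_layers=2,
                              bidirectional=True, batch_first=True)
        # Final linear layer over the unified component/relation label set.
        self.classifier = nn.Linear(2 * hidden_dim, num_labels)

    def forward(self, token_ids):
        # token_ids: (batch, seq_len) integer indices.
        x = torch.cat([self.pretrained(token_ids),
                       self.trainable(token_ids)], dim=-1)
        attn_out, _ = self.attention(x, x, x)   # self-attention over tokens
        lstm_out, _ = self.bilstm(attn_out)     # (batch, seq_len, 2*hidden_dim)
        return self.classifier(lstm_out)        # per-token logits
```

In the unified-representation framing, each token would receive one label encoding both its component type and its relation information, so a single per-token classifier like the sketch above can cover both sub-problems; the decoupled variant mentioned in the abstract would instead add a second model for the distance between related components.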