@inproceedings{kim-hwang-2024-need,
  title     = {All You Need is Attention: Lightweight Attention-based Data Augmentation for Text Classification},
  author    = {Kim, Junehyung and
               Hwang, Sungjae},
  editor    = {Al-Onaizan, Yaser and
               Bansal, Mohit and
               Chen, Yun-Nung},
  % {EMNLP} braced so the acronym survives any style that recases booktitle.
  booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2024},
  month     = nov,
  year      = {2024},
  address   = {Miami, Florida, USA},
  publisher = {Association for Computational Linguistics},
  % Canonical ACL Anthology URL; the exported value pointed at a temporary
  % preview/ingestion host (preview.aclanthology.org/jlcl-multiple-ingestion/).
  url       = {https://aclanthology.org/2024.findings-emnlp.752/},
  doi       = {10.18653/v1/2024.findings-emnlp.752},
  pages     = {12866--12873},
  abstract  = {This paper introduces LADAM, a novel method for enhancing the performance of text classification tasks. LADAM employs attention mechanisms to exchange semantically similar words between sentences. This approach generates a greater diversity of synthetic sentences compared to simpler operations like random insertions, while maintaining the context of the original sentences. Additionally, LADAM is an easy-to-use, lightweight technique that does not require external datasets or large language models. Our experimental results across five datasets demonstrate that LADAM consistently outperforms baseline methods across diverse text classification conditions.},
}
Markdown (Informal)
[All You Need is Attention: Lightweight Attention-based Data Augmentation for Text Classification](https://aclanthology.org/2024.findings-emnlp.752/) (Kim & Hwang, Findings 2024)
ACL