@inproceedings{xu-etal-2024-reduction-synthesis,
title = "Reduction-Synthesis: Plug-and-Play for Sentiment Style Transfer",
author = "Xu, Sheng and
Fukumoto, Fumiyo and
Suzuki, Yoshimi",
editor = "Mahamood, Saad and
Minh, Nguyen Le and
Ippolito, Daphne",
booktitle = "Proceedings of the 17th International Natural Language Generation Conference",
month = sep,
year = "2024",
address = "Tokyo, Japan",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.inlg-main.28/",
pages = "330--343",
abstract = "Sentiment style transfer (SST), a variant of text style transfer (TST), has recently attracted extensive interest. Some disentangling-based approaches have improved performance, while most still struggle to properly transfer the input as the sentiment style is intertwined with the content of the text. To alleviate the issue, we propose a plug-and-play method that leverages an iterative self-refinement algorithm with a large language model (LLM). Our approach separates the straightforward Seq2Seq generation into two phases: (1) Reduction phase which generates a style-free sequence for a given text, and (2) Synthesis phase which generates the target text by leveraging the sequence output from the first phase. The experimental results on two datasets demonstrate that our transfer strategy is effective for challenging SST cases where the baseline methods perform poorly. Our code is available online."
}
Markdown (Informal)
[Reduction-Synthesis: Plug-and-Play for Sentiment Style Transfer](https://preview.aclanthology.org/jlcl-multiple-ingestion/2024.inlg-main.28/) (Xu et al., INLG 2024)
ACL
Sheng Xu, Fumiyo Fukumoto, and Yoshimi Suzuki. 2024. Reduction-Synthesis: Plug-and-Play for Sentiment Style Transfer. In Proceedings of the 17th International Natural Language Generation Conference, pages 330–343, Tokyo, Japan. Association for Computational Linguistics.
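
The abstract describes a two-phase, plug-and-play pipeline: a Reduction phase that produces a style-free version of the input, and a Synthesis phase that regenerates the text in the target sentiment, wrapped in an iterative self-refinement loop driven by an LLM. The sketch below is only a minimal Python illustration of that idea, not the authors' released code: the prompts, the generic `llm` callable, the `style_score` classifier, and all function names are hypothetical stand-ins.

```python
"""Illustrative sketch (not the paper's implementation) of a two-phase
Reduction-Synthesis pipeline for sentiment style transfer."""
from typing import Callable


def reduce_to_style_free(text: str, llm: Callable[[str], str]) -> str:
    """Reduction phase: ask the LLM for a sentiment-neutral rewrite (hypothetical prompt)."""
    prompt = (
        "Rewrite the following sentence so that it keeps the content "
        f"but carries no sentiment:\n{text}"
    )
    return llm(prompt)


def synthesize_target(style_free: str, target_style: str,
                      llm: Callable[[str], str]) -> str:
    """Synthesis phase: regenerate the text in the target sentiment (hypothetical prompt)."""
    prompt = (
        f"Rewrite the following sentence in a {target_style} tone, "
        f"preserving its content:\n{style_free}"
    )
    return llm(prompt)


def reduction_synthesis(text: str, target_style: str,
                        llm: Callable[[str], str],
                        style_score: Callable[[str, str], float],
                        max_iters: int = 3,
                        threshold: float = 0.9) -> str:
    """Plug-and-play transfer with a simple iterative self-refinement loop:
    keep re-synthesizing until the output is judged to match the target style."""
    style_free = reduce_to_style_free(text, llm)
    candidate = synthesize_target(style_free, target_style, llm)
    for _ in range(max_iters):
        if style_score(candidate, target_style) >= threshold:
            break
        # Feed the current candidate back through synthesis as a refinement step.
        candidate = synthesize_target(candidate, target_style, llm)
    return candidate
```

Any sentiment classifier that returns a confidence for the target label could stand in for `style_score`; the paper's actual prompts, models, and refinement criterion may differ, so refer to the authors' code linked from the paper for the real method.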