@inproceedings{mao-etal-2024-metapro,
  title     = {{MetaPro} 2.0: Computational Metaphor Processing on the Effectiveness of Anomalous Language Modeling},
  author    = {Mao, Rui and
               He, Kai and
               Ong, Claudia and
               Liu, Qian and
               Cambria, Erik},
  editor    = {Ku, Lun-Wei and
               Martins, Andre and
               Srikumar, Vivek},
  booktitle = {Findings of the Association for Computational Linguistics: {ACL} 2024},
  month     = aug,
  year      = {2024},
  address   = {Bangkok, Thailand},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.findings-acl.590/},
  doi       = {10.18653/v1/2024.findings-acl.590},
  pages     = {9891--9908},
  abstract  = {Metaphor interpretation is a difficult task in natural language understanding. The development of relevant techniques in this domain is slow, mostly because of the lack of large annotated datasets and effective pre-trained language models (PLMs) for metaphor learning. Thus, we propose a large annotated dataset and a PLM for the metaphor interpretation task. Our foundation model is based on a novel anomalous language modeling (ALM) method, which we benchmark with comparable PLM baselines on the new dataset, finding that it largely improves model performance on metaphor identification and interpretation.},
}
@comment{
Markdown (Informal)
[MetaPro 2.0: Computational Metaphor Processing on the Effectiveness of Anomalous Language Modeling](https://aclanthology.org/2024.findings-acl.590/) (Mao et al., Findings 2024)
ACL
}