@inproceedings{zhang-etal-2023-nag,
title = "{NAG}-{NER}: a Unified Non-Autoregressive Generation Framework for Various {NER} Tasks",
author = "Zhang, Xinpeng and
Tan, Ming and
Zhang, Jingfan and
Zhu, Wei",
editor = "Sitaram, Sunayana and
Beigman Klebanov, Beata and
Williams, Jason D",
booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 5: Industry Track)",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.acl-industry.65/",
doi = "10.18653/v1/2023.acl-industry.65",
pages = "676--686",
abstract = "Recently, the recognition of flat, nested, and discontinuous entities by a unified generative model framework has received increasing attention both in the research field and industry. However, the current generative NER methods force the entities to be generated in a predefined order, suffering from error propagation and inefficient decoding. In this work, we propose a unified non-autoregressive generation (NAG) framework for general NER tasks, referred to as NAG-NER. First, we propose to generate entities as a set instead of a sequence, avoiding error propagation. Second, we propose incorporating NAG in NER tasks for efficient decoding by treating each entity as a target sequence. Third, to enhance the generation performances of the NAG decoder, we employ the NAG encoder to detect potential entity mentions. Extensive experiments show that our NAG-NER model outperforms the state-of-the-art generative NER models on three benchmark NER datasets of different types and two of our proprietary NER tasks.{\textbackslash}footnote{\{}Code will be publicly available to the research community upon acceptance.{\}}"
}