@inproceedings{yang-etal-2022-factmix,
title = "{F}act{M}ix: Using a Few Labeled In-domain Examples to Generalize to Cross-domain Named Entity Recognition",
author = "Yang, Linyi and
Yuan, Lifan and
Cui, Leyang and
Gao, Wenyang and
Zhang, Yue",
editor = "Calzolari, Nicoletta and
Huang, Chu-Ren and
Kim, Hansaem and
Pustejovsky, James and
Wanner, Leo and
Choi, Key-Sun and
Ryu, Pum-Mo and
Chen, Hsin-Hsi and
Donatelli, Lucia and
Ji, Heng and
Kurohashi, Sadao and
Paggio, Patrizia and
Xue, Nianwen and
Kim, Seokhwan and
Hahm, Younggyun and
He, Zhong and
Lee, Tony Kyungil and
Santus, Enrico and
Bond, Francis and
Na, Seung-Hoon",
booktitle = "Proceedings of the 29th International Conference on Computational Linguistics",
month = oct,
year = "2022",
address = "Gyeongju, Republic of Korea",
publisher = "International Committee on Computational Linguistics",
url = "https://aclanthology.org/2022.coling-1.476/",
pages = "5360--5371",
abstract = "Few-shot Named Entity Recognition (NER) is imperative for entity tagging in limited-resource domains and has thus received considerable attention in recent years. Existing approaches for few-shot NER are evaluated mainly under in-domain settings. In contrast, little is known about how these inherently faithful models perform in cross-domain NER using a few labeled in-domain examples. This paper proposes a two-step rationale-centric data augmentation method to improve the model's generalization ability. Results on several datasets show that our model-agnostic method significantly improves the performance on cross-domain NER tasks compared to previous state-of-the-art methods, including counterfactual data augmentation and prompt-tuning methods."
}