@inproceedings{shi-etal-2023-adaptive,
title = "Adaptive End-to-End Metric Learning for Zero-Shot Cross-Domain Slot Filling",
author = "Shi, Yuanjun and
Wu, Linzhi and
Shao, Minglai",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.emnlp-main.387/",
doi = "10.18653/v1/2023.emnlp-main.387",
pages = "6291--6301",
    abstract = "Recently, slot filling has witnessed great development thanks to deep learning and the availability of large-scale annotated data. However, it poses a critical challenge to handle a novel domain whose samples are never seen during training. The recognition performance might be greatly degraded due to severe domain shifts. Most prior works deal with this problem in a two-pass pipeline manner based on metric learning. In practice, these dominant pipeline models may be limited in computational efficiency and generalization capacity because of non-parallel inference and context-free discrete label embeddings. To this end, we re-examine the typical metric-based methods, and propose a new adaptive end-to-end metric learning scheme for the challenging zero-shot slot filling. Considering simplicity, efficiency and generalizability, we present a cascade-style joint learning framework coupled with context-aware soft label representations and slot-level contrastive representation learning to mitigate the data and label shift problems effectively. Extensive experiments on public benchmarks demonstrate the superiority of the proposed approach over a series of competitive baselines."
}
Markdown (Informal)
[Adaptive End-to-End Metric Learning for Zero-Shot Cross-Domain Slot Filling](https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.emnlp-main.387/) (Shi et al., EMNLP 2023)
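As a rough illustration of the "slot-level contrastive representation learning" mentioned in the abstract, the sketch below implements a generic supervised contrastive loss over slot-token embeddings in PyTorch. The function name, tensor shapes, slot-label ids, and temperature are assumptions made for illustration only; this is not the authors' released implementation.

```python
# Minimal sketch (not the authors' code): a supervised contrastive loss at the
# slot level, pulling together token representations that share a slot label
# and pushing apart the rest. All names and hyperparameters are hypothetical.
import torch
import torch.nn.functional as F

def slot_contrastive_loss(token_reprs, slot_labels, temperature=0.1):
    """token_reprs: (N, d) encoder outputs for tokens; slot_labels: (N,) slot ids."""
    z = F.normalize(token_reprs, dim=-1)          # compare in cosine-similarity space
    sim = z @ z.t() / temperature                 # (N, N) scaled pairwise similarities
    n = z.size(0)
    self_mask = torch.eye(n, dtype=torch.bool, device=z.device)
    pos_mask = (slot_labels.unsqueeze(0) == slot_labels.unsqueeze(1)) & ~self_mask
    # log-softmax over all other tokens, then average log-probability of positives
    log_prob = sim - torch.logsumexp(sim.masked_fill(self_mask, float("-inf")),
                                     dim=1, keepdim=True)
    pos_counts = pos_mask.sum(dim=1).clamp(min=1)
    per_anchor = -(log_prob * pos_mask).sum(dim=1) / pos_counts
    return per_anchor[pos_mask.any(dim=1)].mean()  # ignore anchors with no positive

# Example: 6 token vectors with slot ids (0 = O, 1 = "from_city", 2 = "to_city")
reprs = torch.randn(6, 768)
labels = torch.tensor([1, 1, 2, 2, 0, 0])
print(slot_contrastive_loss(reprs, labels))
```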