@inproceedings{xu-etal-2021-zero,
    title     = {Zero-Shot Compositional Concept Learning},
    author    = {Xu, Guangyue and
                 Kordjamshidi, Parisa and
                 Chai, Joyce},
    editor    = {Lee, Hung-Yi and
                 Mohtarami, Mitra and
                 Li, Shang-Wen and
                 Jin, Di and
                 Korpusik, Mandy and
                 Dong, Shuyan and
                 Vu, Ngoc Thang and
                 Hakkani-Tur, Dilek},
    booktitle = {Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing},
    month     = aug,
    year      = {2021},
    address   = {Online},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2021.metanlp-1.3/},
    doi       = {10.18653/v1/2021.metanlp-1.3},
    pages     = {19--27},
    abstract  = {In this paper, we study the problem of recognizing compositional attribute-object concepts within the zero-shot learning (ZSL) framework. We propose an episode-based cross-attention (EpiCA) network which combines merits of cross-attention mechanism and episode-based training strategy to recognize novel compositional concepts. Firstly, EpiCA bases on cross-attention to correlate concept-visual information and utilizes the gated pooling layer to build contextualized representations for both images and concepts. The updated representations are used for a more in-depth multi-modal relevance calculation for concept recognition. Secondly, a two-phase episode training strategy, especially the transductive phase, is adopted to utilize unlabeled test examples to alleviate the low-resource learning problem. Experiments on two widely-used zero-shot compositional learning (ZSCL) benchmarks have demonstrated the effectiveness of the model compared with recent approaches on both conventional and generalized ZSCL settings.},
}
Markdown (Informal)
[Zero-Shot Compositional Concept Learning](https://aclanthology.org/2021.metanlp-1.3/) (Xu et al., MetaNLP 2021)
ACL
- Guangyue Xu, Parisa Kordjamshidi, and Joyce Chai. 2021. Zero-Shot Compositional Concept Learning. In Proceedings of the 1st Workshop on Meta Learning and Its Applications to Natural Language Processing, pages 19–27, Online. Association for Computational Linguistics.