@inproceedings{peng-etal-2022-distill,
title = "Distill The Image to Nowhere: Inversion Knowledge Distillation for Multimodal Machine Translation",
author = "Peng, Ru and
Zeng, Yawen and
Zhao, Jake",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/moar-dois/2022.emnlp-main.152/",
doi = "10.18653/v1/2022.emnlp-main.152",
pages = "2379--2390",
abstract = "Past works on multimodal machine translation (MMT) elevate bilingual setup by incorporating additional aligned vision information.However, an image-must requirement of the multimodal dataset largely hinders MMT{'}s development {---} namely that it demands an aligned form of [image, source text, target text].This limitation is generally troublesome during the inference phase especially when the aligned image is not provided as in the normal NMT setup.Thus, in this work, we introduce IKD-MMT, a novel MMT framework to support the image-free inference phase via an inversion knowledge distillation scheme.In particular, a multimodal feature generator is executed with a knowledge distillation module, which directly generates the multimodal feature from (only) source texts as the input.While there have been a few prior works entertaining the possibility to support image-free inference for machine translation, their performances have yet to rival the image-must translation.In our experiments, we identify our method as the first image-free approach to comprehensively rival or even surpass (almost) all image-must frameworks, and achieved the state-of-the-art result on the often-used Multi30k benchmark. Our code and data are availableat: https://github.com/pengr/IKD-mmt/tree/master.."
}