@inproceedings{hou-etal-2025-btw,
    title     = {{BTW}: A Non-Parametric Variance Stabilization Framework for Multimodal Model Integration},
    author    = {Hou, Jun and
                 Wang, Le and
                 Wang, Xuan},
    editor    = {Christodoulopoulos, Christos and
                 Chakraborty, Tanmoy and
                 Rose, Carolyn and
                 Peng, Violet},
    booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2025},
    month     = nov,
    year      = {2025},
    address   = {Suzhou, China},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2025.findings-emnlp.815/},
    doi       = {10.18653/v1/2025.findings-emnlp.815},
    pages     = {15089--15103},
    isbn      = {979-8-89176-335-7},
    abstract  = {Mixture-of-Experts (MoE) models have become increasingly powerful in multimodal learning by enabling modular specialization across modalities. However, their effectiveness remains unclear when additional modalities introduce more noise than complementary information. Existing approaches, such as the Partial Information Decomposition, struggle to scale beyond two modalities and lack the resolution needed for instance-level control. We propose Beyond Two-modality Weighting (BTW), a bi-level, non-parametric weighting framework that combines instance-level Kullback-Leibler (KL) divergence and modality-level mutual information (MI) to dynamically adjust modality importance during training. Our method does not require additional parameters and can be applied to an arbitrary number of modalities. Specifically, BTW computes per-example KL weights by measuring the divergence between each unimodal and the current multimodal prediction, and modality-wide MI weights by estimating global alignment between unimodal and multimodal outputs. Extensive experiments on sentiment regression and clinical classification demonstrate that our method significantly improves regression performance and multiclass classification accuracy.},
}
@comment{Markdown (Informal)}
@comment{
Informal citation:
[BTW: A Non-Parametric Variance Stabilization Framework for Multimodal Model Integration](https://aclanthology.org/2025.findings-emnlp.815/) (Hou et al., Findings 2025)
ACL
}