@inproceedings{ai-fang-2023-multilingual,
title = "Multilingual Pre-training with Self-supervision from Global Co-occurrence Information",
author = "Ai, Xi and
Fang, Bin",
editor = "Rogers, Anna and
Boyd-Graber, Jordan and
Okazaki, Naoaki",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2023",
month = jul,
year = "2023",
address = "Toronto, Canada",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.findings-acl.475/",
doi = "10.18653/v1/2023.findings-acl.475",
pages = "7526--7543",
abstract = "Global co-occurrence information is the primary source of structural information on multilingual corpora, and we find that analogical/parallel compound words across languages have similar co-occurrence counts/frequencies (normalized) giving weak but stable self-supervision for cross-lingual transfer. Following the observation, we aim at associating contextualized representations with relevant (contextualized) representations across languages with the help of co-occurrence counts. The result is MLM-GC (MLM with Global Co-occurrence) pre-training that the model learns local bidirectional information from MLM and global co-occurrence information from a log-bilinear regression. Experiments show that MLM-GC pre-training substantially outperforms MLM pre-training for 4 downstream cross-lingual tasks and 1 additional monolingual task, showing the advantages of forming isomorphic spaces across languages."
}
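The abstract describes combining a standard MLM objective with a GloVe-style log-bilinear regression over global co-occurrence counts. As a rough illustration only, not the authors' implementation, a minimal PyTorch sketch of such a combined objective is given below; the function names, the weighting function f(X_ij), and the mixing weight `lam` are assumptions for this sketch.

```python
import torch
import torch.nn.functional as F


def glove_style_loss(emb_i, emb_j, bias_i, bias_j, cooc_counts,
                     x_max=100.0, alpha=0.75):
    """Log-bilinear regression on global co-occurrence counts (GloVe-style).

    emb_i, emb_j: (batch, dim) embeddings of co-occurring (sub)words.
    bias_i, bias_j: (batch,) learned bias terms.
    cooc_counts: (batch,) global co-occurrence counts X_ij.
    """
    log_cooc = torch.log(cooc_counts)
    # Weighting f(X_ij): down-weights rare pairs, caps very frequent ones.
    weights = torch.clamp(cooc_counts / x_max, max=1.0) ** alpha
    # Predicted log co-occurrence: w_i . w_j + b_i + b_j.
    pred = (emb_i * emb_j).sum(dim=-1) + bias_i + bias_j
    return (weights * (pred - log_cooc) ** 2).mean()


def mlm_gc_loss(mlm_logits, mlm_labels,
                emb_i, emb_j, bias_i, bias_j, cooc_counts, lam=1.0):
    """Hypothetical combined objective: MLM cross-entropy plus the
    log-bilinear co-occurrence term, mixed with an assumed weight `lam`."""
    mlm = F.cross_entropy(mlm_logits.view(-1, mlm_logits.size(-1)),
                          mlm_labels.view(-1), ignore_index=-100)
    gc = glove_style_loss(emb_i, emb_j, bias_i, bias_j, cooc_counts)
    return mlm + lam * gc
```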