@inproceedings{shao-etal-2020-multi,
    title     = {Multi-level Alignment Pretraining for Multi-lingual Semantic Parsing},
    author    = {Shao, Bo and
                 Gong, Yeyun and
                 Qi, Weizhen and
                 Duan, Nan and
                 Lin, Xiaola},
    editor    = {Scott, Donia and
                 Bel, Nuria and
                 Zong, Chengqing},
    booktitle = {Proceedings of the 28th International Conference on Computational Linguistics},
    month     = dec,
    year      = {2020},
    address   = {Barcelona, Spain (Online)},
    publisher = {International Committee on Computational Linguistics},
    url       = {https://aclanthology.org/2020.coling-main.289/},
    doi       = {10.18653/v1/2020.coling-main.289},
    pages     = {3246--3256},
    abstract  = {In this paper, we present a multi-level alignment pretraining method in a unified architecture for multi-lingual semantic parsing. In this architecture, we use an adversarial training method to align the space of different languages and use sentence level and word level parallel corpus as supervision information to align the semantic of different languages. Finally, we jointly train the multi-level alignment and semantic parsing tasks. We conduct experiments on a publicly available multi-lingual semantic parsing dataset ATIS and a newly constructed dataset. Experimental results show that our model outperforms state-of-the-art methods on both datasets.},
}
Markdown (Informal)
[Multi-level Alignment Pretraining for Multi-lingual Semantic Parsing](https://aclanthology.org/2020.coling-main.289/) (Shao et al., COLING 2020)
ACL