@inproceedings{rao-etal-2021-cross-lingual,
title = "Cross-Lingual Leveled Reading Based on Language-Invariant Features",
author = "Rao, Simin and
Zheng, Hua and
Li, Sujian",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2021",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.findings-emnlp.227/",
doi = "10.18653/v1/2021.findings-emnlp.227",
pages = "2677--2682",
abstract = "Leveled reading (LR) aims to automatically classify texts by the cognitive levels of readers, which is fundamental in providing appropriate reading materials regarding different reading capabilities. However, most state-of-the-art LR methods rely on the availability of copious annotated resources, which prevents their adaptation to low-resource languages like Chinese. In our work, to tackle LR in Chinese, we explore how different language transfer methods perform on English-Chinese LR. Specifically, we focus on adversarial training and cross-lingual pre-training method to transfer the LR knowledge learned from annotated data in the resource-rich English language to Chinese. For evaluation, we first introduce the age-based standard to align datasets with different leveling standards. Then we conduct experiments in both zero-shot and few-shot settings. Comparing these two methods, quantitative and qualitative evaluations show that the cross-lingual pre-training method effectively captures the language-invariant features between English and Chinese. We conduct analysis to propose further improvement in cross-lingual LR."
}