@inproceedings{jin-kann-2017-exploring,
title = "Exploring Cross-Lingual Transfer of Morphological Knowledge In Sequence-to-Sequence Models",
author = "Jin, Huiming and
Kann, Katharina",
editor = "Faruqui, Manaal and
Sch{\"u}tze, Hinrich and
Trancoso, Isabel and
Yaghoobzadeh, Yadollah",
booktitle = "Proceedings of the First Workshop on Subword and Character Level Models in {NLP}",
month = sep,
year = "2017",
address = "Copenhagen, Denmark",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/W17-4110/",
doi = "10.18653/v1/W17-4110",
pages = "70--75",
abstract = "Multi-task training is an effective method to mitigate the data sparsity problem. It has recently been applied for cross-lingual transfer learning for paradigm completion{---}the task of producing inflected forms of lemmata{---}with sequence-to-sequence networks. However, it is still vague how the model transfers knowledge across languages, as well as if and which information is shared. To investigate this, we propose a set of data-dependent experiments using an existing encoder-decoder recurrent neural network for the task. Our results show that indeed the performance gains surpass a pure regularization effect and that knowledge about language and morphology can be transferred."
}