@inproceedings{brandl-hollenstein-2022-every,
  title     = {Every word counts: A multilingual analysis of individual human alignment with model attention},
  author    = {Brandl, Stephanie and
               Hollenstein, Nora},
  editor    = {He, Yulan and
               Ji, Heng and
               Li, Sujian and
               Liu, Yang and
               Chang, Chia-Hui},
  booktitle = {Proceedings of the 2nd Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 12th International Joint Conference on Natural Language Processing (Volume 2: Short Papers)},
  month     = nov,
  year      = {2022},
  address   = {Online only},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2022.aacl-short.10/},
  doi       = {10.18653/v1/2022.aacl-short.10},
  pages     = {72--77},
  abstract  = {Human fixation patterns have been shown to correlate strongly with Transformer-based attention. Those correlation analyses are usually carried out without taking into account individual differences between participants and are mostly done on monolingual datasets making it difficult to generalise findings. In this paper, we analyse eye-tracking data from speakers of 13 different languages reading both in their native language (L1) and in English as language learners (L2). We find considerable differences between languages but also that individual reading behaviour such as skipping rate, total reading time and vocabulary knowledge (LexTALE) influence the alignment between humans and models to an extent that should be considered in future studies.},
}
Markdown (Informal)
[Every word counts: A multilingual analysis of individual human alignment with model attention](https://aclanthology.org/2022.aacl-short.10/) (Brandl & Hollenstein, AACL-IJCNLP 2022)
ACL