@inproceedings{felice-buttery-2019-entropy,
  title     = {Entropy as a Proxy for Gap Complexity in Open Cloze Tests},
  author    = {Felice, Mariano and Buttery, Paula},
  editor    = {Mitkov, Ruslan and Angelova, Galia},
  booktitle = {Proceedings of the International Conference on Recent Advances in Natural Language Processing ({RANLP} 2019)},
  month     = sep,
  year      = {2019},
  address   = {Varna, Bulgaria},
  publisher = {INCOMA Ltd.},
  url       = {https://aclanthology.org/R19-1037/},
  doi       = {10.26615/978-954-452-056-4_037},
  pages     = {323--327},
  abstract  = {This paper presents a pilot study of entropy as a measure of gap complexity in open cloze tests aimed at learners of English. Entropy is used to quantify the information content in each gap, which can be used to estimate complexity. Our study shows that average gap entropy correlates positively with proficiency levels while individual gap entropy can capture contextual complexity. To the best of our knowledge, this is the first unsupervised information-theoretical approach to evaluating the quality of cloze tests.},
}
Markdown (Informal)
[Entropy as a Proxy for Gap Complexity in Open Cloze Tests](https://aclanthology.org/R19-1037/) (Felice & Buttery, RANLP 2019)
ACL