@inproceedings{rita-etal-2020-lazimpa,
title = "{\textquotedblleft}{L}az{I}mpa{\textquotedblright}: Lazy and Impatient neural agents learn to communicate efficiently",
author = "Rita, Mathieu and
Chaabouni, Rahma and
Dupoux, Emmanuel",
editor = "Fern{\'a}ndez, Raquel and
Linzen, Tal",
booktitle = "Proceedings of the 24th Conference on Computational Natural Language Learning",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.conll-1.26/",
doi = "10.18653/v1/2020.conll-1.26",
pages = "335--343",
    abstract = "Previous work has shown that artificial neural agents naturally develop surprisingly non-efficient codes. This is illustrated by the fact that in a referential game in which speaker and listener neural networks optimize accurate transmission over a discrete channel, the emergent messages fail to achieve an optimal length. Furthermore, frequent messages tend to be longer than infrequent ones, a pattern contrary to the Zipf Law of Abbreviation (ZLA) observed in all natural languages. Here, we show that near-optimal and ZLA-compatible messages can emerge, but only if both the speaker and the listener are modified. We hence introduce a new communication system, {\textquotedblleft}LazImpa{\textquotedblright}, where the speaker is made increasingly lazy, i.e., avoids long messages, and the listener impatient, i.e., seeks to guess the intended content as soon as possible."
}
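
For orientation, the following is a minimal sketch (plain PyTorch; all names and shapes are illustrative assumptions, not taken from the authors' code) of the two loss modifications the abstract describes: a "lazy" speaker that pays a cost proportional to message length, and an "impatient" listener that is trained to guess the target from every message prefix rather than only from the full message.

    import torch
    import torch.nn.functional as F

    def lazy_speaker_loss(listener_loss, message_lengths, alpha):
        """Speaker objective with a 'laziness' penalty: on top of the
        transmission loss, the speaker pays a cost proportional to
        message length. In the paper the penalty weight grows as
        accuracy improves ('increasingly lazy'); here `alpha` is
        simply a scalar hyperparameter (an assumption of this sketch).
        listener_loss: (batch,) per-sample loss from the listener.
        message_lengths: (batch,) integer lengths of the messages.
        """
        return listener_loss + alpha * message_lengths.float()

    def impatient_listener_loss(prefix_logits, target):
        """'Impatient' listener: it outputs a guess after every symbol
        of the message, not only at the end.
        prefix_logits: (batch, max_len, n_candidates), one prediction
        per message prefix. target: (batch,) candidate indices.
        Averaging the cross-entropy over all prefixes rewards the
        listener for guessing the intended content as early as possible.
        """
        batch, max_len, n_candidates = prefix_logits.shape
        losses = F.cross_entropy(
            prefix_logits.reshape(-1, n_candidates),
            target.repeat_interleave(max_len),
            reduction="none",
        ).view(batch, max_len)
        return losses.mean(dim=1)

Averaging over prefixes is one simple way to implement impatience; the paper's exact per-position formulation and its schedule for the length-penalty weight may differ from this sketch.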