See information entropy on Wiktionary
{ "head_templates": [ { "args": { "1": "-" }, "expansion": "information entropy (uncountable)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "categories": [ { "kind": "other", "name": "English entries with incorrect language header", "parents": [ "Entries with incorrect language header", "Entry maintenance" ], "source": "w" }, { "kind": "other", "name": "Entries with translation boxes", "parents": [], "source": "w" }, { "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w" }, { "kind": "other", "name": "Pages with entries", "parents": [], "source": "w" }, { "kind": "other", "name": "Terms with Mandarin translations", "parents": [], "source": "w" }, { "kind": "topical", "langcode": "en", "name": "Information theory", "orig": "en:Information theory", "parents": [ "Applied mathematics", "Mathematics", "Formal sciences", "Sciences", "All topics", "Fundamental" ], "source": "w" }, { "kind": "topical", "langcode": "en", "name": "Statistics", "orig": "en:Statistics", "parents": [ "Formal sciences", "Mathematics", "Sciences", "All topics", "Fundamental" ], "source": "w" } ], "glosses": [ "A measure of the uncertainty associated with a random variable; a measure of the average information content one is missing when one does not know the value of the random variable (usually in units such as bits); the amount of information (measured in, say, bits) contained per average instance of a character in a stream of characters." ], "id": "en-information_entropy-en-noun-ikkWH7kB", "links": [ [ "information theory", "information theory" ], [ "measure", "measure" ], [ "uncertainty", "uncertainty" ], [ "random variable", "random variable" ], [ "average", "average" ], [ "information content", "information content" ], [ "miss", "miss" ], [ "value", "value" ], [ "bit", "bit" ] ], "raw_glosses": [ "(information theory) A measure of the uncertainty associated with a random variable; a measure of the average information content one is missing when one does not know the value of the random variable (usually in units such as bits); the amount of information (measured in, say, bits) contained per average instance of a character in a stream of characters." ], "related": [ { "word": "variety" } ], "synonyms": [ { "word": "Shannon entropy" } ], "tags": [ "uncountable" ], "topics": [ "computing", "engineering", "information-theory", "mathematics", "natural-sciences", "physical-sciences", "sciences" ], "translations": [ { "code": "cmn", "lang": "Chinese Mandarin", "roman": "xìnxī shāng", "sense": "Translations", "word": "信息熵" }, { "code": "cmn", "lang": "Chinese Mandarin", "sense": "Translations", "word": "資訊熵" }, { "code": "cmn", "lang": "Chinese Mandarin", "roman": "zīxùn shāng", "sense": "Translations", "word": "资讯熵" } ], "wikipedia": [ "information entropy" ] } ], "word": "information entropy" }
{ "head_templates": [ { "args": { "1": "-" }, "expansion": "information entropy (uncountable)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "related": [ { "word": "variety" } ], "senses": [ { "categories": [ "English entries with incorrect language header", "English lemmas", "English multiword terms", "English nouns", "English uncountable nouns", "Entries with translation boxes", "Pages with 1 entry", "Pages with entries", "Terms with Mandarin translations", "Translation table header lacks gloss", "en:Information theory", "en:Statistics" ], "glosses": [ "A measure of the uncertainty associated with a random variable; a measure of the average information content one is missing when one does not know the value of the random variable (usually in units such as bits); the amount of information (measured in, say, bits) contained per average instance of a character in a stream of characters." ], "links": [ [ "information theory", "information theory" ], [ "measure", "measure" ], [ "uncertainty", "uncertainty" ], [ "random variable", "random variable" ], [ "average", "average" ], [ "information content", "information content" ], [ "miss", "miss" ], [ "value", "value" ], [ "bit", "bit" ] ], "raw_glosses": [ "(information theory) A measure of the uncertainty associated with a random variable; a measure of the average information content one is missing when one does not know the value of the random variable (usually in units such as bits); the amount of information (measured in, say, bits) contained per average instance of a character in a stream of characters." ], "tags": [ "uncountable" ], "topics": [ "computing", "engineering", "information-theory", "mathematics", "natural-sciences", "physical-sciences", "sciences" ], "wikipedia": [ "information entropy" ] } ], "synonyms": [ { "word": "Shannon entropy" } ], "translations": [ { "code": "cmn", "lang": "Chinese Mandarin", "roman": "xìnxī shāng", "sense": "Translations", "word": "信息熵" }, { "code": "cmn", "lang": "Chinese Mandarin", "sense": "Translations", "word": "資訊熵" }, { "code": "cmn", "lang": "Chinese Mandarin", "roman": "zīxùn shāng", "sense": "Translations", "word": "资讯熵" } ], "word": "information entropy" }
This page is part of the kaikki.org machine-readable All languages combined dictionary. This dictionary is based on structured data extracted on 2024-12-21 from the enwiktionary dump dated 2024-12-04 using wiktextract (d8cb2f3 and 4e554ae). The data shown on this site has been post-processed: various details (e.g., extra categories) have been removed, some information has been disambiguated, and additional data has been merged from other sources. See the raw data download page for the unprocessed wiktextract data.
If you use this data in academic research, please cite Tatu Ylonen: Wiktextract: Wiktionary as Machine-Readable Structured Data, Proceedings of the 13th Conference on Language Resources and Evaluation (LREC), pp. 1317-1325, Marseille, 20-25 June 2022. Linking to the relevant page(s) under https://kaikki.org would also be greatly appreciated.