{ "antonyms": [ { "word": "entropy" } ], "derived": [ { "_dis1": "0 0", "word": "negentropic" } ], "etymology_templates": [ { "args": { "1": "en", "2": "nega-", "3": "entropy" }, "expansion": "nega- + entropy", "name": "af" }, { "args": { "1": "en", "2": "negative", "3": "entropy", "nocap": "1" }, "expansion": "blend of negative + entropy", "name": "blend" } ], "etymology_text": "From nega- + entropy.\nA blend of negative + entropy, coined by the French physicist Léon Brillouin (1889–1969). The term negative entropy was introduced by the Austrian physicist Erwin Schrödinger (1887–1961) in his book What is Life? (1944, based on lectures delivered in February 1943).", "forms": [ { "form": "negentropies", "tags": [ "plural" ] } ], "head_templates": [ { "args": { "1": "~" }, "expansion": "negentropy (countable and uncountable, plural negentropies)", "name": "en-noun" } ], "hyphenation": [ "neg‧en‧tro‧py" ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "categories": [ { "kind": "topical", "langcode": "en", "name": "Thermodynamics", "orig": "en:Thermodynamics", "parents": [ "Physics", "Sciences", "All topics", "Fundamental" ], "source": "w" }, { "_dis": "50 50", "kind": "other", "name": "English blends", "parents": [], "source": "w+disamb" }, { "_dis": "49 51", "kind": "other", "name": "English entries with incorrect language header", "parents": [ "Entries with incorrect language header", "Entry maintenance" ], "source": "w+disamb" }, { "_dis": "50 50", "kind": "other", "name": "English terms prefixed with nega-", "parents": [], "source": "w+disamb" }, { "_dis": "49 51", "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w+disamb" }, { "_dis": "50 50", "kind": "other", "name": "Pages with entries", "parents": [], "source": "w+disamb" }, { "_dis": "58 42", "kind": "topical", "langcode": "en", "name": "Physical quantities", "orig": "en:Physical quantities", "parents": [ "Physics", "Sciences", "All topics", "Fundamental" ], "source": "w+disamb" } ], "examples": [ { "ref": "1956 April, Brillouin, Léon, “Summary of Thermodynamics”, in Science and Information Theory, New York, N.Y.: Academic Press, →OCLC, section 2 (The Two Principles of Thermodynamics; Entropy and Negentropy), pages 116–117:", "text": "An isolated system contains negentropy if it reveals a possibility for doing mechanical or electrical work: […] A tank of compressed gas in a room at atmospheric pressure, a vacuum tank in a similar room, a charged battery, any device that can produce high grade energy (mechanical work) or be degraded by some irreversible process (thermal conduction, electrical resistivity, friction, viscosity) is a source of negentropy. […] If a living organism needs food, it is only for the negentropy it can get from it, and which is needed to make up for the losses due to mechanical work done, or simple degradation processes in the living system. Energy contained in food does not really matter, since energy is conserved and never gets lost, but negentropy is the important factor.", "type": "quote" }, { "ref": "1977, Boris G. Kuznetsov, Philosophy of Optimism, Moscow: Progress Publishers, page 35:", "text": "The same magnitude, but with a minus sign, i.e., the measure of macroscopic ordering, the measure of non-uniformity in the distribution of heat, the measure of the differences in temperature—temperature gradients—is called negentropy.", "type": "quote" }, { "ref": "2013, John E. 
Mayfield, “The Evolution of Complexity”, in The Engine of Complexity: Evolution as Computation, New York, N.Y.: Columbia University Press, →ISBN, page 277:", "text": "Negentropy is the difference between the entropy of a system and the entropy of the same system at equilibrium (i.e., when it has its maximum possible entropy). Since entropy measures the amount of disorder, negentropy measures order.", "type": "quote" } ], "glosses": [ "The difference between the entropy of a system and the maximum possible entropy of the same system." ], "id": "en-negentropy-en-noun-nfQ1Sp~h", "links": [ [ "thermodynamics", "thermodynamics" ], [ "difference", "difference" ], [ "entropy", "entropy" ], [ "system", "system" ], [ "maximum", "maximum" ] ], "raw_glosses": [ "(thermodynamics) The difference between the entropy of a system and the maximum possible entropy of the same system." ], "tags": [ "countable", "uncountable" ], "topics": [ "natural-sciences", "physical-sciences", "physics", "thermodynamics" ] }, { "categories": [ { "kind": "topical", "langcode": "en", "name": "Information theory", "orig": "en:Information theory", "parents": [ "Applied mathematics", "Mathematics", "Formal sciences", "Sciences", "All topics", "Fundamental" ], "source": "w" }, { "_dis": "50 50", "kind": "other", "name": "English blends", "parents": [], "source": "w+disamb" }, { "_dis": "49 51", "kind": "other", "name": "English entries with incorrect language header", "parents": [ "Entries with incorrect language header", "Entry maintenance" ], "source": "w+disamb" }, { "_dis": "50 50", "kind": "other", "name": "English terms prefixed with nega-", "parents": [], "source": "w+disamb" }, { "_dis": "49 51", "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w+disamb" }, { "_dis": "50 50", "kind": "other", "name": "Pages with entries", "parents": [], "source": "w+disamb" } ], "examples": [ { "ref": "1990, Aleksandr I. Zotin, “Thermodynamics of Nonequilibrium Processes”, in Thermodynamic Bases of Biological Processes: Physiological Reactions and Adaptations, Berlin, New York, N.Y.: Walter de Gruyter, →ISBN, section 1.3.2 (Maxwell’s Demon and Negentropy Effects), page 46:", "text": "Brillouin (1956) made an attempt to link thermodynamic notions to the notions of information theory. To this end he employs the idea of bound information or information consistent with some microstates of a physical system. […] Brillouin has shown that bound information is equal to entropy decrease or negentropy increase. This is a so called negentropy principle of information, according to which negentropy can be turned into information and vice versa as follows from (1.103).", "type": "quote" } ], "glosses": [ "The difference between the entropy of a probability distribution and the maximum possible entropy of the same probability distribution." ], "id": "en-negentropy-en-noun-IGSsfaon", "links": [ [ "information theory", "information theory" ], [ "probability distribution", "probability distribution" ] ], "raw_glosses": [ "(information theory) The difference between the entropy of a probability distribution and the maximum possible entropy of the same probability distribution." 
], "tags": [ "countable", "uncountable" ], "topics": [ "computing", "engineering", "information-theory", "mathematics", "natural-sciences", "physical-sciences", "sciences" ] } ], "sounds": [ { "ipa": "/ˌnɛɡˈɛntɹɒpi/", "tags": [ "Received-Pronunciation" ] }, { "ipa": "/ˌnɛɡˈɛntɹəpi/", "tags": [ "General-American" ] } ], "synonyms": [ { "_dis1": "0 0", "word": "centropy" }, { "_dis1": "0 0", "word": "disentropy" }, { "_dis1": "0 0", "word": "entaxy" }, { "_dis1": "0 0", "word": "extropy" } ], "wikipedia": [ "Erwin Schrödinger", "Léon Brillouin", "What is Life?" ], "word": "negentropy" }
{ "antonyms": [ { "word": "entropy" } ], "categories": [ "English blends", "English countable nouns", "English entries with incorrect language header", "English lemmas", "English nouns", "English terms prefixed with nega-", "English uncountable nouns", "Pages with 1 entry", "Pages with entries", "en:Physical quantities" ], "derived": [ { "word": "negentropic" } ], "etymology_templates": [ { "args": { "1": "en", "2": "nega-", "3": "entropy" }, "expansion": "nega- + entropy", "name": "af" }, { "args": { "1": "en", "2": "negative", "3": "entropy", "nocap": "1" }, "expansion": "blend of negative + entropy", "name": "blend" } ], "etymology_text": "From nega- + entropy.\nA blend of negative + entropy, coined by the French physicist Léon Brillouin (1889–1969). The term negative entropy was introduced by the Austrian physicist Erwin Schrödinger (1887–1961) in his book What is Life? (1944, based on lectures delivered in February 1943).", "forms": [ { "form": "negentropies", "tags": [ "plural" ] } ], "head_templates": [ { "args": { "1": "~" }, "expansion": "negentropy (countable and uncountable, plural negentropies)", "name": "en-noun" } ], "hyphenation": [ "neg‧en‧tro‧py" ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "categories": [ "English terms with quotations", "en:Thermodynamics" ], "examples": [ { "ref": "1956 April, Brillouin, Léon, “Summary of Thermodynamics”, in Science and Information Theory, New York, N.Y.: Academic Press, →OCLC, section 2 (The Two Principles of Thermodynamics; Entropy and Negentropy), pages 116–117:", "text": "An isolated system contains negentropy if it reveals a possibility for doing mechanical or electrical work: […] A tank of compressed gas in a room at atmospheric pressure, a vacuum tank in a similar room, a charged battery, any device that can produce high grade energy (mechanical work) or be degraded by some irreversible process (thermal conduction, electrical resistivity, friction, viscosity) is a source of negentropy. […] If a living organism needs food, it is only for the negentropy it can get from it, and which is needed to make up for the losses due to mechanical work done, or simple degradation processes in the living system. Energy contained in food does not really matter, since energy is conserved and never gets lost, but negentropy is the important factor.", "type": "quote" }, { "ref": "1977, Boris G. Kuznetsov, Philosophy of Optimism, Moscow: Progress Publishers, page 35:", "text": "The same magnitude, but with a minus sign, i.e., the measure of macroscopic ordering, the measure of non-uniformity in the distribution of heat, the measure of the differences in temperature—temperature gradients—is called negentropy.", "type": "quote" }, { "ref": "2013, John E. Mayfield, “The Evolution of Complexity”, in The Engine of Complexity: Evolution as Computation, New York, N.Y.: Columbia University Press, →ISBN, page 277:", "text": "Negentropy is the difference between the entropy of a system and the entropy of the same system at equilibrium (i.e., when it has its maximum possible entropy). Since entropy measures the amount of disorder, negentropy measures order.", "type": "quote" } ], "glosses": [ "The difference between the entropy of a system and the maximum possible entropy of the same system." 
], "links": [ [ "thermodynamics", "thermodynamics" ], [ "difference", "difference" ], [ "entropy", "entropy" ], [ "system", "system" ], [ "maximum", "maximum" ] ], "raw_glosses": [ "(thermodynamics) The difference between the entropy of a system and the maximum possible entropy of the same system." ], "tags": [ "countable", "uncountable" ], "topics": [ "natural-sciences", "physical-sciences", "physics", "thermodynamics" ] }, { "categories": [ "English terms with quotations", "en:Information theory" ], "examples": [ { "ref": "1990, Aleksandr I. Zotin, “Thermodynamics of Nonequilibrium Processes”, in Thermodynamic Bases of Biological Processes: Physiological Reactions and Adaptations, Berlin, New York, N.Y.: Walter de Gruyter, →ISBN, section 1.3.2 (Maxwell’s Demon and Negentropy Effects), page 46:", "text": "Brillouin (1956) made an attempt to link thermodynamic notions to the notions of information theory. To this end he employs the idea of bound information or information consistent with some microstates of a physical system. […] Brillouin has shown that bound information is equal to entropy decrease or negentropy increase. This is a so called negentropy principle of information, according to which negentropy can be turned into information and vice versa as follows from (1.103).", "type": "quote" } ], "glosses": [ "The difference between the entropy of a probability distribution and the maximum possible entropy of the same probability distribution." ], "links": [ [ "information theory", "information theory" ], [ "probability distribution", "probability distribution" ] ], "raw_glosses": [ "(information theory) The difference between the entropy of a probability distribution and the maximum possible entropy of the same probability distribution." ], "tags": [ "countable", "uncountable" ], "topics": [ "computing", "engineering", "information-theory", "mathematics", "natural-sciences", "physical-sciences", "sciences" ] } ], "sounds": [ { "ipa": "/ˌnɛɡˈɛntɹɒpi/", "tags": [ "Received-Pronunciation" ] }, { "ipa": "/ˌnɛɡˈɛntɹəpi/", "tags": [ "General-American" ] } ], "synonyms": [ { "word": "centropy" }, { "word": "disentropy" }, { "word": "entaxy" }, { "word": "extropy" } ], "wikipedia": [ "Erwin Schrödinger", "Léon Brillouin", "What is Life?" ], "word": "negentropy" }
Download raw JSONL data for negentropy meaning in All languages combined (5.4kB)
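A minimal sketch of how an entry like the one above could be read from the downloaded file, assuming standard JSON Lines (one JSON object per line); the filename negentropy.jsonl and the fields printed are illustrative choices, not guarantees made by this page:

import json

with open("negentropy.jsonl", encoding="utf-8") as f:
    for line in f:
        entry = json.loads(line)
        print(entry["word"], "(" + entry["pos"] + ")")
        for sense in entry["senses"]:
            # raw_glosses carries the topic label, e.g. "(thermodynamics) ..."
            for gloss in sense.get("raw_glosses", sense["glosses"]):
                print("  -", gloss)

For the entry shown here, this would print the headword and part of speech followed by its two glosses, one labelled (thermodynamics) and one labelled (information theory).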
This page is part of the kaikki.org machine-readable All languages combined dictionary. The dictionary is based on structured data extracted on 2024-11-06 from the enwiktionary dump dated 2024-10-02 using wiktextract (fbeafe8 and 7f03c9b). The data shown on this site has been post-processed: various details (e.g., extra categories) have been removed, some information has been disambiguated, and additional data has been merged from other sources. See the raw data download page for the unprocessed wiktextract data.
If you use this data in academic research, please cite Tatu Ylonen: Wiktextract: Wiktionary as Machine-Readable Structured Data, Proceedings of the 13th Conference on Language Resources and Evaluation (LREC), pp. 1317–1325, Marseille, 20–25 June 2022. Linking to the relevant page(s) under https://kaikki.org would also be greatly appreciated.