{ "categories": [ { "_dis": "0 0", "kind": "other", "langcode": "en", "name": "Futurology", "orig": "en:Futurology", "parents": [], "source": "w+disamb" } ], "etymology_templates": [ { "args": { "1": "en", "2": "Q460475", "in": "2002", "nocap": "1" }, "expansion": "coined by philosopher and writer Nick Bostrom in 2002", "name": "coin" } ], "etymology_text": "The \"human extinction\" sense was coined by philosopher and writer Nick Bostrom in 2002.", "forms": [ { "form": "existential risks", "tags": [ "plural" ] } ], "head_templates": [ { "args": { "1": "~" }, "expansion": "existential risk (countable and uncountable, plural existential risks)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "categories": [], "examples": [ { "ref": "2019 April 18, Gregory Travis, “How the Boeing 737 Max Disaster Looks to a Software Developer”, in IEEE Spectrum:", "text": "In an industry that relies more than anything on the appearance of total control, total safety, these two crashes pose as close to an existential risk as you can get.", "type": "quote" }, { "ref": "2020 March 30, “Tesla Faces Existential Risks”, in Seeking Alpha:", "text": "I believe TSLA faces existential risk based on what is happening in the world today, and that this recent scare and economic recession will only catalyze further share price decline.", "type": "quote" } ], "glosses": [ "A risk which could destroy or permanently damage an entity; a risk to one's existence." ], "id": "en-existential_risk-en-noun-UTvCSzH8", "links": [ [ "risk", "risk" ], [ "existence", "existence" ] ], "tags": [ "countable", "uncountable" ] }, { "categories": [ { "_dis": "21 79", "kind": "other", "name": "English entries with incorrect language header", "parents": [ "Entries with incorrect language header", "Entry maintenance" ], "source": "w+disamb" }, { "_dis": "13 87", "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w+disamb" }, { "_dis": "7 93", "kind": "other", "name": "Pages with entries", "parents": [], "source": "w+disamb" } ], "examples": [ { "ref": "2008, Eliezer Yudkowsky, “Cognitive Biases Potentially Affecting Judgment of Global Risks”, in Nick Bostrom, Milan M. Ćirković, editors, Global Catastrophic Risks, New York: Oxford University Press, →ISBN:", "text": "The scenario of humanity going extinct in the next century is a disjunctive event. It could happen as a result of any of the existential risks we already know about—or some other cause which none of us foresaw.", "type": "quote" }, { "ref": "2013 February, Nick Bostrom, “Existential Risk Prevention as Global Priority”, in Global Policy, volume 4, number 1, archived from the original on 2020-09-08:", "text": "But perhaps the strongest reason for judging the total existential risk within the next few centuries to be significant is the extreme magnitude of the values at stake.", "type": "quote" }, { "ref": "2023 May 2, Josh Taylor, Alex Hern, “‘Godfather of AI’ Geoffrey Hinton quits Google and warns over dangers of misinformation”, in The Guardian, →ISSN:", "text": "The man often touted as the godfather of AI has quit Google, citing concerns over the flood of misinformation, the possibility for AI to upend the job market, and the “existential risk” posed by the creation of a true digital intelligence.", "type": "quote" }, { "ref": "2023 July 15, George Monbiot, “With our food systems on the verge of collapse, it’s the plutocrats v life on Earth”, in The Guardian, →ISSN:", "text": "So why isn’t this all over the front pages? 
Why, when governments know we’re facing existential risk, do they fail to act?", "type": "quote" }, { "ref": "2023 November 20, Karen Hao, Charlie Warzel, “Inside the Chaos at OpenAI”, in The Atlantic:", "text": "Altman’s dismissal by OpenAI’s board on Friday was the culmination of a power struggle between the company’s two ideological extremes—one group born from Silicon Valley techno-optimism, energized by rapid commercialization; the other steeped in fears that AI represents an existential risk to humanity and must be controlled with extreme caution.", "type": "quote" } ], "glosses": [ "A hypothetical future event which could cause human extinction or permanently and severely curtail humanity's potential." ], "id": "en-existential_risk-en-noun-CSkIY73g", "links": [ [ "extinction", "extinction" ] ], "raw_glosses": [ "(specifically) A hypothetical future event which could cause human extinction or permanently and severely curtail humanity's potential." ], "tags": [ "countable", "specifically", "uncountable" ] } ], "wikipedia": [ "global catastrophic risk" ], "word": "existential risk" }
{ "categories": [ "English coinages", "English countable nouns", "English entries with incorrect language header", "English lemmas", "English multiword terms", "English nouns", "English terms coined by Nick Bostrom", "English uncountable nouns", "Pages with 1 entry", "Pages with entries", "en:Futurology" ], "etymology_templates": [ { "args": { "1": "en", "2": "Q460475", "in": "2002", "nocap": "1" }, "expansion": "coined by philosopher and writer Nick Bostrom in 2002", "name": "coin" } ], "etymology_text": "The \"human extinction\" sense was coined by philosopher and writer Nick Bostrom in 2002.", "forms": [ { "form": "existential risks", "tags": [ "plural" ] } ], "head_templates": [ { "args": { "1": "~" }, "expansion": "existential risk (countable and uncountable, plural existential risks)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "categories": [ "English terms with quotations" ], "examples": [ { "ref": "2019 April 18, Gregory Travis, “How the Boeing 737 Max Disaster Looks to a Software Developer”, in IEEE Spectrum:", "text": "In an industry that relies more than anything on the appearance of total control, total safety, these two crashes pose as close to an existential risk as you can get.", "type": "quote" }, { "ref": "2020 March 30, “Tesla Faces Existential Risks”, in Seeking Alpha:", "text": "I believe TSLA faces existential risk based on what is happening in the world today, and that this recent scare and economic recession will only catalyze further share price decline.", "type": "quote" } ], "glosses": [ "A risk which could destroy or permanently damage an entity; a risk to one's existence." ], "links": [ [ "risk", "risk" ], [ "existence", "existence" ] ], "tags": [ "countable", "uncountable" ] }, { "categories": [ "English terms with quotations" ], "examples": [ { "ref": "2008, Eliezer Yudkowsky, “Cognitive Biases Potentially Affecting Judgment of Global Risks”, in Nick Bostrom, Milan M. Ćirković, editors, Global Catastrophic Risks, New York: Oxford University Press, →ISBN:", "text": "The scenario of humanity going extinct in the next century is a disjunctive event. It could happen as a result of any of the existential risks we already know about—or some other cause which none of us foresaw.", "type": "quote" }, { "ref": "2013 February, Nick Bostrom, “Existential Risk Prevention as Global Priority”, in Global Policy, volume 4, number 1, archived from the original on 2020-09-08:", "text": "But perhaps the strongest reason for judging the total existential risk within the next few centuries to be significant is the extreme magnitude of the values at stake.", "type": "quote" }, { "ref": "2023 May 2, Josh Taylor, Alex Hern, “‘Godfather of AI’ Geoffrey Hinton quits Google and warns over dangers of misinformation”, in The Guardian, →ISSN:", "text": "The man often touted as the godfather of AI has quit Google, citing concerns over the flood of misinformation, the possibility for AI to upend the job market, and the “existential risk” posed by the creation of a true digital intelligence.", "type": "quote" }, { "ref": "2023 July 15, George Monbiot, “With our food systems on the verge of collapse, it’s the plutocrats v life on Earth”, in The Guardian, →ISSN:", "text": "So why isn’t this all over the front pages? 
Why, when governments know we’re facing existential risk, do they fail to act?", "type": "quote" }, { "ref": "2023 November 20, Karen Hao, Charlie Warzel, “Inside the Chaos at OpenAI”, in The Atlantic:", "text": "Altman’s dismissal by OpenAI’s board on Friday was the culmination of a power struggle between the company’s two ideological extremes—one group born from Silicon Valley techno-optimism, energized by rapid commercialization; the other steeped in fears that AI represents an existential risk to humanity and must be controlled with extreme caution.", "type": "quote" } ], "glosses": [ "A hypothetical future event which could cause human extinction or permanently and severely curtail humanity's potential." ], "links": [ [ "extinction", "extinction" ] ], "raw_glosses": [ "(specifically) A hypothetical future event which could cause human extinction or permanently and severely curtail humanity's potential." ], "tags": [ "countable", "specifically", "uncountable" ] } ], "wikipedia": [ "global catastrophic risk" ], "word": "existential risk" }
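Each of the two JSON objects above is a complete wiktextract entry: top-level fields such as "word", "pos", "forms" and "etymology_text", plus a "senses" list whose items carry "glosses", "tags", "links" and quotation "examples". As a minimal sketch of how that structure might be walked in Python (assuming the object has been saved locally under the hypothetical filename existential_risk.json):

import json

# Load one wiktextract entry; existential_risk.json is a hypothetical local
# filename holding a single JSON object shaped like the data above.
with open("existential_risk.json", encoding="utf-8") as f:
    entry = json.load(f)

print(entry["word"], "|", entry["pos"])   # existential risk | noun
print(entry["etymology_text"])            # coinage note (Bostrom, 2002)

# Each sense carries its glosses, grammatical tags and quotation examples.
for sense in entry["senses"]:
    for gloss in sense["glosses"]:
        print("gloss:", gloss)
    print("tags:", ", ".join(sense.get("tags", [])))
    for example in sense.get("examples", []):
        if example.get("type") == "quote":
            print("  quote:", example["text"])

Run against the entry above, this prints the general "risk to one's existence" gloss and the narrower "human extinction" gloss, each followed by its attached quotations.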
Download raw JSONL data for existential risk meaning in English (4.3kB)
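The raw download is JSONL, i.e. one JSON object per line, so it can be streamed without loading the whole file into memory. A small sketch under the same assumptions (the download saved locally under the hypothetical name existential_risk.jsonl):

import json

# Stream the JSONL download line by line; existential_risk.jsonl is a
# hypothetical local filename for the raw data referred to above.
with open("existential_risk.jsonl", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue                  # skip blank lines, if any
        entry = json.loads(line)
        if entry.get("lang_code") == "en" and entry.get("pos") == "noun":
            glosses = [g for sense in entry["senses"] for g in sense["glosses"]]
            print(entry["word"], "->", "; ".join(glosses))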
This page is part of the kaikki.org machine-readable English dictionary. The dictionary is based on structured data extracted on 2024-12-21 from the enwiktionary dump dated 2024-12-04 using wiktextract (d8cb2f3 and 4e554ae). The data shown on this site has been post-processed: various details (e.g., extra categories) have been removed, some information has been disambiguated, and additional data has been merged from other sources. See the raw data download page for the unprocessed wiktextract data.
If you use this data in academic research, please cite Tatu Ylonen: Wiktextract: Wiktionary as Machine-Readable Structured Data, Proceedings of the 13th Conference on Language Resources and Evaluation (LREC), pp. 1317-1325, Marseille, 20-25 June 2022. Linking to the relevant page(s) under https://kaikki.org would also be greatly appreciated.