{ "head_templates": [ { "args": { "1": "?" }, "expansion": "MoE", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "alt_of": [ { "word": "Ministry of Education" } ], "categories": [ { "kind": "other", "name": "English links with redundant alt parameters", "parents": [ "Links with redundant alt parameters", "Entry maintenance" ], "source": "w" }, { "kind": "other", "name": "English links with redundant wikilinks", "parents": [ "Links with redundant wikilinks", "Entry maintenance" ], "source": "w" } ], "glosses": [ "Initialism of Ministry of Education." ], "id": "en-MoE-en-noun-Bz2Sw6eL", "links": [ [ "Ministry of Education", "w:Ministry of Education" ] ], "tags": [ "abbreviation", "alt-of", "initialism" ] }, { "alt_of": [ { "word": "mixture of experts" } ], "categories": [ { "kind": "topical", "langcode": "en", "name": "Machine learning", "orig": "en:Machine learning", "parents": [ "Artificial intelligence", "Computer science", "Cybernetics", "Computing", "Sciences", "Applied mathematics", "Systems theory", "Technology", "All topics", "Mathematics", "Systems", "Fundamental", "Formal sciences", "Interdisciplinary fields", "Society" ], "source": "w" }, { "_dis": "32 49 19", "kind": "other", "name": "English entries with incorrect language header", "parents": [ "Entries with incorrect language header", "Entry maintenance" ], "source": "w+disamb" }, { "_dis": "13 50 37", "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w+disamb" }, { "_dis": "9 79 11", "kind": "other", "name": "Pages with entries", "parents": [], "source": "w+disamb" } ], "examples": [ { "ref": "2023, Gerhard Paaß, Sven Giesselbach, Foundation Models for Natural Language Processing: Pre-trained Language Models Integrating Media, Springer Nature, →ISBN, page 130:", "text": "GLaM [51] is an autoregressive mixture-of-experts (MoE) model with up to 1200B parameters.", "type": "quote" } ], "glosses": [ "Initialism of mixture of experts." ], "id": "en-MoE-en-noun-xnwZWyrP", "links": [ [ "machine learning", "machine learning" ], [ "mixture of experts", "w:mixture of experts" ] ], "qualifier": "machine learning", "raw_glosses": [ "(machine learning) Initialism of mixture of experts." ], "tags": [ "abbreviation", "alt-of", "initialism" ] }, { "alt_of": [ { "word": "margin of error" } ], "categories": [ { "kind": "topical", "langcode": "en", "name": "Statistics", "orig": "en:Statistics", "parents": [ "Formal sciences", "Mathematics", "Sciences", "All topics", "Fundamental" ], "source": "w" } ], "glosses": [ "Initialism of margin of error." ], "id": "en-MoE-en-noun-s1e1OXDO", "links": [ [ "statistics", "statistics" ], [ "margin of error", "margin of error#English" ] ], "raw_glosses": [ "(statistics) Initialism of margin of error." ], "tags": [ "abbreviation", "alt-of", "initialism" ], "topics": [ "mathematics", "sciences", "statistics" ] } ], "synonyms": [ { "_dis1": "42 9 49", "word": "MOE" } ], "word": "MoE" }
{ "categories": [ "English countable nouns", "English entries with incorrect language header", "English lemmas", "English nouns", "English nouns with unknown or uncertain plurals", "Pages with 1 entry", "Pages with entries" ], "head_templates": [ { "args": { "1": "?" }, "expansion": "MoE", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "alt_of": [ { "word": "Ministry of Education" } ], "categories": [ "English initialisms", "English links with redundant alt parameters", "English links with redundant wikilinks" ], "glosses": [ "Initialism of Ministry of Education." ], "links": [ [ "Ministry of Education", "w:Ministry of Education" ] ], "tags": [ "abbreviation", "alt-of", "initialism" ] }, { "alt_of": [ { "word": "mixture of experts" } ], "categories": [ "English initialisms", "English terms with quotations", "en:Machine learning" ], "examples": [ { "ref": "2023, Gerhard Paaß, Sven Giesselbach, Foundation Models for Natural Language Processing: Pre-trained Language Models Integrating Media, Springer Nature, →ISBN, page 130:", "text": "GLaM [51] is an autoregressive mixture-of-experts (MoE) model with up to 1200B parameters.", "type": "quote" } ], "glosses": [ "Initialism of mixture of experts." ], "links": [ [ "machine learning", "machine learning" ], [ "mixture of experts", "w:mixture of experts" ] ], "qualifier": "machine learning", "raw_glosses": [ "(machine learning) Initialism of mixture of experts." ], "tags": [ "abbreviation", "alt-of", "initialism" ] }, { "alt_of": [ { "word": "margin of error" } ], "categories": [ "English initialisms", "en:Statistics" ], "glosses": [ "Initialism of margin of error." ], "links": [ [ "statistics", "statistics" ], [ "margin of error", "margin of error#English" ] ], "raw_glosses": [ "(statistics) Initialism of margin of error." ], "tags": [ "abbreviation", "alt-of", "initialism" ], "topics": [ "mathematics", "sciences", "statistics" ] } ], "synonyms": [ { "word": "MOE" } ], "word": "MoE" }
Download raw JSONL data for the MoE entry in English (1.9 kB)
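Since the download is JSONL, i.e. one complete JSON object per line, a minimal loading sketch looks like this; the filename "MoE.jsonl" is a placeholder for wherever the file was saved, not the actual download path.

import json

# JSONL: each non-blank line is one complete JSON object.
# "MoE.jsonl" is a placeholder path for the downloaded file.
entries = []
with open("MoE.jsonl", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line:
            entries.append(json.loads(line))

print(f"loaded {len(entries)} entries")

For a single-word download like this one the file holds one entry per part of speech, so the list will usually be short; the same loop scales to the full-dictionary dumps.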
This page is part of the kaikki.org machine-readable English dictionary. This dictionary is based on structured data extracted on 2025-02-17 from the enwiktionary dump dated 2025-02-02 using wiktextract (ca09fec and c40eb85). The data shown on this site has been post-processed: various details (e.g., extra categories) have been removed, some information has been disambiguated, and additional data has been merged from other sources. See the raw data download page for the unprocessed wiktextract data.
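The disambiguation step is visible in the first JSON rendering above: items sourced as "w+disamb" carry a "_dis" field whose space-separated numbers appear to be per-sense weights (e.g. "32 49 19" across this entry's three senses). A small sketch under that assumed reading:

import json
import sys

# List disambiguation weights attached to sense categories.  Interpreting
# "_dis" as per-sense weights is an assumption inferred from this entry;
# wiktextract's own documentation is authoritative.  Note that in the raw
# JSONL rendering categories are plain strings, hence the isinstance guard.
entry = json.load(sys.stdin)

for i, sense in enumerate(entry["senses"]):
    for cat in sense.get("categories", []):
        if isinstance(cat, dict) and "_dis" in cat:
            weights = [int(w) for w in cat["_dis"].split()]
            print(f"sense {i}: {cat['name']} {weights}")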
If you use this data in academic research, please cite Tatu Ylonen: Wiktextract: Wiktionary as Machine-Readable Structured Data, Proceedings of the 13th Conference on Language Resources and Evaluation (LREC), pp. 1317-1325, Marseille, 20-25 June 2022. Linking to the relevant page(s) under https://kaikki.org would also be greatly appreciated.