See cross-attention on Wiktionary
{ "etymology_templates": [ { "args": { "1": "en", "2": "cross-", "3": "attention" }, "expansion": "cross- + attention", "name": "af" } ], "etymology_text": "From cross- + attention.", "forms": [ { "form": "cross attention", "tags": [ "alternative" ] } ], "head_templates": [ { "args": { "1": "-" }, "expansion": "cross-attention (uncountable)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "antonyms": [ { "word": "self-attention" } ], "categories": [ { "kind": "other", "name": "English entries with incorrect language header", "parents": [], "source": "w" }, { "kind": "other", "name": "English terms prefixed with cross-", "parents": [], "source": "w" }, { "kind": "other", "name": "Pages with 1 entry", "parents": [], "source": "w" }, { "kind": "other", "name": "Pages with entries", "parents": [], "source": "w" }, { "kind": "other", "langcode": "en", "name": "Machine learning", "orig": "en:Machine learning", "parents": [], "source": "w" } ], "examples": [ { "bold_text_offsets": [ [ 4, 19 ], [ 88, 103 ] ], "ref": "2024, Benoit Liquet, Sarat Moka, Yoni Nazarathy, Mathematical Engineering of Deep Learning, CRC Press, page 286:", "text": "The cross attention layer inside each transformer decoder block is in fact a multi-head cross attention layer […]", "type": "quote" } ], "glosses": [ "A form of attention (machine learning method) where two different input sequences are compared, i.e. the keys and queries differ." ], "id": "en-cross-attention-en-noun-f8FW3H07", "links": [ [ "machine learning", "machine learning" ], [ "attention", "attention" ], [ "input", "input" ], [ "sequence", "sequence" ], [ "key", "key" ], [ "queries", "query" ] ], "qualifier": "machine learning", "raw_glosses": [ "(machine learning) A form of attention (machine learning method) where two different input sequences are compared, i.e. the keys and queries differ." ], "tags": [ "uncountable" ] } ], "word": "cross-attention" }
{ "etymology_templates": [ { "args": { "1": "en", "2": "cross-", "3": "attention" }, "expansion": "cross- + attention", "name": "af" } ], "etymology_text": "From cross- + attention.", "forms": [ { "form": "cross attention", "tags": [ "alternative" ] } ], "head_templates": [ { "args": { "1": "-" }, "expansion": "cross-attention (uncountable)", "name": "en-noun" } ], "lang": "English", "lang_code": "en", "pos": "noun", "senses": [ { "antonyms": [ { "word": "self-attention" } ], "categories": [ "English entries with incorrect language header", "English lemmas", "English multiword terms", "English nouns", "English terms prefixed with cross-", "English terms with quotations", "English uncountable nouns", "Pages with 1 entry", "Pages with entries", "en:Machine learning" ], "examples": [ { "bold_text_offsets": [ [ 4, 19 ], [ 88, 103 ] ], "ref": "2024, Benoit Liquet, Sarat Moka, Yoni Nazarathy, Mathematical Engineering of Deep Learning, CRC Press, page 286:", "text": "The cross attention layer inside each transformer decoder block is in fact a multi-head cross attention layer […]", "type": "quote" } ], "glosses": [ "A form of attention (machine learning method) where two different input sequences are compared, i.e. the keys and queries differ." ], "links": [ [ "machine learning", "machine learning" ], [ "attention", "attention" ], [ "input", "input" ], [ "sequence", "sequence" ], [ "key", "key" ], [ "queries", "query" ] ], "qualifier": "machine learning", "raw_glosses": [ "(machine learning) A form of attention (machine learning method) where two different input sequences are compared, i.e. the keys and queries differ." ], "tags": [ "uncountable" ] } ], "word": "cross-attention" }
Download raw JSONL data for "cross-attention" in the All languages combined dictionary (1.6 kB)
This page is part of the kaikki.org machine-readable All languages combined dictionary. This dictionary is based on structured data extracted on 2025-05-19 from the enwiktionary dump dated 2025-05-01 using wiktextract (c3cc510 and 1d3fdbf). The data shown on this site has been post-processed: various details (e.g., extra categories) have been removed, some information has been disambiguated, and additional data has been merged from other sources. See the raw data download page for the unprocessed wiktextract data.
If you use this data in academic research, please cite Tatu Ylonen: Wiktextract: Wiktionary as Machine-Readable Structured Data, Proceedings of the 13th Conference on Language Resources and Evaluation (LREC), pp. 1317-1325, Marseille, 20-25 June 2022. Linking to the relevant page(s) under https://kaikki.org would also be greatly appreciated.