diff --git a/demo/dask-scl-poc.ipynb b/demo/dask-scl-poc.ipynb
new file mode 100644
index 00000000..a52412e3
--- /dev/null
+++ b/demo/dask-scl-poc.ipynb
@@ -0,0 +1,8698 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "MJUe",
+ "metadata": {},
+ "source": [
+ "# Semantique with Dask:\n",
+ "# Sentinel-2 Scene Classification Layer PoC\n",
+ "\n",
+ "Simple cloud & snow time-series and filtering. Based on other semantique demo recipes."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "vblA",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/javascript": [
+ "(function(root) {\n",
+ " function now() {\n",
+ " return new Date();\n",
+ " }\n",
+ "\n",
+ " const force = true;\n",
+ " const py_version = '3.8.0'.replace('rc', '-rc.').replace('.dev', '-dev.');\n",
+ " const reloading = false;\n",
+ " const Bokeh = root.Bokeh;\n",
+ "\n",
+ " // Set a timeout for this load but only if we are not already initializing\n",
+ " if (typeof (root._bokeh_timeout) === \"undefined\" || (force || !root._bokeh_is_initializing)) {\n",
+ " root._bokeh_timeout = Date.now() + 5000;\n",
+ " root._bokeh_failed_load = false;\n",
+ " }\n",
+ "\n",
+ " function run_callbacks() {\n",
+ " try {\n",
+ " root._bokeh_onload_callbacks.forEach(function(callback) {\n",
+ " if (callback != null)\n",
+ " callback();\n",
+ " });\n",
+ " } finally {\n",
+ " delete root._bokeh_onload_callbacks;\n",
+ " }\n",
+ " console.debug(\"Bokeh: all callbacks have finished\");\n",
+ " }\n",
+ "\n",
+ " function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n",
+ " if (css_urls == null) css_urls = [];\n",
+ " if (js_urls == null) js_urls = [];\n",
+ " if (js_modules == null) js_modules = [];\n",
+ " if (js_exports == null) js_exports = {};\n",
+ "\n",
+ " root._bokeh_onload_callbacks.push(callback);\n",
+ "\n",
+ " if (root._bokeh_is_loading > 0) {\n",
+ " // Don't load bokeh if it is still initializing\n",
+ " console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n",
+ " return null;\n",
+ " } else if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n",
+ " // There is nothing to load\n",
+ " run_callbacks();\n",
+ " return null;\n",
+ " }\n",
+ "\n",
+ " function on_load() {\n",
+ " root._bokeh_is_loading--;\n",
+ " if (root._bokeh_is_loading === 0) {\n",
+ " console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n",
+ " run_callbacks()\n",
+ " }\n",
+ " }\n",
+ " window._bokeh_on_load = on_load\n",
+ "\n",
+ " function on_error(e) {\n",
+ " const src_el = e.srcElement\n",
+ " console.error(\"failed to load \" + (src_el.href || src_el.src));\n",
+ " }\n",
+ "\n",
+ " const skip = [];\n",
+ " if (window.requirejs) {\n",
+ " window.requirejs.config({'packages': {}, 'paths': {}, 'shim': {}});\n",
+ " root._bokeh_is_loading = css_urls.length + 0;\n",
+ " } else {\n",
+ " root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n",
+ " }\n",
+ "\n",
+ " const existing_stylesheets = []\n",
+ " const links = document.getElementsByTagName('link')\n",
+ " for (let i = 0; i < links.length; i++) {\n",
+ " const link = links[i]\n",
+ " if (link.href != null) {\n",
+ " existing_stylesheets.push(link.href)\n",
+ " }\n",
+ " }\n",
+ " for (let i = 0; i < css_urls.length; i++) {\n",
+ " const url = css_urls[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (existing_stylesheets.indexOf(escaped) !== -1) {\n",
+ " on_load()\n",
+ " continue;\n",
+ " }\n",
+ " const element = document.createElement(\"link\");\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.rel = \"stylesheet\";\n",
+ " element.type = \"text/css\";\n",
+ " element.href = url;\n",
+ " console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n",
+ " document.body.appendChild(element);\n",
+ " } var existing_scripts = []\n",
+ " const scripts = document.getElementsByTagName('script')\n",
+ " for (let i = 0; i < scripts.length; i++) {\n",
+ " var script = scripts[i]\n",
+ " if (script.src != null) {\n",
+ " existing_scripts.push(script.src)\n",
+ " }\n",
+ " }\n",
+ " for (let i = 0; i < js_urls.length; i++) {\n",
+ " const url = js_urls[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " const element = document.createElement('script');\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.src = url;\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " for (let i = 0; i < js_modules.length; i++) {\n",
+ " const url = js_modules[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " var element = document.createElement('script');\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.src = url;\n",
+ " element.type = \"module\";\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " for (const name in js_exports) {\n",
+ " const url = js_exports[name];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) >= 0 || root[name] != null) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " var element = document.createElement('script');\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.type = \"module\";\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " element.textContent = `\n",
+ " import ${name} from \"${url}\"\n",
+ " window.${name} = ${name}\n",
+ " window._bokeh_on_load()\n",
+ " `\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " if (!js_urls.length && !js_modules.length) {\n",
+ " on_load()\n",
+ " }\n",
+ " };\n",
+ "\n",
+ " function inject_raw_css(css) {\n",
+ " const element = document.createElement(\"style\");\n",
+ " element.appendChild(document.createTextNode(css));\n",
+ " document.body.appendChild(element);\n",
+ " }\n",
+ "\n",
+ " const js_urls = [\"https://cdn.holoviz.org/panel/1.8.2/dist/bundled/reactiveesm/es-module-shims@^1.10.0/dist/es-module-shims.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.8.0.min.js\", \"https://cdn.holoviz.org/panel/1.8.2/dist/panel.min.js\"];\n",
+ " const js_modules = [];\n",
+ " const js_exports = {};\n",
+ " const css_urls = [];\n",
+ " const inline_js = [ function(Bokeh) {\n",
+ " Bokeh.set_log_level(\"info\");\n",
+ " },\n",
+ "function(Bokeh) {} // ensure no trailing comma for IE\n",
+ " ];\n",
+ "\n",
+ " function run_inline_js() {\n",
+ " if ((root.Bokeh !== undefined) || (force === true)) {\n",
+ " for (let i = 0; i < inline_js.length; i++) {\n",
+ " try {\n",
+ " inline_js[i].call(root, root.Bokeh);\n",
+ " } catch(e) {\n",
+ " if (!reloading) {\n",
+ " throw e;\n",
+ " }\n",
+ " }\n",
+ " }\n",
+ " // Cache old bokeh versions\n",
+ " if (Bokeh != undefined && !reloading) {\n",
+ " var NewBokeh = root.Bokeh;\n",
+ " if (Bokeh.versions === undefined) {\n",
+ " Bokeh.versions = new Map();\n",
+ " }\n",
+ " if (NewBokeh.version !== Bokeh.version) {\n",
+ " Bokeh.versions.set(NewBokeh.version, NewBokeh)\n",
+ " }\n",
+ " root.Bokeh = Bokeh;\n",
+ " }\n",
+ " } else if (Date.now() < root._bokeh_timeout) {\n",
+ " setTimeout(run_inline_js, 100);\n",
+ " } else if (!root._bokeh_failed_load) {\n",
+ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n",
+ " root._bokeh_failed_load = true;\n",
+ " }\n",
+ " root._bokeh_is_initializing = false\n",
+ " }\n",
+ "\n",
+ " function load_or_wait() {\n",
+ " // Implement a backoff loop that tries to ensure we do not load multiple\n",
+ " // versions of Bokeh and its dependencies at the same time.\n",
+ " // In recent versions we use the root._bokeh_is_initializing flag\n",
+ " // to determine whether there is an ongoing attempt to initialize\n",
+ " // bokeh, however for backward compatibility we also try to ensure\n",
+ " // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n",
+ " // before older versions are fully initialized.\n",
+ " if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n",
+ " // If the timeout and bokeh was not successfully loaded we reset\n",
+ " // everything and try loading again\n",
+ " root._bokeh_timeout = Date.now() + 5000;\n",
+ " root._bokeh_is_initializing = false;\n",
+ " root._bokeh_onload_callbacks = undefined;\n",
+ " root._bokeh_is_loading = 0\n",
+ " console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n",
+ " load_or_wait();\n",
+ " } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n",
+ " setTimeout(load_or_wait, 100);\n",
+ " } else {\n",
+ " root._bokeh_is_initializing = true\n",
+ " root._bokeh_onload_callbacks = []\n",
+ " const bokeh_loaded = root.Bokeh != null && (root.Bokeh.version === py_version || (root.Bokeh.versions !== undefined && root.Bokeh.versions.has(py_version)));\n",
+ " if (!reloading && !bokeh_loaded) {\n",
+ " if (root.Bokeh) {\n",
+ " root.Bokeh = undefined;\n",
+ " }\n",
+ " console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n",
+ " }\n",
+ " load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n",
+ " console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n",
+ " run_inline_js();\n",
+ " });\n",
+ " }\n",
+ " }\n",
+ " // Give older versions of the autoload script a head-start to ensure\n",
+ " // they initialize before we start loading newer version.\n",
+ " setTimeout(load_or_wait, 100)\n",
+ "}(window));"
+ ],
+ "application/vnd.holoviews_load.v0+json": "(function(root) {\n function now() {\n return new Date();\n }\n\n const force = true;\n const py_version = '3.8.0'.replace('rc', '-rc.').replace('.dev', '-dev.');\n const reloading = false;\n const Bokeh = root.Bokeh;\n\n // Set a timeout for this load but only if we are not already initializing\n if (typeof (root._bokeh_timeout) === \"undefined\" || (force || !root._bokeh_is_initializing)) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks;\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n if (js_modules == null) js_modules = [];\n if (js_exports == null) js_exports = {};\n\n root._bokeh_onload_callbacks.push(callback);\n\n if (root._bokeh_is_loading > 0) {\n // Don't load bokeh if it is still initializing\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n } else if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n // There is nothing to load\n run_callbacks();\n return null;\n }\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n window._bokeh_on_load = on_load\n\n function on_error(e) {\n const src_el = e.srcElement\n console.error(\"failed to load \" + (src_el.href || src_el.src));\n }\n\n const skip = [];\n if (window.requirejs) {\n window.requirejs.config({'packages': {}, 'paths': {}, 'shim': {}});\n root._bokeh_is_loading = css_urls.length + 0;\n } else {\n root._bokeh_is_loading = 
css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n }\n\n const existing_stylesheets = []\n const links = document.getElementsByTagName('link')\n for (let i = 0; i < links.length; i++) {\n const link = links[i]\n if (link.href != null) {\n existing_stylesheets.push(link.href)\n }\n }\n for (let i = 0; i < css_urls.length; i++) {\n const url = css_urls[i];\n const escaped = encodeURI(url)\n if (existing_stylesheets.indexOf(escaped) !== -1) {\n on_load()\n continue;\n }\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error;\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n } var existing_scripts = []\n const scripts = document.getElementsByTagName('script')\n for (let i = 0; i < scripts.length; i++) {\n var script = scripts[i]\n if (script.src != null) {\n existing_scripts.push(script.src)\n }\n }\n for (let i = 0; i < js_urls.length; i++) {\n const url = js_urls[i];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n const element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (let i = 0; i < js_modules.length; i++) {\n const url = js_modules[i];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n element.type = 
\"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (const name in js_exports) {\n const url = js_exports[name];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) >= 0 || root[name] != null) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n var element = document.createElement('script');\n element.onerror = on_error;\n element.async = false;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n element.textContent = `\n import ${name} from \"${url}\"\n window.${name} = ${name}\n window._bokeh_on_load()\n `\n document.head.appendChild(element);\n }\n if (!js_urls.length && !js_modules.length) {\n on_load()\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n const js_urls = [\"https://cdn.holoviz.org/panel/1.8.2/dist/bundled/reactiveesm/es-module-shims@^1.10.0/dist/es-module-shims.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.8.0.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.8.0.min.js\", \"https://cdn.holoviz.org/panel/1.8.2/dist/panel.min.js\"];\n const js_modules = [];\n const js_exports = {};\n const css_urls = [];\n const inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {} // ensure no trailing comma for IE\n ];\n\n function run_inline_js() {\n if ((root.Bokeh !== undefined) || (force === true)) {\n for (let i = 0; i < inline_js.length; i++) {\n try {\n inline_js[i].call(root, root.Bokeh);\n } catch(e) {\n if (!reloading) {\n throw e;\n }\n }\n }\n // Cache old bokeh versions\n if (Bokeh != undefined && !reloading) {\n var NewBokeh = root.Bokeh;\n if (Bokeh.versions 
=== undefined) {\n Bokeh.versions = new Map();\n }\n if (NewBokeh.version !== Bokeh.version) {\n Bokeh.versions.set(NewBokeh.version, NewBokeh)\n }\n root.Bokeh = Bokeh;\n }\n } else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n }\n root._bokeh_is_initializing = false\n }\n\n function load_or_wait() {\n // Implement a backoff loop that tries to ensure we do not load multiple\n // versions of Bokeh and its dependencies at the same time.\n // In recent versions we use the root._bokeh_is_initializing flag\n // to determine whether there is an ongoing attempt to initialize\n // bokeh, however for backward compatibility we also try to ensure\n // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n // before older versions are fully initialized.\n if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n // If the timeout and bokeh was not successfully loaded we reset\n // everything and try loading again\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_is_initializing = false;\n root._bokeh_onload_callbacks = undefined;\n root._bokeh_is_loading = 0\n console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n load_or_wait();\n } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n setTimeout(load_or_wait, 100);\n } else {\n root._bokeh_is_initializing = true\n root._bokeh_onload_callbacks = []\n const bokeh_loaded = root.Bokeh != null && (root.Bokeh.version === py_version || (root.Bokeh.versions !== undefined && root.Bokeh.versions.has(py_version)));\n if (!reloading && !bokeh_loaded) {\n if (root.Bokeh) {\n root.Bokeh = undefined;\n }\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", 
now());\n }\n load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n run_inline_js();\n });\n }\n }\n // Give older versions of the autoload script a head-start to ensure\n // they initialize before we start loading newer version.\n setTimeout(load_or_wait, 100)\n}(window));"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/javascript": [
+ "\n",
+ "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n",
+ " window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n",
+ "}\n",
+ "\n",
+ "\n",
+ " function JupyterCommManager() {\n",
+ " }\n",
+ "\n",
+ " JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n",
+ " if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+ " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+ " comm_manager.register_target(comm_id, function(comm) {\n",
+ " comm.on_msg(msg_handler);\n",
+ " });\n",
+ " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+ " window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n",
+ " comm.onMsg = msg_handler;\n",
+ " });\n",
+ " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+ " google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n",
+ " var messages = comm.messages[Symbol.asyncIterator]();\n",
+ " function processIteratorResult(result) {\n",
+ " var message = result.value;\n",
+ " var content = {data: message.data, comm_id};\n",
+ " var buffers = []\n",
+ " for (var buffer of message.buffers || []) {\n",
+ " buffers.push(new DataView(buffer))\n",
+ " }\n",
+ " var metadata = message.metadata || {};\n",
+ " var msg = {content, buffers, metadata}\n",
+ " msg_handler(msg);\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " })\n",
+ " }\n",
+ " }\n",
+ "\n",
+ " JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n",
+ " if (comm_id in window.PyViz.comms) {\n",
+ " return window.PyViz.comms[comm_id];\n",
+ " } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+ " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+ " var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n",
+ " if (msg_handler) {\n",
+ " comm.on_msg(msg_handler);\n",
+ " }\n",
+ " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+ " var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n",
+ " let retries = 0;\n",
+ " const open = () => {\n",
+ " if (comm.active) {\n",
+ " comm.open();\n",
+ " } else if (retries > 3) {\n",
+ " console.warn('Comm target never activated')\n",
+ " } else {\n",
+ " retries += 1\n",
+ " setTimeout(open, 500)\n",
+ " }\n",
+ " }\n",
+ " if (comm.active) {\n",
+ " comm.open();\n",
+ " } else {\n",
+ " setTimeout(open, 500)\n",
+ " }\n",
+ " if (msg_handler) {\n",
+ " comm.onMsg = msg_handler;\n",
+ " }\n",
+ " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+ " var comm_promise = google.colab.kernel.comms.open(comm_id)\n",
+ " comm_promise.then((comm) => {\n",
+ " window.PyViz.comms[comm_id] = comm;\n",
+ " if (msg_handler) {\n",
+ " var messages = comm.messages[Symbol.asyncIterator]();\n",
+ " function processIteratorResult(result) {\n",
+ " var message = result.value;\n",
+ " var content = {data: message.data};\n",
+ " var metadata = message.metadata || {comm_id};\n",
+ " var msg = {content, metadata}\n",
+ " msg_handler(msg);\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " })\n",
+ " var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n",
+ " return comm_promise.then((comm) => {\n",
+ " comm.send(data, metadata, buffers, disposeOnDone);\n",
+ " });\n",
+ " };\n",
+ " var comm = {\n",
+ " send: sendClosure\n",
+ " };\n",
+ " }\n",
+ " window.PyViz.comms[comm_id] = comm;\n",
+ " return comm;\n",
+ " }\n",
+ " window.PyViz.comm_manager = new JupyterCommManager();\n",
+ " \n",
+ "\n",
+ "\n",
+ "var JS_MIME_TYPE = 'application/javascript';\n",
+ "var HTML_MIME_TYPE = 'text/html';\n",
+ "var EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\n",
+ "var CLASS_NAME = 'output';\n",
+ "\n",
+ "/**\n",
+ " * Render data to the DOM node\n",
+ " */\n",
+ "function render(props, node) {\n",
+ " var div = document.createElement(\"div\");\n",
+ " var script = document.createElement(\"script\");\n",
+ " node.appendChild(div);\n",
+ " node.appendChild(script);\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle when a new output is added\n",
+ " */\n",
+ "function handle_add_output(event, handle) {\n",
+ " var output_area = handle.output_area;\n",
+ " var output = handle.output;\n",
+ " if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n",
+ " return\n",
+ " }\n",
+ " var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n",
+ " var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n",
+ " if (id !== undefined) {\n",
+ " var nchildren = toinsert.length;\n",
+ " var html_node = toinsert[nchildren-1].children[0];\n",
+ " html_node.innerHTML = output.data[HTML_MIME_TYPE];\n",
+ " var scripts = [];\n",
+ " var nodelist = html_node.querySelectorAll(\"script\");\n",
+ " for (var i in nodelist) {\n",
+ " if (nodelist.hasOwnProperty(i)) {\n",
+ " scripts.push(nodelist[i])\n",
+ " }\n",
+ " }\n",
+ "\n",
+ " scripts.forEach( function (oldScript) {\n",
+ " var newScript = document.createElement(\"script\");\n",
+ " var attrs = [];\n",
+ " var nodemap = oldScript.attributes;\n",
+ " for (var j in nodemap) {\n",
+ " if (nodemap.hasOwnProperty(j)) {\n",
+ " attrs.push(nodemap[j])\n",
+ " }\n",
+ " }\n",
+ " attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n",
+ " newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n",
+ " oldScript.parentNode.replaceChild(newScript, oldScript);\n",
+ " });\n",
+ " if (JS_MIME_TYPE in output.data) {\n",
+ " toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n",
+ " }\n",
+ " output_area._hv_plot_id = id;\n",
+ " if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n",
+ " window.PyViz.plot_index[id] = Bokeh.index[id];\n",
+ " } else {\n",
+ " window.PyViz.plot_index[id] = null;\n",
+ " }\n",
+ " } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n",
+ " var bk_div = document.createElement(\"div\");\n",
+ " bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n",
+ " var script_attrs = bk_div.children[0].attributes;\n",
+ " for (var i = 0; i < script_attrs.length; i++) {\n",
+ " toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n",
+ " }\n",
+ " // store reference to server id on output_area\n",
+ " output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle when an output is cleared or removed\n",
+ " */\n",
+ "function handle_clear_output(event, handle) {\n",
+ " var id = handle.cell.output_area._hv_plot_id;\n",
+ " var server_id = handle.cell.output_area._bokeh_server_id;\n",
+ " if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n",
+ " var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n",
+ " if (server_id !== null) {\n",
+ " comm.send({event_type: 'server_delete', 'id': server_id});\n",
+ " return;\n",
+ " } else if (comm !== null) {\n",
+ " comm.send({event_type: 'delete', 'id': id});\n",
+ " }\n",
+ " delete PyViz.plot_index[id];\n",
+ " if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n",
+ " var doc = window.Bokeh.index[id].model.document\n",
+ " doc.clear();\n",
+ " const i = window.Bokeh.documents.indexOf(doc);\n",
+ " if (i > -1) {\n",
+ " window.Bokeh.documents.splice(i, 1);\n",
+ " }\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle kernel restart event\n",
+ " */\n",
+ "function handle_kernel_cleanup(event, handle) {\n",
+ " delete PyViz.comms[\"hv-extension-comm\"];\n",
+ " window.PyViz.plot_index = {}\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle update_display_data messages\n",
+ " */\n",
+ "function handle_update_output(event, handle) {\n",
+ " handle_clear_output(event, {cell: {output_area: handle.output_area}})\n",
+ " handle_add_output(event, handle)\n",
+ "}\n",
+ "\n",
+ "function register_renderer(events, OutputArea) {\n",
+ " function append_mime(data, metadata, element) {\n",
+ " // create a DOM node to render to\n",
+ " var toinsert = this.create_output_subarea(\n",
+ " metadata,\n",
+ " CLASS_NAME,\n",
+ " EXEC_MIME_TYPE\n",
+ " );\n",
+ " this.keyboard_manager.register_events(toinsert);\n",
+ " // Render to node\n",
+ " var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n",
+ " render(props, toinsert[0]);\n",
+ " element.append(toinsert);\n",
+ " return toinsert\n",
+ " }\n",
+ "\n",
+ " events.on('output_added.OutputArea', handle_add_output);\n",
+ " events.on('output_updated.OutputArea', handle_update_output);\n",
+ " events.on('clear_output.CodeCell', handle_clear_output);\n",
+ " events.on('delete.Cell', handle_clear_output);\n",
+ " events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n",
+ "\n",
+ " OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n",
+ " safe: true,\n",
+ " index: 0\n",
+ " });\n",
+ "}\n",
+ "\n",
+ "if (window.Jupyter !== undefined) {\n",
+ " try {\n",
+ " var events = require('base/js/events');\n",
+ " var OutputArea = require('notebook/js/outputarea').OutputArea;\n",
+ " if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n",
+ " register_renderer(events, OutputArea);\n",
+ " }\n",
+ " } catch(err) {\n",
+ " }\n",
+ "}\n"
+ ],
+ "application/vnd.holoviews_load.v0+json": "\nif ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n}\n\n\n function JupyterCommManager() {\n }\n\n JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n comm_manager.register_target(comm_id, function(comm) {\n comm.on_msg(msg_handler);\n });\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n comm.onMsg = msg_handler;\n });\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data, comm_id};\n var buffers = []\n for (var buffer of message.buffers || []) {\n buffers.push(new DataView(buffer))\n }\n var metadata = message.metadata || {};\n var msg = {content, buffers, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n })\n }\n }\n\n JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n if (comm_id in window.PyViz.comms) {\n return window.PyViz.comms[comm_id];\n } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n if (msg_handler) {\n comm.on_msg(msg_handler);\n }\n } else if ((plot_id in window.PyViz.kernels) && 
(window.PyViz.kernels[plot_id])) {\n var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n let retries = 0;\n const open = () => {\n if (comm.active) {\n comm.open();\n } else if (retries > 3) {\n console.warn('Comm target never activated')\n } else {\n retries += 1\n setTimeout(open, 500)\n }\n }\n if (comm.active) {\n comm.open();\n } else {\n setTimeout(open, 500)\n }\n if (msg_handler) {\n comm.onMsg = msg_handler;\n }\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n var comm_promise = google.colab.kernel.comms.open(comm_id)\n comm_promise.then((comm) => {\n window.PyViz.comms[comm_id] = comm;\n if (msg_handler) {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data};\n var metadata = message.metadata || {comm_id};\n var msg = {content, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n }\n })\n var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n return comm_promise.then((comm) => {\n comm.send(data, metadata, buffers, disposeOnDone);\n });\n };\n var comm = {\n send: sendClosure\n };\n }\n window.PyViz.comms[comm_id] = comm;\n return comm;\n }\n window.PyViz.comm_manager = new JupyterCommManager();\n \n\n\nvar JS_MIME_TYPE = 'application/javascript';\nvar HTML_MIME_TYPE = 'text/html';\nvar EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\nvar CLASS_NAME = 'output';\n\n/**\n * Render data to the DOM node\n */\nfunction render(props, node) {\n var div = document.createElement(\"div\");\n var script = document.createElement(\"script\");\n node.appendChild(div);\n node.appendChild(script);\n}\n\n/**\n * Handle when a new output is added\n */\nfunction handle_add_output(event, handle) {\n var output_area = handle.output_area;\n var output = handle.output;\n if ((output.data == undefined) || 
(!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n return\n }\n var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n if (id !== undefined) {\n var nchildren = toinsert.length;\n var html_node = toinsert[nchildren-1].children[0];\n html_node.innerHTML = output.data[HTML_MIME_TYPE];\n var scripts = [];\n var nodelist = html_node.querySelectorAll(\"script\");\n for (var i in nodelist) {\n if (nodelist.hasOwnProperty(i)) {\n scripts.push(nodelist[i])\n }\n }\n\n scripts.forEach( function (oldScript) {\n var newScript = document.createElement(\"script\");\n var attrs = [];\n var nodemap = oldScript.attributes;\n for (var j in nodemap) {\n if (nodemap.hasOwnProperty(j)) {\n attrs.push(nodemap[j])\n }\n }\n attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n oldScript.parentNode.replaceChild(newScript, oldScript);\n });\n if (JS_MIME_TYPE in output.data) {\n toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n }\n output_area._hv_plot_id = id;\n if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n window.PyViz.plot_index[id] = Bokeh.index[id];\n } else {\n window.PyViz.plot_index[id] = null;\n }\n } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n var bk_div = document.createElement(\"div\");\n bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n var script_attrs = bk_div.children[0].attributes;\n for (var i = 0; i < script_attrs.length; i++) {\n toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n }\n // store reference to server id on output_area\n output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n }\n}\n\n/**\n * Handle when an output is cleared or removed\n */\nfunction handle_clear_output(event, handle) {\n var id = handle.cell.output_area._hv_plot_id;\n var 
server_id = handle.cell.output_area._bokeh_server_id;\n if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n if (server_id !== null) {\n comm.send({event_type: 'server_delete', 'id': server_id});\n return;\n } else if (comm !== null) {\n comm.send({event_type: 'delete', 'id': id});\n }\n delete PyViz.plot_index[id];\n if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n var doc = window.Bokeh.index[id].model.document\n doc.clear();\n const i = window.Bokeh.documents.indexOf(doc);\n if (i > -1) {\n window.Bokeh.documents.splice(i, 1);\n }\n }\n}\n\n/**\n * Handle kernel restart event\n */\nfunction handle_kernel_cleanup(event, handle) {\n delete PyViz.comms[\"hv-extension-comm\"];\n window.PyViz.plot_index = {}\n}\n\n/**\n * Handle update_display_data messages\n */\nfunction handle_update_output(event, handle) {\n handle_clear_output(event, {cell: {output_area: handle.output_area}})\n handle_add_output(event, handle)\n}\n\nfunction register_renderer(events, OutputArea) {\n function append_mime(data, metadata, element) {\n // create a DOM node to render to\n var toinsert = this.create_output_subarea(\n metadata,\n CLASS_NAME,\n EXEC_MIME_TYPE\n );\n this.keyboard_manager.register_events(toinsert);\n // Render to node\n var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n render(props, toinsert[0]);\n element.append(toinsert);\n return toinsert\n }\n\n events.on('output_added.OutputArea', handle_add_output);\n events.on('output_updated.OutputArea', handle_update_output);\n events.on('clear_output.CodeCell', handle_clear_output);\n events.on('delete.Cell', handle_clear_output);\n events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n\n OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n safe: true,\n index: 0\n });\n}\n\nif (window.Jupyter !== undefined) {\n 
try {\n var events = require('base/js/events');\n var OutputArea = require('notebook/js/outputarea').OutputArea;\n if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n register_renderer(events, OutputArea);\n }\n } catch(err) {\n }\n}\n"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.holoviews_exec.v0+json": "",
+ "text/html": [
+ "
\n",
+ ""
+ ]
+ },
+ "metadata": {
+ "application/vnd.holoviews_exec.v0+json": {
+ "id": "696abcde-f3d1-4f4f-bc83-e06dfcc90ff7"
+ }
+ },
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/javascript": [
+ "(function(root) {\n",
+ " function now() {\n",
+ " return new Date();\n",
+ " }\n",
+ "\n",
+ " const force = false;\n",
+ " const py_version = '3.8.0'.replace('rc', '-rc.').replace('.dev', '-dev.');\n",
+ " const reloading = true;\n",
+ " const Bokeh = root.Bokeh;\n",
+ "\n",
+ " // Set a timeout for this load but only if we are not already initializing\n",
+ " if (typeof (root._bokeh_timeout) === \"undefined\" || (force || !root._bokeh_is_initializing)) {\n",
+ " root._bokeh_timeout = Date.now() + 5000;\n",
+ " root._bokeh_failed_load = false;\n",
+ " }\n",
+ "\n",
+ " function run_callbacks() {\n",
+ " try {\n",
+ " root._bokeh_onload_callbacks.forEach(function(callback) {\n",
+ " if (callback != null)\n",
+ " callback();\n",
+ " });\n",
+ " } finally {\n",
+ " delete root._bokeh_onload_callbacks;\n",
+ " }\n",
+ " console.debug(\"Bokeh: all callbacks have finished\");\n",
+ " }\n",
+ "\n",
+ " function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n",
+ " if (css_urls == null) css_urls = [];\n",
+ " if (js_urls == null) js_urls = [];\n",
+ " if (js_modules == null) js_modules = [];\n",
+ " if (js_exports == null) js_exports = {};\n",
+ "\n",
+ " root._bokeh_onload_callbacks.push(callback);\n",
+ "\n",
+ " if (root._bokeh_is_loading > 0) {\n",
+ " // Don't load bokeh if it is still initializing\n",
+ " console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n",
+ " return null;\n",
+ " } else if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n",
+ " // There is nothing to load\n",
+ " run_callbacks();\n",
+ " return null;\n",
+ " }\n",
+ "\n",
+ " function on_load() {\n",
+ " root._bokeh_is_loading--;\n",
+ " if (root._bokeh_is_loading === 0) {\n",
+ " console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n",
+ " run_callbacks()\n",
+ " }\n",
+ " }\n",
+ " window._bokeh_on_load = on_load\n",
+ "\n",
+ " function on_error(e) {\n",
+ " const src_el = e.srcElement\n",
+ " console.error(\"failed to load \" + (src_el.href || src_el.src));\n",
+ " }\n",
+ "\n",
+ " const skip = [];\n",
+ " if (window.requirejs) {\n",
+ " window.requirejs.config({'packages': {}, 'paths': {}, 'shim': {}});\n",
+ " root._bokeh_is_loading = css_urls.length + 0;\n",
+ " } else {\n",
+ " root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n",
+ " }\n",
+ "\n",
+ " const existing_stylesheets = []\n",
+ " const links = document.getElementsByTagName('link')\n",
+ " for (let i = 0; i < links.length; i++) {\n",
+ " const link = links[i]\n",
+ " if (link.href != null) {\n",
+ " existing_stylesheets.push(link.href)\n",
+ " }\n",
+ " }\n",
+ " for (let i = 0; i < css_urls.length; i++) {\n",
+ " const url = css_urls[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (existing_stylesheets.indexOf(escaped) !== -1) {\n",
+ " on_load()\n",
+ " continue;\n",
+ " }\n",
+ " const element = document.createElement(\"link\");\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.rel = \"stylesheet\";\n",
+ " element.type = \"text/css\";\n",
+ " element.href = url;\n",
+ " console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n",
+ " document.body.appendChild(element);\n",
+ " } var existing_scripts = []\n",
+ " const scripts = document.getElementsByTagName('script')\n",
+ " for (let i = 0; i < scripts.length; i++) {\n",
+ " var script = scripts[i]\n",
+ " if (script.src != null) {\n",
+ " existing_scripts.push(script.src)\n",
+ " }\n",
+ " }\n",
+ " for (let i = 0; i < js_urls.length; i++) {\n",
+ " const url = js_urls[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " const element = document.createElement('script');\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.src = url;\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " for (let i = 0; i < js_modules.length; i++) {\n",
+ " const url = js_modules[i];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " var element = document.createElement('script');\n",
+ " element.onload = on_load;\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.src = url;\n",
+ " element.type = \"module\";\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " for (const name in js_exports) {\n",
+ " const url = js_exports[name];\n",
+ " const escaped = encodeURI(url)\n",
+ " if (skip.indexOf(escaped) >= 0 || root[name] != null) {\n",
+ " if (!window.requirejs) {\n",
+ " on_load();\n",
+ " }\n",
+ " continue;\n",
+ " }\n",
+ " var element = document.createElement('script');\n",
+ " element.onerror = on_error;\n",
+ " element.async = false;\n",
+ " element.type = \"module\";\n",
+ " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+ " element.textContent = `\n",
+ " import ${name} from \"${url}\"\n",
+ " window.${name} = ${name}\n",
+ " window._bokeh_on_load()\n",
+ " `\n",
+ " document.head.appendChild(element);\n",
+ " }\n",
+ " if (!js_urls.length && !js_modules.length) {\n",
+ " on_load()\n",
+ " }\n",
+ " };\n",
+ "\n",
+ " function inject_raw_css(css) {\n",
+ " const element = document.createElement(\"style\");\n",
+ " element.appendChild(document.createTextNode(css));\n",
+ " document.body.appendChild(element);\n",
+ " }\n",
+ "\n",
+ " const js_urls = [\"https://cdn.holoviz.org/panel/1.8.2/dist/bundled/reactiveesm/es-module-shims@^1.10.0/dist/es-module-shims.min.js\"];\n",
+ " const js_modules = [];\n",
+ " const js_exports = {};\n",
+ " const css_urls = [];\n",
+ " const inline_js = [ function(Bokeh) {\n",
+ " Bokeh.set_log_level(\"info\");\n",
+ " },\n",
+ "function(Bokeh) {} // ensure no trailing comma for IE\n",
+ " ];\n",
+ "\n",
+ " function run_inline_js() {\n",
+ " if ((root.Bokeh !== undefined) || (force === true)) {\n",
+ " for (let i = 0; i < inline_js.length; i++) {\n",
+ " try {\n",
+ " inline_js[i].call(root, root.Bokeh);\n",
+ " } catch(e) {\n",
+ " if (!reloading) {\n",
+ " throw e;\n",
+ " }\n",
+ " }\n",
+ " }\n",
+ " // Cache old bokeh versions\n",
+ " if (Bokeh != undefined && !reloading) {\n",
+ " var NewBokeh = root.Bokeh;\n",
+ " if (Bokeh.versions === undefined) {\n",
+ " Bokeh.versions = new Map();\n",
+ " }\n",
+ " if (NewBokeh.version !== Bokeh.version) {\n",
+ " Bokeh.versions.set(NewBokeh.version, NewBokeh)\n",
+ " }\n",
+ " root.Bokeh = Bokeh;\n",
+ " }\n",
+ " } else if (Date.now() < root._bokeh_timeout) {\n",
+ " setTimeout(run_inline_js, 100);\n",
+ " } else if (!root._bokeh_failed_load) {\n",
+ " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n",
+ " root._bokeh_failed_load = true;\n",
+ " }\n",
+ " root._bokeh_is_initializing = false\n",
+ " }\n",
+ "\n",
+ " function load_or_wait() {\n",
+ " // Implement a backoff loop that tries to ensure we do not load multiple\n",
+ " // versions of Bokeh and its dependencies at the same time.\n",
+ " // In recent versions we use the root._bokeh_is_initializing flag\n",
+ " // to determine whether there is an ongoing attempt to initialize\n",
+ " // bokeh, however for backward compatibility we also try to ensure\n",
+ " // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n",
+ " // before older versions are fully initialized.\n",
+ " if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n",
+ " // If the timeout and bokeh was not successfully loaded we reset\n",
+ " // everything and try loading again\n",
+ " root._bokeh_timeout = Date.now() + 5000;\n",
+ " root._bokeh_is_initializing = false;\n",
+ " root._bokeh_onload_callbacks = undefined;\n",
+ " root._bokeh_is_loading = 0\n",
+ " console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n",
+ " load_or_wait();\n",
+ " } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n",
+ " setTimeout(load_or_wait, 100);\n",
+ " } else {\n",
+ " root._bokeh_is_initializing = true\n",
+ " root._bokeh_onload_callbacks = []\n",
+ " const bokeh_loaded = root.Bokeh != null && (root.Bokeh.version === py_version || (root.Bokeh.versions !== undefined && root.Bokeh.versions.has(py_version)));\n",
+ " if (!reloading && !bokeh_loaded) {\n",
+ " if (root.Bokeh) {\n",
+ " root.Bokeh = undefined;\n",
+ " }\n",
+ " console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n",
+ " }\n",
+ " load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n",
+ " console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n",
+ " run_inline_js();\n",
+ " });\n",
+ " }\n",
+ " }\n",
+ " // Give older versions of the autoload script a head-start to ensure\n",
+ " // they initialize before we start loading newer version.\n",
+ " setTimeout(load_or_wait, 100)\n",
+ "}(window));"
+ ],
+ "application/vnd.holoviews_load.v0+json": "(function(root) {\n function now() {\n return new Date();\n }\n\n const force = false;\n const py_version = '3.8.0'.replace('rc', '-rc.').replace('.dev', '-dev.');\n const reloading = true;\n const Bokeh = root.Bokeh;\n\n // Set a timeout for this load but only if we are not already initializing\n if (typeof (root._bokeh_timeout) === \"undefined\" || (force || !root._bokeh_is_initializing)) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks;\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n if (js_modules == null) js_modules = [];\n if (js_exports == null) js_exports = {};\n\n root._bokeh_onload_callbacks.push(callback);\n\n if (root._bokeh_is_loading > 0) {\n // Don't load bokeh if it is still initializing\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n } else if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n // There is nothing to load\n run_callbacks();\n return null;\n }\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n window._bokeh_on_load = on_load\n\n function on_error(e) {\n const src_el = e.srcElement\n console.error(\"failed to load \" + (src_el.href || src_el.src));\n }\n\n const skip = [];\n if (window.requirejs) {\n window.requirejs.config({'packages': {}, 'paths': {}, 'shim': {}});\n root._bokeh_is_loading = css_urls.length + 0;\n } else {\n root._bokeh_is_loading = 
css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n }\n\n const existing_stylesheets = []\n const links = document.getElementsByTagName('link')\n for (let i = 0; i < links.length; i++) {\n const link = links[i]\n if (link.href != null) {\n existing_stylesheets.push(link.href)\n }\n }\n for (let i = 0; i < css_urls.length; i++) {\n const url = css_urls[i];\n const escaped = encodeURI(url)\n if (existing_stylesheets.indexOf(escaped) !== -1) {\n on_load()\n continue;\n }\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error;\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n } var existing_scripts = []\n const scripts = document.getElementsByTagName('script')\n for (let i = 0; i < scripts.length; i++) {\n var script = scripts[i]\n if (script.src != null) {\n existing_scripts.push(script.src)\n }\n }\n for (let i = 0; i < js_urls.length; i++) {\n const url = js_urls[i];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n const element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (let i = 0; i < js_modules.length; i++) {\n const url = js_modules[i];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) !== -1 || existing_scripts.indexOf(escaped) !== -1) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n element.type = 
\"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (const name in js_exports) {\n const url = js_exports[name];\n const escaped = encodeURI(url)\n if (skip.indexOf(escaped) >= 0 || root[name] != null) {\n if (!window.requirejs) {\n on_load();\n }\n continue;\n }\n var element = document.createElement('script');\n element.onerror = on_error;\n element.async = false;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n element.textContent = `\n import ${name} from \"${url}\"\n window.${name} = ${name}\n window._bokeh_on_load()\n `\n document.head.appendChild(element);\n }\n if (!js_urls.length && !js_modules.length) {\n on_load()\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n const js_urls = [\"https://cdn.holoviz.org/panel/1.8.2/dist/bundled/reactiveesm/es-module-shims@^1.10.0/dist/es-module-shims.min.js\"];\n const js_modules = [];\n const js_exports = {};\n const css_urls = [];\n const inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {} // ensure no trailing comma for IE\n ];\n\n function run_inline_js() {\n if ((root.Bokeh !== undefined) || (force === true)) {\n for (let i = 0; i < inline_js.length; i++) {\n try {\n inline_js[i].call(root, root.Bokeh);\n } catch(e) {\n if (!reloading) {\n throw e;\n }\n }\n }\n // Cache old bokeh versions\n if (Bokeh != undefined && !reloading) {\n var NewBokeh = root.Bokeh;\n if (Bokeh.versions === undefined) {\n Bokeh.versions = new Map();\n }\n if (NewBokeh.version !== Bokeh.version) {\n Bokeh.versions.set(NewBokeh.version, NewBokeh)\n }\n root.Bokeh = Bokeh;\n }\n } else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n 
console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n }\n root._bokeh_is_initializing = false\n }\n\n function load_or_wait() {\n // Implement a backoff loop that tries to ensure we do not load multiple\n // versions of Bokeh and its dependencies at the same time.\n // In recent versions we use the root._bokeh_is_initializing flag\n // to determine whether there is an ongoing attempt to initialize\n // bokeh, however for backward compatibility we also try to ensure\n // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n // before older versions are fully initialized.\n if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n // If the timeout and bokeh was not successfully loaded we reset\n // everything and try loading again\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_is_initializing = false;\n root._bokeh_onload_callbacks = undefined;\n root._bokeh_is_loading = 0\n console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n load_or_wait();\n } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n setTimeout(load_or_wait, 100);\n } else {\n root._bokeh_is_initializing = true\n root._bokeh_onload_callbacks = []\n const bokeh_loaded = root.Bokeh != null && (root.Bokeh.version === py_version || (root.Bokeh.versions !== undefined && root.Bokeh.versions.has(py_version)));\n if (!reloading && !bokeh_loaded) {\n if (root.Bokeh) {\n root.Bokeh = undefined;\n }\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n }\n load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n run_inline_js();\n });\n }\n }\n // Give older versions of the autoload script a head-start to ensure\n // they initialize before we start loading newer 
version.\n setTimeout(load_or_wait, 100)\n}(window));"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/javascript": [
+ "\n",
+ "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n",
+ " window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n",
+ "}\n",
+ "\n",
+ "\n",
+ " function JupyterCommManager() {\n",
+ " }\n",
+ "\n",
+ " JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n",
+ " if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+ " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+ " comm_manager.register_target(comm_id, function(comm) {\n",
+ " comm.on_msg(msg_handler);\n",
+ " });\n",
+ " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+ " window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n",
+ " comm.onMsg = msg_handler;\n",
+ " });\n",
+ " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+ " google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n",
+ " var messages = comm.messages[Symbol.asyncIterator]();\n",
+ " function processIteratorResult(result) {\n",
+ " var message = result.value;\n",
+ " var content = {data: message.data, comm_id};\n",
+ " var buffers = []\n",
+ " for (var buffer of message.buffers || []) {\n",
+ " buffers.push(new DataView(buffer))\n",
+ " }\n",
+ " var metadata = message.metadata || {};\n",
+ " var msg = {content, buffers, metadata}\n",
+ " msg_handler(msg);\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " })\n",
+ " }\n",
+ " }\n",
+ "\n",
+ " JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n",
+ " if (comm_id in window.PyViz.comms) {\n",
+ " return window.PyViz.comms[comm_id];\n",
+ " } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+ " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+ " var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n",
+ " if (msg_handler) {\n",
+ " comm.on_msg(msg_handler);\n",
+ " }\n",
+ " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+ " var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n",
+ " let retries = 0;\n",
+ " const open = () => {\n",
+ " if (comm.active) {\n",
+ " comm.open();\n",
+ " } else if (retries > 3) {\n",
+ " console.warn('Comm target never activated')\n",
+ " } else {\n",
+ " retries += 1\n",
+ " setTimeout(open, 500)\n",
+ " }\n",
+ " }\n",
+ " if (comm.active) {\n",
+ " comm.open();\n",
+ " } else {\n",
+ " setTimeout(open, 500)\n",
+ " }\n",
+ " if (msg_handler) {\n",
+ " comm.onMsg = msg_handler;\n",
+ " }\n",
+ " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+ " var comm_promise = google.colab.kernel.comms.open(comm_id)\n",
+ " comm_promise.then((comm) => {\n",
+ " window.PyViz.comms[comm_id] = comm;\n",
+ " if (msg_handler) {\n",
+ " var messages = comm.messages[Symbol.asyncIterator]();\n",
+ " function processIteratorResult(result) {\n",
+ " var message = result.value;\n",
+ " var content = {data: message.data};\n",
+ " var metadata = message.metadata || {comm_id};\n",
+ " var msg = {content, metadata}\n",
+ " msg_handler(msg);\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " return messages.next().then(processIteratorResult);\n",
+ " }\n",
+ " })\n",
+ " var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n",
+ " return comm_promise.then((comm) => {\n",
+ " comm.send(data, metadata, buffers, disposeOnDone);\n",
+ " });\n",
+ " };\n",
+ " var comm = {\n",
+ " send: sendClosure\n",
+ " };\n",
+ " }\n",
+ " window.PyViz.comms[comm_id] = comm;\n",
+ " return comm;\n",
+ " }\n",
+ " window.PyViz.comm_manager = new JupyterCommManager();\n",
+ " \n",
+ "\n",
+ "\n",
+ "var JS_MIME_TYPE = 'application/javascript';\n",
+ "var HTML_MIME_TYPE = 'text/html';\n",
+ "var EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\n",
+ "var CLASS_NAME = 'output';\n",
+ "\n",
+ "/**\n",
+ " * Render data to the DOM node\n",
+ " */\n",
+ "function render(props, node) {\n",
+ " var div = document.createElement(\"div\");\n",
+ " var script = document.createElement(\"script\");\n",
+ " node.appendChild(div);\n",
+ " node.appendChild(script);\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle when a new output is added\n",
+ " */\n",
+ "function handle_add_output(event, handle) {\n",
+ " var output_area = handle.output_area;\n",
+ " var output = handle.output;\n",
+ " if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n",
+ " return\n",
+ " }\n",
+ " var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n",
+ " var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n",
+ " if (id !== undefined) {\n",
+ " var nchildren = toinsert.length;\n",
+ " var html_node = toinsert[nchildren-1].children[0];\n",
+ " html_node.innerHTML = output.data[HTML_MIME_TYPE];\n",
+ " var scripts = [];\n",
+ " var nodelist = html_node.querySelectorAll(\"script\");\n",
+ " for (var i in nodelist) {\n",
+ " if (nodelist.hasOwnProperty(i)) {\n",
+ " scripts.push(nodelist[i])\n",
+ " }\n",
+ " }\n",
+ "\n",
+ " scripts.forEach( function (oldScript) {\n",
+ " var newScript = document.createElement(\"script\");\n",
+ " var attrs = [];\n",
+ " var nodemap = oldScript.attributes;\n",
+ " for (var j in nodemap) {\n",
+ " if (nodemap.hasOwnProperty(j)) {\n",
+ " attrs.push(nodemap[j])\n",
+ " }\n",
+ " }\n",
+ " attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n",
+ " newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n",
+ " oldScript.parentNode.replaceChild(newScript, oldScript);\n",
+ " });\n",
+ " if (JS_MIME_TYPE in output.data) {\n",
+ " toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n",
+ " }\n",
+ " output_area._hv_plot_id = id;\n",
+ " if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n",
+ " window.PyViz.plot_index[id] = Bokeh.index[id];\n",
+ " } else {\n",
+ " window.PyViz.plot_index[id] = null;\n",
+ " }\n",
+ " } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n",
+ " var bk_div = document.createElement(\"div\");\n",
+ " bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n",
+ " var script_attrs = bk_div.children[0].attributes;\n",
+ " for (var i = 0; i < script_attrs.length; i++) {\n",
+ " toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n",
+ " }\n",
+ " // store reference to server id on output_area\n",
+ " output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle when an output is cleared or removed\n",
+ " */\n",
+ "function handle_clear_output(event, handle) {\n",
+ " var id = handle.cell.output_area._hv_plot_id;\n",
+ " var server_id = handle.cell.output_area._bokeh_server_id;\n",
+ " if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n",
+ " var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n",
+ " if (server_id !== null) {\n",
+ " comm.send({event_type: 'server_delete', 'id': server_id});\n",
+ " return;\n",
+ " } else if (comm !== null) {\n",
+ " comm.send({event_type: 'delete', 'id': id});\n",
+ " }\n",
+ " delete PyViz.plot_index[id];\n",
+ " if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n",
+ " var doc = window.Bokeh.index[id].model.document\n",
+ " doc.clear();\n",
+ " const i = window.Bokeh.documents.indexOf(doc);\n",
+ " if (i > -1) {\n",
+ " window.Bokeh.documents.splice(i, 1);\n",
+ " }\n",
+ " }\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle kernel restart event\n",
+ " */\n",
+ "function handle_kernel_cleanup(event, handle) {\n",
+ " delete PyViz.comms[\"hv-extension-comm\"];\n",
+ " window.PyViz.plot_index = {}\n",
+ "}\n",
+ "\n",
+ "/**\n",
+ " * Handle update_display_data messages\n",
+ " */\n",
+ "function handle_update_output(event, handle) {\n",
+ " handle_clear_output(event, {cell: {output_area: handle.output_area}})\n",
+ " handle_add_output(event, handle)\n",
+ "}\n",
+ "\n",
+ "function register_renderer(events, OutputArea) {\n",
+ " function append_mime(data, metadata, element) {\n",
+ " // create a DOM node to render to\n",
+ " var toinsert = this.create_output_subarea(\n",
+ " metadata,\n",
+ " CLASS_NAME,\n",
+ " EXEC_MIME_TYPE\n",
+ " );\n",
+ " this.keyboard_manager.register_events(toinsert);\n",
+ " // Render to node\n",
+ " var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n",
+ " render(props, toinsert[0]);\n",
+ " element.append(toinsert);\n",
+ " return toinsert\n",
+ " }\n",
+ "\n",
+ " events.on('output_added.OutputArea', handle_add_output);\n",
+ " events.on('output_updated.OutputArea', handle_update_output);\n",
+ " events.on('clear_output.CodeCell', handle_clear_output);\n",
+ " events.on('delete.Cell', handle_clear_output);\n",
+ " events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n",
+ "\n",
+ " OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n",
+ " safe: true,\n",
+ " index: 0\n",
+ " });\n",
+ "}\n",
+ "\n",
+ "if (window.Jupyter !== undefined) {\n",
+ " try {\n",
+ " var events = require('base/js/events');\n",
+ " var OutputArea = require('notebook/js/outputarea').OutputArea;\n",
+ " if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n",
+ " register_renderer(events, OutputArea);\n",
+ " }\n",
+ " } catch(err) {\n",
+ " }\n",
+ "}\n"
+ ],
+ "application/vnd.holoviews_load.v0+json": "\nif ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n}\n\n\n function JupyterCommManager() {\n }\n\n JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n comm_manager.register_target(comm_id, function(comm) {\n comm.on_msg(msg_handler);\n });\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n comm.onMsg = msg_handler;\n });\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data, comm_id};\n var buffers = []\n for (var buffer of message.buffers || []) {\n buffers.push(new DataView(buffer))\n }\n var metadata = message.metadata || {};\n var msg = {content, buffers, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n })\n }\n }\n\n JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n if (comm_id in window.PyViz.comms) {\n return window.PyViz.comms[comm_id];\n } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n if (msg_handler) {\n comm.on_msg(msg_handler);\n }\n } else if ((plot_id in window.PyViz.kernels) && 
(window.PyViz.kernels[plot_id])) {\n var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n let retries = 0;\n const open = () => {\n if (comm.active) {\n comm.open();\n } else if (retries > 3) {\n console.warn('Comm target never activated')\n } else {\n retries += 1\n setTimeout(open, 500)\n }\n }\n if (comm.active) {\n comm.open();\n } else {\n setTimeout(open, 500)\n }\n if (msg_handler) {\n comm.onMsg = msg_handler;\n }\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n var comm_promise = google.colab.kernel.comms.open(comm_id)\n comm_promise.then((comm) => {\n window.PyViz.comms[comm_id] = comm;\n if (msg_handler) {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data};\n var metadata = message.metadata || {comm_id};\n var msg = {content, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n }\n })\n var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n return comm_promise.then((comm) => {\n comm.send(data, metadata, buffers, disposeOnDone);\n });\n };\n var comm = {\n send: sendClosure\n };\n }\n window.PyViz.comms[comm_id] = comm;\n return comm;\n }\n window.PyViz.comm_manager = new JupyterCommManager();\n \n\n\nvar JS_MIME_TYPE = 'application/javascript';\nvar HTML_MIME_TYPE = 'text/html';\nvar EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\nvar CLASS_NAME = 'output';\n\n/**\n * Render data to the DOM node\n */\nfunction render(props, node) {\n var div = document.createElement(\"div\");\n var script = document.createElement(\"script\");\n node.appendChild(div);\n node.appendChild(script);\n}\n\n/**\n * Handle when a new output is added\n */\nfunction handle_add_output(event, handle) {\n var output_area = handle.output_area;\n var output = handle.output;\n if ((output.data == undefined) || 
(!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n return\n }\n var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n if (id !== undefined) {\n var nchildren = toinsert.length;\n var html_node = toinsert[nchildren-1].children[0];\n html_node.innerHTML = output.data[HTML_MIME_TYPE];\n var scripts = [];\n var nodelist = html_node.querySelectorAll(\"script\");\n for (var i in nodelist) {\n if (nodelist.hasOwnProperty(i)) {\n scripts.push(nodelist[i])\n }\n }\n\n scripts.forEach( function (oldScript) {\n var newScript = document.createElement(\"script\");\n var attrs = [];\n var nodemap = oldScript.attributes;\n for (var j in nodemap) {\n if (nodemap.hasOwnProperty(j)) {\n attrs.push(nodemap[j])\n }\n }\n attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n oldScript.parentNode.replaceChild(newScript, oldScript);\n });\n if (JS_MIME_TYPE in output.data) {\n toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n }\n output_area._hv_plot_id = id;\n if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n window.PyViz.plot_index[id] = Bokeh.index[id];\n } else {\n window.PyViz.plot_index[id] = null;\n }\n } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n var bk_div = document.createElement(\"div\");\n bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n var script_attrs = bk_div.children[0].attributes;\n for (var i = 0; i < script_attrs.length; i++) {\n toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n }\n // store reference to server id on output_area\n output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n }\n}\n\n/**\n * Handle when an output is cleared or removed\n */\nfunction handle_clear_output(event, handle) {\n var id = handle.cell.output_area._hv_plot_id;\n var 
server_id = handle.cell.output_area._bokeh_server_id;\n if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n if (server_id !== null) {\n comm.send({event_type: 'server_delete', 'id': server_id});\n return;\n } else if (comm !== null) {\n comm.send({event_type: 'delete', 'id': id});\n }\n delete PyViz.plot_index[id];\n if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n var doc = window.Bokeh.index[id].model.document\n doc.clear();\n const i = window.Bokeh.documents.indexOf(doc);\n if (i > -1) {\n window.Bokeh.documents.splice(i, 1);\n }\n }\n}\n\n/**\n * Handle kernel restart event\n */\nfunction handle_kernel_cleanup(event, handle) {\n delete PyViz.comms[\"hv-extension-comm\"];\n window.PyViz.plot_index = {}\n}\n\n/**\n * Handle update_display_data messages\n */\nfunction handle_update_output(event, handle) {\n handle_clear_output(event, {cell: {output_area: handle.output_area}})\n handle_add_output(event, handle)\n}\n\nfunction register_renderer(events, OutputArea) {\n function append_mime(data, metadata, element) {\n // create a DOM node to render to\n var toinsert = this.create_output_subarea(\n metadata,\n CLASS_NAME,\n EXEC_MIME_TYPE\n );\n this.keyboard_manager.register_events(toinsert);\n // Render to node\n var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n render(props, toinsert[0]);\n element.append(toinsert);\n return toinsert\n }\n\n events.on('output_added.OutputArea', handle_add_output);\n events.on('output_updated.OutputArea', handle_update_output);\n events.on('clear_output.CodeCell', handle_clear_output);\n events.on('delete.Cell', handle_clear_output);\n events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n\n OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n safe: true,\n index: 0\n });\n}\n\nif (window.Jupyter !== undefined) {\n 
try {\n var events = require('base/js/events');\n var OutputArea = require('notebook/js/outputarea').OutputArea;\n if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n register_renderer(events, OutputArea);\n }\n } catch(err) {\n }\n}\n"
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import json\n",
+ "import logging\n",
+ "import warnings\n",
+ "\n",
+ "import dask.distributed as dd\n",
+ "import geopandas as gpd\n",
+ "from pystac_client import Client\n",
+ "import shapely\n",
+ "\n",
+ "# For visualization\n",
+ "import holoviews as hv\n",
+ "import hvplot.xarray\n",
+ "\n",
+ "hv.extension(\"bokeh\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "bkHC",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n"
+ ]
+ }
+ ],
+ "source": [
+ "import semantique as sq"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "lEQa",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Configure logging\n",
+ "logging.basicConfig(\n",
+ " level=logging.WARNING,\n",
+ " format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',\n",
+ " force=True # This forces reconfiguration if already set\n",
+ ")\n",
+ "\n",
+ "logging.getLogger(\"semantique\").setLevel(logging.DEBUG)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "Xref",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "http://127.0.0.1:8787/status\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Start Dask LocalCluster and dashboard\n",
+ "# By default, dashboard will be available at http://localhost:8787\n",
+ "dask_client = dd.Client(n_workers=6, threads_per_worker=1)\n",
+ "print(dask_client.dashboard_link)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "BYtC",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found: 13 items.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " geometry | \n",
+ " created | \n",
+ " platform | \n",
+ " constellation | \n",
+ " instruments | \n",
+ " eo:cloud_cover | \n",
+ " proj:epsg | \n",
+ " mgrs:utm_zone | \n",
+ " mgrs:latitude_band | \n",
+ " mgrs:grid_square | \n",
+ " ... | \n",
+ " s2:datastrip_id | \n",
+ " s2:granule_id | \n",
+ " s2:reflectance_conversion_factor | \n",
+ " datetime | \n",
+ " s2:sequence | \n",
+ " earthsearch:s3_path | \n",
+ " earthsearch:payload_id | \n",
+ " earthsearch:boa_offset_applied | \n",
+ " processing:software | \n",
+ " updated | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " POLYGON ((15.99815 48.74868, 16.49326 48.74333... | \n",
+ " 2023-07-01T18:36:05.617Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 99.629170 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230701T113400_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230701T113400_A0329... | \n",
+ " 0.967644 | \n",
+ " 2023-07-01T09:57:27.326000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/5aec... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-07-01T18:36:05.617Z | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-29T23:46:20.169Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 6.525128 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230629T175255_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230629T175255_A0418... | \n",
+ " 0.967807 | \n",
+ " 2023-06-29T10:07:23.808000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/d626... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-29T23:46:20.169Z | \n",
+ "
\n",
+ " \n",
+ " | 2 | \n",
+ " POLYGON ((15.99666 48.74869, 16.49326 48.74333... | \n",
+ " 2023-06-26T17:20:57.655Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 11.215745 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230626T140000_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230626T140000_A0418... | \n",
+ " 0.968124 | \n",
+ " 2023-06-26T09:57:25.568000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/2195... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-26T17:20:57.655Z | \n",
+ "
\n",
+ " \n",
+ " | 3 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-24T18:11:36.093Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 99.978906 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230624T113131_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230624T113131_A0328... | \n",
+ " 0.968380 | \n",
+ " 2023-06-24T10:07:23.800000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/07e0... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-24T18:11:36.093Z | \n",
+ "
\n",
+ " \n",
+ " | 4 | \n",
+ " POLYGON ((16.00142 48.74865, 16.49326 48.74333... | \n",
+ " 2023-06-21T18:19:44.109Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 30.004331 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230621T113044_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230621T113044_A0328... | \n",
+ " 0.968835 | \n",
+ " 2023-06-21T09:57:26.119000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/b647... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-21T18:19:44.109Z | \n",
+ "
\n",
+ " \n",
+ " | 5 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-19T19:28:34.163Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 37.662557 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230619T160956_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230619T160956_A0417... | \n",
+ " 0.969182 | \n",
+ " 2023-06-19T10:07:23.655000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/e1eb... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-19T19:28:34.163Z | \n",
+ "
\n",
+ " \n",
+ " | 6 | \n",
+ " POLYGON ((15.99203 48.74873, 16.49326 48.74333... | \n",
+ " 2023-06-16T19:59:17.938Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 16.089541 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230616T153853_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230616T153853_A0416... | \n",
+ " 0.969772 | \n",
+ " 2023-06-16T09:57:26.329000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/667b... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-16T19:59:17.938Z | \n",
+ "
\n",
+ " \n",
+ " | 7 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-14T18:12:47.059Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 33.078259 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230614T113003_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230614T113003_A0327... | \n",
+ " 0.970208 | \n",
+ " 2023-06-14T10:07:24.612000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/1ad4... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-14T18:12:47.059Z | \n",
+ "
\n",
+ " \n",
+ " | 8 | \n",
+ " POLYGON ((15.99516 48.74871, 16.49326 48.74333... | \n",
+ " 2023-06-11T18:15:25.755Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 99.740505 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230611T113435_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230611T113435_A0327... | \n",
+ " 0.970929 | \n",
+ " 2023-06-11T09:57:27.127000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/3e9b... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-11T18:15:25.755Z | \n",
+ "
\n",
+ " \n",
+ " | 9 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-09T20:36:11.201Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 97.923017 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230609T161103_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230609T161103_A0415... | \n",
+ " 0.971450 | \n",
+ " 2023-06-09T10:07:23.354000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/ea4c... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-09T20:36:11.201Z | \n",
+ "
\n",
+ " \n",
+ " | 10 | \n",
+ " POLYGON ((15.98986 48.74875, 16.49326 48.74333... | \n",
+ " 2023-06-06T18:35:46.172Z | \n",
+ " sentinel-2a | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 93.600863 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2A_OPER_MSI_L2A_DS_2APS_20230606T140554_S2023... | \n",
+ " S2A_OPER_MSI_L2A_TL_2APS_20230606T140554_A0415... | \n",
+ " 0.972298 | \n",
+ " 2023-06-06T09:57:25.707000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/30da... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-06T18:35:46.172Z | \n",
+ "
\n",
+ " \n",
+ " | 11 | \n",
+ " POLYGON ((14.99974 48.75300, 16.49326 48.74333... | \n",
+ " 2023-06-04T19:57:33.044Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 70.831150 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230604T113608_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230604T113608_A0326... | \n",
+ " 0.972902 | \n",
+ " 2023-06-04T10:07:23.869000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/6285... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-04T19:57:33.044Z | \n",
+ "
\n",
+ " \n",
+ " | 12 | \n",
+ " POLYGON ((15.99951 48.74867, 16.49326 48.74333... | \n",
+ " 2023-06-01T20:19:14.459Z | \n",
+ " sentinel-2b | \n",
+ " sentinel-2 | \n",
+ " [msi] | \n",
+ " 2.554968 | \n",
+ " 32633 | \n",
+ " 33 | \n",
+ " U | \n",
+ " WP | \n",
+ " ... | \n",
+ " S2B_OPER_MSI_L2A_DS_2BPS_20230601T143134_S2023... | \n",
+ " S2B_OPER_MSI_L2A_TL_2BPS_20230601T143134_A0325... | \n",
+ " 0.973873 | \n",
+ " 2023-06-01T09:57:25.756000Z | \n",
+ " 0 | \n",
+ " s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... | \n",
+ " roda-sentinel2/workflow-sentinel2-to-stac/899b... | \n",
+ " True | \n",
+ " {'sentinel2-to-stac': '0.1.0'} | \n",
+ " 2023-06-01T20:19:14.459Z | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
13 rows × 42 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " geometry \\\n",
+ "0 POLYGON ((15.99815 48.74868, 16.49326 48.74333... \n",
+ "1 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "2 POLYGON ((15.99666 48.74869, 16.49326 48.74333... \n",
+ "3 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "4 POLYGON ((16.00142 48.74865, 16.49326 48.74333... \n",
+ "5 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "6 POLYGON ((15.99203 48.74873, 16.49326 48.74333... \n",
+ "7 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "8 POLYGON ((15.99516 48.74871, 16.49326 48.74333... \n",
+ "9 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "10 POLYGON ((15.98986 48.74875, 16.49326 48.74333... \n",
+ "11 POLYGON ((14.99974 48.75300, 16.49326 48.74333... \n",
+ "12 POLYGON ((15.99951 48.74867, 16.49326 48.74333... \n",
+ "\n",
+ " created platform constellation instruments \\\n",
+ "0 2023-07-01T18:36:05.617Z sentinel-2b sentinel-2 [msi] \n",
+ "1 2023-06-29T23:46:20.169Z sentinel-2a sentinel-2 [msi] \n",
+ "2 2023-06-26T17:20:57.655Z sentinel-2a sentinel-2 [msi] \n",
+ "3 2023-06-24T18:11:36.093Z sentinel-2b sentinel-2 [msi] \n",
+ "4 2023-06-21T18:19:44.109Z sentinel-2b sentinel-2 [msi] \n",
+ "5 2023-06-19T19:28:34.163Z sentinel-2a sentinel-2 [msi] \n",
+ "6 2023-06-16T19:59:17.938Z sentinel-2a sentinel-2 [msi] \n",
+ "7 2023-06-14T18:12:47.059Z sentinel-2b sentinel-2 [msi] \n",
+ "8 2023-06-11T18:15:25.755Z sentinel-2b sentinel-2 [msi] \n",
+ "9 2023-06-09T20:36:11.201Z sentinel-2a sentinel-2 [msi] \n",
+ "10 2023-06-06T18:35:46.172Z sentinel-2a sentinel-2 [msi] \n",
+ "11 2023-06-04T19:57:33.044Z sentinel-2b sentinel-2 [msi] \n",
+ "12 2023-06-01T20:19:14.459Z sentinel-2b sentinel-2 [msi] \n",
+ "\n",
+ " eo:cloud_cover proj:epsg mgrs:utm_zone mgrs:latitude_band \\\n",
+ "0 99.629170 32633 33 U \n",
+ "1 6.525128 32633 33 U \n",
+ "2 11.215745 32633 33 U \n",
+ "3 99.978906 32633 33 U \n",
+ "4 30.004331 32633 33 U \n",
+ "5 37.662557 32633 33 U \n",
+ "6 16.089541 32633 33 U \n",
+ "7 33.078259 32633 33 U \n",
+ "8 99.740505 32633 33 U \n",
+ "9 97.923017 32633 33 U \n",
+ "10 93.600863 32633 33 U \n",
+ "11 70.831150 32633 33 U \n",
+ "12 2.554968 32633 33 U \n",
+ "\n",
+ " mgrs:grid_square ... s2:datastrip_id \\\n",
+ "0 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230701T113400_S2023... \n",
+ "1 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230629T175255_S2023... \n",
+ "2 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230626T140000_S2023... \n",
+ "3 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230624T113131_S2023... \n",
+ "4 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230621T113044_S2023... \n",
+ "5 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230619T160956_S2023... \n",
+ "6 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230616T153853_S2023... \n",
+ "7 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230614T113003_S2023... \n",
+ "8 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230611T113435_S2023... \n",
+ "9 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230609T161103_S2023... \n",
+ "10 WP ... S2A_OPER_MSI_L2A_DS_2APS_20230606T140554_S2023... \n",
+ "11 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230604T113608_S2023... \n",
+ "12 WP ... S2B_OPER_MSI_L2A_DS_2BPS_20230601T143134_S2023... \n",
+ "\n",
+ " s2:granule_id \\\n",
+ "0 S2B_OPER_MSI_L2A_TL_2BPS_20230701T113400_A0329... \n",
+ "1 S2A_OPER_MSI_L2A_TL_2APS_20230629T175255_A0418... \n",
+ "2 S2A_OPER_MSI_L2A_TL_2APS_20230626T140000_A0418... \n",
+ "3 S2B_OPER_MSI_L2A_TL_2BPS_20230624T113131_A0328... \n",
+ "4 S2B_OPER_MSI_L2A_TL_2BPS_20230621T113044_A0328... \n",
+ "5 S2A_OPER_MSI_L2A_TL_2APS_20230619T160956_A0417... \n",
+ "6 S2A_OPER_MSI_L2A_TL_2APS_20230616T153853_A0416... \n",
+ "7 S2B_OPER_MSI_L2A_TL_2BPS_20230614T113003_A0327... \n",
+ "8 S2B_OPER_MSI_L2A_TL_2BPS_20230611T113435_A0327... \n",
+ "9 S2A_OPER_MSI_L2A_TL_2APS_20230609T161103_A0415... \n",
+ "10 S2A_OPER_MSI_L2A_TL_2APS_20230606T140554_A0415... \n",
+ "11 S2B_OPER_MSI_L2A_TL_2BPS_20230604T113608_A0326... \n",
+ "12 S2B_OPER_MSI_L2A_TL_2BPS_20230601T143134_A0325... \n",
+ "\n",
+ " s2:reflectance_conversion_factor datetime \\\n",
+ "0 0.967644 2023-07-01T09:57:27.326000Z \n",
+ "1 0.967807 2023-06-29T10:07:23.808000Z \n",
+ "2 0.968124 2023-06-26T09:57:25.568000Z \n",
+ "3 0.968380 2023-06-24T10:07:23.800000Z \n",
+ "4 0.968835 2023-06-21T09:57:26.119000Z \n",
+ "5 0.969182 2023-06-19T10:07:23.655000Z \n",
+ "6 0.969772 2023-06-16T09:57:26.329000Z \n",
+ "7 0.970208 2023-06-14T10:07:24.612000Z \n",
+ "8 0.970929 2023-06-11T09:57:27.127000Z \n",
+ "9 0.971450 2023-06-09T10:07:23.354000Z \n",
+ "10 0.972298 2023-06-06T09:57:25.707000Z \n",
+ "11 0.972902 2023-06-04T10:07:23.869000Z \n",
+ "12 0.973873 2023-06-01T09:57:25.756000Z \n",
+ "\n",
+ " s2:sequence earthsearch:s3_path \\\n",
+ "0 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "1 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "2 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "3 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "4 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "5 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "6 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "7 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "8 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "9 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "10 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "11 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "12 0 s3://sentinel-cogs/sentinel-s2-l2a-cogs/33/U/W... \n",
+ "\n",
+ " earthsearch:payload_id \\\n",
+ "0 roda-sentinel2/workflow-sentinel2-to-stac/5aec... \n",
+ "1 roda-sentinel2/workflow-sentinel2-to-stac/d626... \n",
+ "2 roda-sentinel2/workflow-sentinel2-to-stac/2195... \n",
+ "3 roda-sentinel2/workflow-sentinel2-to-stac/07e0... \n",
+ "4 roda-sentinel2/workflow-sentinel2-to-stac/b647... \n",
+ "5 roda-sentinel2/workflow-sentinel2-to-stac/e1eb... \n",
+ "6 roda-sentinel2/workflow-sentinel2-to-stac/667b... \n",
+ "7 roda-sentinel2/workflow-sentinel2-to-stac/1ad4... \n",
+ "8 roda-sentinel2/workflow-sentinel2-to-stac/3e9b... \n",
+ "9 roda-sentinel2/workflow-sentinel2-to-stac/ea4c... \n",
+ "10 roda-sentinel2/workflow-sentinel2-to-stac/30da... \n",
+ "11 roda-sentinel2/workflow-sentinel2-to-stac/6285... \n",
+ "12 roda-sentinel2/workflow-sentinel2-to-stac/899b... \n",
+ "\n",
+ " earthsearch:boa_offset_applied processing:software \\\n",
+ "0 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "1 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "2 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "3 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "4 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "5 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "6 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "7 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "8 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "9 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "10 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "11 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "12 True {'sentinel2-to-stac': '0.1.0'} \n",
+ "\n",
+ " updated \n",
+ "0 2023-07-01T18:36:05.617Z \n",
+ "1 2023-06-29T23:46:20.169Z \n",
+ "2 2023-06-26T17:20:57.655Z \n",
+ "3 2023-06-24T18:11:36.093Z \n",
+ "4 2023-06-21T18:19:44.109Z \n",
+ "5 2023-06-19T19:28:34.163Z \n",
+ "6 2023-06-16T19:59:17.938Z \n",
+ "7 2023-06-14T18:12:47.059Z \n",
+ "8 2023-06-11T18:15:25.755Z \n",
+ "9 2023-06-09T20:36:11.201Z \n",
+ "10 2023-06-06T18:35:46.172Z \n",
+ "11 2023-06-04T19:57:33.044Z \n",
+ "12 2023-06-01T20:19:14.459Z \n",
+ "\n",
+ "[13 rows x 42 columns]"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Define area of interest and time period\n",
+ "\n",
+ "# Stockerau, Austria (small)\n",
+ "aoi = shapely.from_wkt(\"POLYGON ((16.17 48.33, 16.25 48.33, 16.25 48.41, 16.17 48.41, 16.17 48.33))\")\n",
+ "\n",
+ "# West of Vienna, 33UWP\n",
+ "# aoi = shapely.from_wkt(\"POLYGON ((15.00 48.75, 16.49 48.74, 16.46 47.75, 15.00 47.76, 15.00 48.75))\")\n",
+ "\n",
+ "aoi_gdf = gpd.GeoDataFrame(index=[0], crs='epsg:4326', geometry=[aoi])\n",
+ "t_range = [\"2023-06-01\", \"2023-07-01\"] # 1 month\n",
+ "# t_range = [\"2023-03-01\", \"2023-07-01\"] # 4 months\n",
+ "epsg = 32633 # For MGRS-33UWP\n",
+ "\n",
+ "# STAC-based metadata retrieval\n",
+ "catalog = Client.open(\"https://earth-search.aws.element84.com/v1\")\n",
+ "query = catalog.search(\n",
+ " collections=\"sentinel-2-l2a\", datetime=t_range, limit=100, intersects=aoi\n",
+ ")\n",
+ "item_coll = query.item_collection()\n",
+ "\n",
+ "# list results - part I\n",
+ "items = list(query.items())\n",
+ "print(f\"Found: {len(items):d} items.\")\n",
+ "\n",
+ "# list results - part II\n",
+ "stac_json = query.item_collection_as_dict()\n",
+ "gdf = gpd.GeoDataFrame.from_features(stac_json, \"epsg:4326\")\n",
+ "gdf"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "RGSE",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2025-10-15 17:55:53,239 - semantique.processor.utils - INFO - Lazy dask computation enabled.\n"
+ ]
+ }
+ ],
+ "source": [
+ "# define datacube\n",
+ "with open(\"files/layout_stac.json\", \"r\") as file:\n",
+ " dc = sq.datacube.STACCube(\n",
+ " json.load(file),\n",
+ " src=item_coll,\n",
+ " group_by_solar_day=False, # Pending Dask support\n",
+ " trim=False, # Pending Dask support\n",
+ " dask_lazy=True,\n",
+ " dask_chunk_size=2048\n",
+ " )\n",
+ "\n",
+ "# use same extents as for STAC query to set up the context for the datacube\n",
+ "space = sq.SpatialExtent(aoi_gdf.to_crs(epsg))\n",
+ "time = sq.TemporalExtent(*t_range)\n",
+ "resolution=20"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "Kclp",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "mapping = sq.mapping.Semantique()\n",
+ "mapping[\"entity\"] = {}\n",
+ "mapping[\"entity\"][\"cloud\"] = {\n",
+ " \"color\": sq.appearance(\"scl\").evaluate(\"in\", [8, 9, 10])\n",
+ "}\n",
+ "mapping[\"entity\"][\"snow\"] = {\n",
+ " \"color\": sq.appearance(\"scl\").evaluate(\"in\", [11])\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "emfo",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2025-10-15 17:55:53,267 - semantique.processor.core - INFO - Started parsing the semantic query\n",
+ "2025-10-15 17:55:53,325 - semantique.processor.core - DEBUG - Parsed the spatio-temporal extent:\n",
+ " Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}\n",
+ "2025-10-15 17:55:53,326 - semantique.processor.core - INFO - Finished parsing the semantic query\n",
+ "2025-10-15 17:55:53,329 - semantique.processor.core - INFO - Started optimizing the semantic query\n",
+ "2025-10-15 17:55:53,330 - semantique.processor.core - INFO - Finished optimizing the semantic query\n",
+ "2025-10-15 17:55:53,331 - semantique.processor.core - INFO - Started executing the semantic query\n",
+ "2025-10-15 17:55:53,332 - semantique.processor.core - INFO - Started executing result: 'cloud_snow_share'\n",
+ "2025-10-15 17:55:53,333 - semantique.processor.core - DEBUG - Constructing collection of arrays\n",
+ "2025-10-15 17:55:53,334 - semantique.processor.core - DEBUG - Translating concept ('entity', 'cloud')\n",
+ "2025-10-15 17:55:53,341 - semantique.processor.core - DEBUG - Translated concept ('entity', 'cloud'):\n",
+ " Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}\n",
+ "2025-10-15 17:55:53,342 - semantique.processor.core - DEBUG - Translating concept ('entity', 'snow')\n",
+ "2025-10-15 17:55:53,350 - semantique.processor.core - DEBUG - Translated concept ('entity', 'snow'):\n",
+ " Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}\n",
+ "2025-10-15 17:55:53,351 - semantique.processor.core - DEBUG - Constructed collection of:\n",
+ "['cloud', 'snow']\n",
+ "2025-10-15 17:55:53,353 - semantique.processor.core - INFO - Finished executing result: 'cloud_snow_share'\n",
+ "2025-10-15 17:55:53,355 - semantique.processor.core - INFO - Finished executing the semantic query\n",
+ "2025-10-15 17:55:53,362 - semantique.processor.core - DEBUG - Responding:\n",
+ "{'cloud_snow_share': [ Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}, Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}]}\n",
+ "2025-10-15 17:55:53,363 - semantique.processor.core - INFO - Started parsing the semantic query\n",
+ "2025-10-15 17:55:53,405 - semantique.processor.core - DEBUG - Parsed the spatio-temporal extent:\n",
+ " Size: 2MB\n",
+ "array([[[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]],\n",
+ "\n",
+ " [[nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " ...,\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan],\n",
+ " [nan, nan, nan, ..., nan, nan, nan]]])\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 16B 2023-06-01 2023-07-01\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " spatial_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " name: index\n",
+ " long_name: index\n",
+ " _FillValue: nan\n",
+ " value_type: nominal\n",
+ " value_labels: {1: 'feature_1'}\n",
+ "2025-10-15 17:55:53,406 - semantique.processor.core - INFO - Finished parsing the semantic query\n",
+ "2025-10-15 17:55:53,407 - semantique.processor.utils - INFO - Lazy dask computation enabled.\n",
+ "2025-10-15 17:55:53,409 - semantique.processor.core - INFO - Started optimizing the semantic query\n",
+ "2025-10-15 17:55:53,410 - semantique.processor.core - INFO - Finished optimizing the semantic query\n",
+ "2025-10-15 17:55:53,411 - semantique.processor.core - INFO - Started executing the semantic query\n",
+ "2025-10-15 17:55:53,413 - semantique.processor.core - INFO - Started executing result: 'cloud_snow_share'\n",
+ "2025-10-15 17:55:53,414 - semantique.processor.core - DEBUG - Constructing collection of arrays\n",
+ "2025-10-15 17:55:53,415 - semantique.processor.core - DEBUG - Translating concept ('entity', 'cloud')\n",
+ "2025-10-15 17:55:53,416 - semantique.processor.core - DEBUG - Retrieving layer ('appearance', 'scl')\n",
+ "2025-10-15 17:55:53,474 - semantique.processor.core - DEBUG - Retrieved layer ('appearance', 'scl'):\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0...\n",
+ " resolution: 20.0\n",
+ " value_type: ordinal\n",
+ " value_labels: {0: 'mask', 1: 'saturated', 2: 'dark', 3: 'shadow', 4: 've...\n",
+ "2025-10-15 17:55:53,476 - semantique.processor.core - DEBUG - Cache updated\n",
+ "2025-10-15 17:55:53,476 - semantique.processor.core - DEBUG - Sequence of layers: [('appearance', 'scl')]\n",
+ "2025-10-15 17:55:53,477 - semantique.processor.core - DEBUG - Currently cached layers: ['appearance_scl']\n",
+ "2025-10-15 17:55:53,481 - semantique.processor.utils - DEBUG - Applying .f at 0x7fe0606ff380> with args:\n",
+ "( Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0...\n",
+ " resolution: 20.0\n",
+ " value_type: ordinal\n",
+ " value_labels: {0: 'mask', 1: 'saturated', 2: 'dark', 3: 'shadow', 4: 've...,)\n",
+ "2025-10-15 17:55:53,513 - semantique.processor.core - DEBUG - Applied verb evaluate:\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: binary\n",
+ "2025-10-15 17:55:53,519 - semantique.processor.core - DEBUG - Translated concept ('entity', 'cloud'):\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: binary\n",
+ "2025-10-15 17:55:53,520 - semantique.processor.core - DEBUG - Translating concept ('entity', 'snow')\n",
+ "2025-10-15 17:55:53,521 - semantique.processor.core - DEBUG - Loading layer ('appearance', 'scl') from cache\n",
+ "2025-10-15 17:55:53,526 - semantique.processor.core - DEBUG - Retrieved layer ('appearance', 'scl'):\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0...\n",
+ " resolution: 20.0\n",
+ " value_type: ordinal\n",
+ " value_labels: {0: 'mask', 1: 'saturated', 2: 'dark', 3: 'shadow', 4: 've...\n",
+ "2025-10-15 17:55:53,527 - semantique.processor.core - DEBUG - Cache updated\n",
+ "2025-10-15 17:55:53,528 - semantique.processor.core - DEBUG - Sequence of layers: []\n",
+ "2025-10-15 17:55:53,529 - semantique.processor.core - DEBUG - Currently cached layers: []\n",
+ "2025-10-15 17:55:53,535 - semantique.processor.utils - DEBUG - Applying .f at 0x7fe0697f0d60> with args:\n",
+ "( Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0...\n",
+ " resolution: 20.0\n",
+ " value_type: ordinal\n",
+ " value_labels: {0: 'mask', 1: 'saturated', 2: 'dark', 3: 'shadow', 4: 've...,)\n",
+ "2025-10-15 17:55:53,543 - semantique.processor.core - DEBUG - Applied verb evaluate:\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: binary\n",
+ "2025-10-15 17:55:53,547 - semantique.processor.core - DEBUG - Translated concept ('entity', 'snow'):\n",
+ " Size: 7MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: binary\n",
+ "2025-10-15 17:55:53,548 - semantique.processor.core - DEBUG - Constructed collection of:\n",
+ "['cloud', 'snow']\n",
+ "2025-10-15 17:55:53,567 - semantique.processor.core - DEBUG - Applied verb merge:\n",
+ " Size: 13MB\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 2...\n",
+ " * y (y) float64 4kB 5.363e+06 5.363e+06 ... 5.354e+06 5.354e+06\n",
+ " * x (x) float64 2kB 5.866e+05 5.866e+05 ... 5.926e+05 5.926e+05\n",
+ " temporal_ref int64 8B 0\n",
+ " spatial_feats (y, x) float64 1MB nan nan nan nan nan ... nan nan nan nan\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: binary\n",
+ "2025-10-15 17:55:53,608 - semantique.processor.core - DEBUG - Applied verb reduce:\n",
+ " Size: 96B\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 20...\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: continuous\n",
+ "2025-10-15 17:55:53,611 - semantique.processor.core - INFO - Finished executing result: 'cloud_snow_share'\n",
+ "2025-10-15 17:55:53,612 - semantique.processor.core - INFO - Finished executing the semantic query\n",
+ "2025-10-15 17:55:53,614 - semantique.processor.core - DEBUG - Responding:\n",
+ "{'cloud_snow_share': Size: 96B\n",
+ "dask.array\n",
+ "Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 20...\n",
+ " temporal_ref int64 8B 0\n",
+ "Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: continuous}\n"
+ ]
+ }
+ ],
+ "source": [
+ "context = {\n",
+ " \"datacube\": dc,\n",
+ " \"mapping\": mapping,\n",
+ " \"space\": space,\n",
+ " \"time\": time,\n",
+ " \"crs\": epsg,\n",
+ " \"tz\": \"UTC\",\n",
+ " \"spatial_resolution\": [-resolution, resolution],\n",
+ " \"cache_data\": True\n",
+ " # \"cache_data\": False\n",
+ "}\n",
+ "\n",
+ "recipe = sq.QueryRecipe()\n",
+ "recipe[\"cloud_snow_share\"] = ( \n",
+ " sq.collection(\n",
+ " sq.entity(\"cloud\"), \n",
+ " sq.entity(\"snow\")\n",
+ " )\n",
+ " .merge(\"any\")\n",
+ " .reduce(\"percentage\", \"space\")\n",
+ ")\n",
+ "# Additional output - leave out for now for fairer comparisons\n",
+ "# (reintroduce when we have better data dependency management)\n",
+ "# recipe[\"cloud_snowfree\"] = ( \n",
+ "# sq.result(\"cloud_snow_share\")\n",
+ "# .evaluate(\"less\", 10)\n",
+ "# )\n",
+ "\n",
+ "# execute recipe\n",
+ "with warnings.catch_warnings():\n",
+ " warnings.simplefilter(\"ignore\", UserWarning)\n",
+ " warnings.simplefilter(\"ignore\", RuntimeWarning)\n",
+ " response = recipe.execute(**context)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "Hstk",
+ "metadata": {
+ "marimo": {
+ "config": {
+ "disabled": true
+ }
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Visualize task graph - to file. Can take a while!\n",
+ "# response[\"cloud_snow_share\"].data.visualize(filename=\"data/graph_cloud_snow_share.svg\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "nWHF",
+ "metadata": {
+ "marimo": {
+ "config": {
+ "disabled": true
+ }
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
HighLevelGraph
\n",
+ "
\n",
+ " HighLevelGraph with 47 layers and 625 keys from all layers.\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer1: asset-table
\n",
+ "
\n",
+ " \n",
+ " asset-table-d430f26086d18353d050b6810c2ec64b\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 1) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " [('url', 'O'), ('bounds', '<f8', (4,)), ('scale_offset', '<f8', (2,))] | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer2: asset_table_to_reader_and_window
\n",
+ "
\n",
+ " \n",
+ " asset_table_to_reader_and_window-a478deb1b5eaa9ee27c6b02f19e7abea\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | fuse | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 1) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " object | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " asset-table-d430f26086d18353d050b6810c2ec64b | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer3: fetch_raster_window
\n",
+ "
\n",
+ " \n",
+ " fetch_raster_window-0a5ef31258203b0ddbde185ea9f2c4c1\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int8 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " asset_table_to_reader_and_window-a478deb1b5eaa9ee27c6b02f19e7abea | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer4: getitem
\n",
+ "
\n",
+ " \n",
+ " getitem-5b21e533707affbd8ff8d863dd805cfb\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int8 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " fetch_raster_window-0a5ef31258203b0ddbde185ea9f2c4c1 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer5: astype
\n",
+ "
\n",
+ " \n",
+ " astype-27a5caa3566b401a267c4098d3731341\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " getitem-5b21e533707affbd8ff8d863dd805cfb | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer6: ne
\n",
+ "
\n",
+ " \n",
+ " ne-0c20b400983a18ca46a49c24220631c8\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " getitem-5b21e533707affbd8ff8d863dd805cfb | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer7: where
\n",
+ "
\n",
+ " \n",
+ " where-1ea4d0552e1498b920d445cc702e0342\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " ne-0c20b400983a18ca46a49c24220631c8 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " astype-27a5caa3566b401a267c4098d3731341 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer8: ne
\n",
+ "
\n",
+ " \n",
+ " ne-8d47136e1ec05dd7130a1f6bbb2e9dfb\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-1ea4d0552e1498b920d445cc702e0342 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer9: where
\n",
+ "
\n",
+ " \n",
+ " where-ad631554a952eb40e7a85778b72dacb1\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-1ea4d0552e1498b920d445cc702e0342 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " ne-8d47136e1ec05dd7130a1f6bbb2e9dfb | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer10: array
\n",
+ "
\n",
+ " \n",
+ " array-7582cc4aa0f7a9cf493233c2a4dd49f5\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 1 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer11: where
\n",
+ "
\n",
+ " \n",
+ " where-012d1b457871ed908f86dfae7ff2a372\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " array-7582cc4aa0f7a9cf493233c2a4dd49f5 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " where-ad631554a952eb40e7a85778b72dacb1 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer12: f
\n",
+ "
\n",
+ " \n",
+ " f-a84a4d99e3fbb68755ce5dadb5094103\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-012d1b457871ed908f86dfae7ff2a372 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer13: f_0
\n",
+ "
\n",
+ " \n",
+ " f_0-a84a4d99e3fbb68755ce5dadb5094103\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " f-a84a4d99e3fbb68755ce5dadb5094103 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer14: transpose
\n",
+ "
\n",
+ " \n",
+ " transpose-b228ed4a1924c4a2c9e7fd93df58b541\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " f_0-a84a4d99e3fbb68755ce5dadb5094103 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer15: getitem
\n",
+ "
\n",
+ " \n",
+ " getitem-4ff5e35b82d12beca86ab3d2cde05904\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (1, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " transpose-b228ed4a1924c4a2c9e7fd93df58b541 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer16: f
\n",
+ "
\n",
+ " \n",
+ " f-8f7f085e8083c2d4bdf5ab3f5ce9c631\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-012d1b457871ed908f86dfae7ff2a372 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer17: f_0
\n",
+ "
\n",
+ " \n",
+ " f_0-8f7f085e8083c2d4bdf5ab3f5ce9c631\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " f-8f7f085e8083c2d4bdf5ab3f5ce9c631 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer18: transpose
\n",
+ "
\n",
+ " \n",
+ " transpose-af9147d3fc0e17a557793bd6d2e6b921\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " f_0-8f7f085e8083c2d4bdf5ab3f5ce9c631 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer19: getitem
\n",
+ "
\n",
+ " \n",
+ " getitem-0c2a425d2cb897a3373bd476af53d557\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (1, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " transpose-af9147d3fc0e17a557793bd6d2e6b921 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer20: concatenate
\n",
+ "
\n",
+ " \n",
+ " concatenate-47fe7c9b7cce4e45ab7948dbc55bb6f5\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " getitem-0c2a425d2cb897a3373bd476af53d557 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " getitem-4ff5e35b82d12beca86ab3d2cde05904 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer21: asarray_isnull
\n",
+ "
\n",
+ " \n",
+ " _asarray_isnull-f2914599207cb7b467abca1aa823ded7\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " concatenate-47fe7c9b7cce4e45ab7948dbc55bb6f5 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer22: where
\n",
+ "
\n",
+ " \n",
+ " where-76fd23fa20b6b65c45273ad0a58ccb78\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float32 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " concatenate-47fe7c9b7cce4e45ab7948dbc55bb6f5 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " _asarray_isnull-f2914599207cb7b467abca1aa823ded7 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer23: any
\n",
+ "
\n",
+ " \n",
+ " any-40899ae0be794b92fdb7b44b29af93f2\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-76fd23fa20b6b65c45273ad0a58ccb78 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer24: any-aggregate
\n",
+ "
\n",
+ " \n",
+ " any-aggregate-06500e64b5323cff380bd9d3fdd3cb2c\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " any-40899ae0be794b92fdb7b44b29af93f2 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer25: invert
\n",
+ "
\n",
+ " \n",
+ " invert-40ffe2b24ef14a7f6b1ebcd3804110ae\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " _asarray_isnull-f2914599207cb7b467abca1aa823ded7 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer26: sum
\n",
+ "
\n",
+ " \n",
+ " sum-3a4edc4ce40eeb7afc08a8296b3ee5da\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 24 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (2, 12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " invert-40ffe2b24ef14a7f6b1ebcd3804110ae | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer27: sum-aggregate
\n",
+ "
\n",
+ " \n",
+ " sum-aggregate-92e1671557dba32b4b4cacdd642f9353\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-3a4edc4ce40eeb7afc08a8296b3ee5da | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer28: equal
\n",
+ "
\n",
+ " \n",
+ " equal-149eb22bd15bc21327c8eb44dd97e804\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-aggregate-92e1671557dba32b4b4cacdd642f9353 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer29: where
\n",
+ "
\n",
+ " \n",
+ " where-9facb6c6362e96179415849e2275fde1\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 449, 303) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " any-aggregate-06500e64b5323cff380bd9d3fdd3cb2c | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " equal-149eb22bd15bc21327c8eb44dd97e804 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer30: reshape
\n",
+ "
\n",
+ " \n",
+ " reshape-d4dc45bbd02c34cade2c12fc106b3404\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-9facb6c6362e96179415849e2275fde1 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer31: isnan
\n",
+ "
\n",
+ " \n",
+ " isnan-8c19b454cfb3a9334151f05fbb99ab43\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " reshape-d4dc45bbd02c34cade2c12fc106b3404 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer32: invert
\n",
+ "
\n",
+ " \n",
+ " invert-9c512d5db9536b92574a5cc1a0d36483\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " isnan-8c19b454cfb3a9334151f05fbb99ab43 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer33: sum
\n",
+ "
\n",
+ " \n",
+ " sum-0d04510c46f2c83deeebb5a2c78afda2\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " invert-9c512d5db9536b92574a5cc1a0d36483 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer34: sum-aggregate
\n",
+ "
\n",
+ " \n",
+ " sum-aggregate-f9d95b2f5f93afa27ca5b22415e74348\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-0d04510c46f2c83deeebb5a2c78afda2 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer35: asarray_isnull
\n",
+ "
\n",
+ " \n",
+ " _asarray_isnull-ff57c21eb03908c85088523f90262be2\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " reshape-d4dc45bbd02c34cade2c12fc106b3404 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer36: where
\n",
+ "
\n",
+ " \n",
+ " where-a483c2e8b857759aad440899c5bd24f2\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " _asarray_isnull-ff57c21eb03908c85088523f90262be2 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " reshape-d4dc45bbd02c34cade2c12fc106b3404 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer37: astype
\n",
+ "
\n",
+ " \n",
+ " astype-096e9adad4444606f6727d0d07c4bd38\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " where-a483c2e8b857759aad440899c5bd24f2 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer38: astype
\n",
+ "
\n",
+ " \n",
+ " astype-bff2f6d0fbdc57637fa2eb61a67d7b94\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " astype-096e9adad4444606f6727d0d07c4bd38 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer39: sum
\n",
+ "
\n",
+ " \n",
+ " sum-8e375fe22ddc4d5441dd7066544ea9b4\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " astype-bff2f6d0fbdc57637fa2eb61a67d7b94 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer40: sum-aggregate
\n",
+ "
\n",
+ " \n",
+ " sum-aggregate-72fa3ecb06baad1e174ca33c3208f95c\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-8e375fe22ddc4d5441dd7066544ea9b4 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer41: invert
\n",
+ "
\n",
+ " \n",
+ " invert-e68a9c9bf801a0362997f48d02b1c623\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " _asarray_isnull-ff57c21eb03908c85088523f90262be2 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer42: sum
\n",
+ "
\n",
+ " \n",
+ " sum-775593baa171724aaab0bb511f1bd770\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1, 136047) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " invert-e68a9c9bf801a0362997f48d02b1c623 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer43: sum-aggregate
\n",
+ "
\n",
+ " \n",
+ " sum-aggregate-f20de339ec6af101f100085967970a45\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " MaterializedLayer | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " True | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " int64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-775593baa171724aaab0bb511f1bd770 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer44: equal
\n",
+ "
\n",
+ " \n",
+ " equal-9f0174a48b38c8fb9c64cf6ce449a2ac\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " bool | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-aggregate-f20de339ec6af101f100085967970a45 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer45: where
\n",
+ "
\n",
+ " \n",
+ " where-36b943d3072a058cc3c655ffb56c464b\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-aggregate-72fa3ecb06baad1e174ca33c3208f95c | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " equal-9f0174a48b38c8fb9c64cf6ce449a2ac | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer46: divide
\n",
+ "
\n",
+ " \n",
+ " divide-fa3758df75d1ae1a154d5d1490de253c\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " sum-aggregate-f9d95b2f5f93afa27ca5b22415e74348 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " where-36b943d3072a058cc3c655ffb56c464b | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " Layer47: multiply
\n",
+ "
\n",
+ " \n",
+ " multiply-b9566f3ecab806f868ff5dcea533a521\n",
+ "
\n",
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | layer_type | \n",
+ " Blockwise | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | is_materialized | \n",
+ " False | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | number of outputs | \n",
+ " 12 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | shape | \n",
+ " (12,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | dtype | \n",
+ " float64 | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunksize | \n",
+ " (1,) | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | type | \n",
+ " dask.array.core.Array | \n",
+ " \n",
+ " \n",
+ " \n",
+ " | chunk_type | \n",
+ " numpy.ndarray | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | depends on | \n",
+ " divide-fa3758df75d1ae1a154d5d1490de253c | \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
\n",
+ "\n",
+ " \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ "HighLevelGraph with 47 layers.\n",
+ "\n",
+ " 0. asset-table-d430f26086d18353d050b6810c2ec64b\n",
+ " 1. asset_table_to_reader_and_window-a478deb1b5eaa9ee27c6b02f19e7abea\n",
+ " 2. fetch_raster_window-0a5ef31258203b0ddbde185ea9f2c4c1\n",
+ " 3. getitem-5b21e533707affbd8ff8d863dd805cfb\n",
+ " 4. astype-27a5caa3566b401a267c4098d3731341\n",
+ " 5. ne-0c20b400983a18ca46a49c24220631c8\n",
+ " 6. where-1ea4d0552e1498b920d445cc702e0342\n",
+ " 7. ne-8d47136e1ec05dd7130a1f6bbb2e9dfb\n",
+ " 8. where-ad631554a952eb40e7a85778b72dacb1\n",
+ " 9. array-7582cc4aa0f7a9cf493233c2a4dd49f5\n",
+ " 10. where-012d1b457871ed908f86dfae7ff2a372\n",
+ " 11. f-a84a4d99e3fbb68755ce5dadb5094103\n",
+ " 12. f_0-a84a4d99e3fbb68755ce5dadb5094103\n",
+ " 13. transpose-b228ed4a1924c4a2c9e7fd93df58b541\n",
+ " 14. getitem-4ff5e35b82d12beca86ab3d2cde05904\n",
+ " 15. f-8f7f085e8083c2d4bdf5ab3f5ce9c631\n",
+ " 16. f_0-8f7f085e8083c2d4bdf5ab3f5ce9c631\n",
+ " 17. transpose-af9147d3fc0e17a557793bd6d2e6b921\n",
+ " 18. getitem-0c2a425d2cb897a3373bd476af53d557\n",
+ " 19. concatenate-47fe7c9b7cce4e45ab7948dbc55bb6f5\n",
+ " 20. _asarray_isnull-f2914599207cb7b467abca1aa823ded7\n",
+ " 21. where-76fd23fa20b6b65c45273ad0a58ccb78\n",
+ " 22. any-40899ae0be794b92fdb7b44b29af93f2\n",
+ " 23. any-aggregate-06500e64b5323cff380bd9d3fdd3cb2c\n",
+ " 24. invert-40ffe2b24ef14a7f6b1ebcd3804110ae\n",
+ " 25. sum-3a4edc4ce40eeb7afc08a8296b3ee5da\n",
+ " 26. sum-aggregate-92e1671557dba32b4b4cacdd642f9353\n",
+ " 27. equal-149eb22bd15bc21327c8eb44dd97e804\n",
+ " 28. where-9facb6c6362e96179415849e2275fde1\n",
+ " 29. reshape-d4dc45bbd02c34cade2c12fc106b3404\n",
+ " 30. isnan-8c19b454cfb3a9334151f05fbb99ab43\n",
+ " 31. invert-9c512d5db9536b92574a5cc1a0d36483\n",
+ " 32. sum-0d04510c46f2c83deeebb5a2c78afda2\n",
+ " 33. sum-aggregate-f9d95b2f5f93afa27ca5b22415e74348\n",
+ " 34. _asarray_isnull-ff57c21eb03908c85088523f90262be2\n",
+ " 35. where-a483c2e8b857759aad440899c5bd24f2\n",
+ " 36. astype-096e9adad4444606f6727d0d07c4bd38\n",
+ " 37. astype-bff2f6d0fbdc57637fa2eb61a67d7b94\n",
+ " 38. sum-8e375fe22ddc4d5441dd7066544ea9b4\n",
+ " 39. sum-aggregate-72fa3ecb06baad1e174ca33c3208f95c\n",
+ " 40. invert-e68a9c9bf801a0362997f48d02b1c623\n",
+ " 41. sum-775593baa171724aaab0bb511f1bd770\n",
+ " 42. sum-aggregate-f20de339ec6af101f100085967970a45\n",
+ " 43. equal-9f0174a48b38c8fb9c64cf6ce449a2ac\n",
+ " 44. where-36b943d3072a058cc3c655ffb56c464b\n",
+ " 45. divide-fa3758df75d1ae1a154d5d1490de253c\n",
+ " 46. multiply-b9566f3ecab806f868ff5dcea533a521"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Alternative, lighter task graph explorer\n",
+ "response[\"cloud_snow_share\"].data.__dask_graph__()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "iLit",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'cloud_snow_share': Size: 96B\n",
+ " dask.array\n",
+ " Coordinates:\n",
+ " * time (time) datetime64[ns] 96B 2023-06-01T09:57:25.756000 ... 20...\n",
+ " temporal_ref int64 8B 0\n",
+ " Attributes:\n",
+ " spec: RasterSpec(epsg=32633, bounds=(586580.0, 5353640.0, 592640.0...\n",
+ " crs: epsg:32633\n",
+ " transform: | 20.00, 0.00, 586580.00|\\n| 0.00,-20.00, 5362620.00|\\n| 0.0...\n",
+ " resolution: 20.0\n",
+ " value_type: continuous}"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Inspect the response - is it still a lazy dask array or has it been materialized?\n",
+ "response"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "ZHCJ",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n",
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n",
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n",
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n",
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n",
+ "/jupyter/semantique/semantique/__init__.py:12: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\n",
+ " import pkg_resources\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Run the computation!\n",
+ "result_cs_pc = response[\"cloud_snow_share\"].compute()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "ROlb",
+ "metadata": {
+ "marimo": {
+ "config": {
+ "disabled": true
+ }
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Compute secondary output - ignore for now.\n",
+ "# result_csfree_mask = response[\"cloud_snowfree\"].compute()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "qnkX",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {},
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.holoviews_exec.v0+json": "",
+ "text/html": [
+ "\n",
+ ""
+ ],
+ "text/plain": [
+ ":Scatter [time] (cloud_snow_share)"
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {
+ "application/vnd.holoviews_exec.v0+json": {
+ "id": "553c9767-6673-49b7-a1e7-454fcc6765dc"
+ }
+ },
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "result_cs_pc.hvplot.scatter(\n",
+ " x=\"time\", y=\"cloud_snow_share\", marker=\"x\", color=\"blue\"\n",
+ ").opts(title=\"Cloud & snow percentage\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "TqIu",
+ "metadata": {
+ "marimo": {
+ "config": {
+ "disabled": true
+ }
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Results of secondary output - ignore for now.\n",
+ "# result_csfree_mask.hvplot.scatter(\n",
+ "# x=\"time\", y=\"cloud_snowfree\", marker=\"x\"\n",
+ "# ).opts(color=\"red\", title=\"Cloud-free images\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "Vxnm",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.13.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/semantique/datacube.py b/semantique/datacube.py
index 4a396626..b697967f 100644
--- a/semantique/datacube.py
+++ b/semantique/datacube.py
@@ -4,6 +4,7 @@
import copy
import datacube
import datetime
+import odc.stac
import os
import planetary_computer as pc
import pyproj
@@ -25,6 +26,7 @@
from semantique import exceptions
from semantique.dimensions import TIME, SPACE, X, Y
+from semantique.processor.utils import set_global_dask_lazy
class Datacube():
"""Base class for EO data cube configurations.
@@ -117,11 +119,12 @@ class Opendatacube(Datacube):
Trimming means that dimension coordinates for which all values are
missing are removed from the array. The spatial dimensions are trimmed
only at the edges, to maintain their regularity. Defaults to
- :obj:`True`.
+ :obj:`True`. Note: Not currently supported with dask_lazy also enabled.
* **group_by_solar_day** (:obj:`bool`): Should the time dimension be
resampled to the day level, using solar day to keep scenes together?
- Defaults to :obj:`True`.
+ Defaults to :obj:`True`. Note: Not currently supported with dask_lazy
+ also enabled.
* **value_type_mapping** (:obj:`dict`): How do value type encodings in
the layout map to the value types used by semantique?
@@ -1169,4 +1172,472 @@ def _sign_metadata(items):
else:
updated_items.extend(curr_colls[coll]["items"])
# return signed items
- return pystac.ItemCollection(updated_items)
\ No newline at end of file
+ return pystac.ItemCollection(updated_items)
+
+class DaskCube(Datacube):
+ """
+ A variant of `STACCube` that creates a lazy Dask-backed xarray.DataArray from a STAC item collection.
+
+ Experimental. Functionality is limited to operations that do not require intermediate data reads.
+
+ Limitations include:
+ * No trim support.
+ * No warnings if an intermediate step returns an empty dataset.
+ * Only really effective for a single output (others are supported but will trigger a recompute).
+ * Expect output to be slightly different, due to implementation differences\*.
+ * Some operators may not work as expected.
+ * May not work with really big jobs, due to task graph size issues.
+
+  \*Uses `odc.stac.load` (which supports a lazy group-by-solar-day functionality), rather than `stackstac.stack`.
+
+ Parameters
+ ----------
+ layout : :obj:`dict`
+ The layout file describing the EO data cube. If :obj:`None`, an empty
+ EO data cube is constructed.
+ src : :obj:`pystac.item_collection.ItemCollection` or `list of pystac.item.Item`
+ The item search result from a previous STAC search as a src to build the datacube
+ **config:
+ Additional keyword arguments tuning the data retrieval configuration.
+ Valid options are:
+
+ * ~**trim** (:obj:`bool`):~ Unsupported.
+
+ * **group_by_solar_day** (:obj:`bool`): Should the time dimension be
+ resampled to the day level, using solar day to keep scenes together?
+ Defaults to :obj:`True`.
+
+ * **dask_chunk_size** (:obj:`int`): Dask chunks will be size × size × 1 grid cells (x × y × time).
+
+ * **dask_params** (:obj:`dict`): Parameters passed to the .compute() function.
+
+ * **reauth_individual** (:obj:`bool`): Should the items be resigned/reauthenticated
+ before loading them? Defaults to False.
+
+ * **access_token** (:obj:`str`): Access token string (OAuth2) to be used in accessing the STAC href.
+
+ * **value_type_mapping** (:obj:`dict`): How do value type encodings in
+ the layout map to the value types used by semantique?
+ Defaults to a one-to-one mapping: ::
+
+ {
+ "nominal": "nominal",
+ "ordinal": "ordinal",
+ "binary": "binary",
+ "continuous": "continuous",
+ "discrete": "discrete"
+ }
+
+ * **resamplers** (:obj:`dict`): When data need to be resampled to a
+ different spatial and/or temporal resolution, what resampling technique
+ should be used? Should be specified separately for each possible value
+ type in the layout. Valid techniques are: ::
+
+ 'nearest', 'average', 'bilinear', 'cubic', 'cubic_spline',
+ 'lanczos', 'mode', 'gauss', 'max', 'min', 'med', 'q1', 'q3'
+
+ Defaults to: ::
+
+ {
+ "nominal": "nearest",
+ "ordinal": "nearest",
+ "binary": "nearest",
+ "continuous": "bilinear",
+ "discrete": "nearest"
+ }
+
+ """
+
+  def __init__(self, layout=None, src=None, **config):
+    super(DaskCube, self).__init__(layout)
+    self.src = src
+    # Timezone of the temporal coordinates is inferred from the pystac search
+    # results and converted to UTC internally; results come back as datetime64[ns].
+    self.tz = "UTC"
+    # Update default configuration parameters with provided ones.
+    params = self._default_config
+    params.update(config)
+    self.config = params
+
+    # Use dask lazy functions in subsequent operations.
+    set_global_dask_lazy(True)  # NOTE(review): module-global flag; affects every cube in the process - confirm intended
+
+
+
+  @property
+  def src(self):
+    """:obj:`pystac.item_collection.ItemCollection` or :obj:`list of pystac.item.Item`:
+    The item search result from a previous STAC search."""
+    return self._src
+
+  @src.setter
+  def src(self, value):
+    if value is not None:
+      assert np.all([isinstance(x, pystac.item.Item) for x in value])  # NOTE(review): assert is stripped under -O; consider raising TypeError
+    self._src = value
+
+  @property
+  def _default_config(self):  # default retrieval configuration; overridden by **config kwargs in __init__
+    return {
+      "trim": False,  # trimming is unsupported for lazy dask retrieval (see retrieve())
+      "group_by_solar_day": True,
+      "dask_params": None,  # parameters passed to the .compute() call
+      "dask_chunk_size": 2048,  # chunks are size x size x 1 grid cells (x, y, time)
+      "reauth_individual": False,  # re-sign items before loading (see _sign_metadata)
+      "access_token": "",  # OAuth2 bearer token handed to GDAL (see _load)
+      "value_type_mapping": {
+        "nominal": "nominal",
+        "ordinal": "ordinal",
+        "binary": "binary",
+        "continuous": "continuous",
+        "discrete": "discrete",
+      },
+      "resamplers": {
+        "nominal": "nearest",
+        "ordinal": "nearest",
+        "binary": "nearest",
+        "continuous": "bilinear",
+        "discrete": "nearest",
+      },
+    }
+
+  @property
+  def config(self):
+    """:obj:`dict`: Configuration settings for data retrieval."""
+    return self._config
+
+  @config.setter
+  def config(self, value):
+    assert isinstance(value, dict)  # NOTE(review): assert is stripped under -O; consider raising TypeError
+    self._config = value
+
+  @property
+  def layout(self):
+    """:obj:`dict`: The layout file of the EO data cube."""
+    return self._layout
+
+  @layout.setter
+  def layout(self, value):
+    self._layout = {} if value is None else self._parse_layout(value)  # parsing adds autocomplete helper fields
+
+  def _parse_layout(self, obj):
+    # Recursively parse layer metadata objects to make them autocomplete friendly.
+    def _parse(current_obj, ref_path):
+      if "type" in current_obj and "values" in current_obj:
+        current_obj["reference"] = copy.deepcopy(ref_path)
+        if isinstance(current_obj["values"], list):
+          current_obj["labels"] = {
+            item["label"]: item["id"] for item in current_obj["values"]
+          }
+          current_obj["descriptions"] = {  # NOTE(review): duplicate descriptions silently overwrite earlier ids
+            item["description"]: item["id"]
+            for item in current_obj["values"]
+          }
+        return
+
+      # If not a "layer", traverse deeper into the object.
+      for key, value in current_obj.items():
+        if isinstance(value, dict):
+          new_ref_path = ref_path + [key]
+          _parse(value, new_ref_path)
+
+    # Start parsing from the root object.
+    for key, value in obj.items():
+      if isinstance(value, dict):
+        _parse(value, [key])
+    return obj
+
+ def retrieve(self, *reference, extent):
+ """Retrieve a data layer from the EO data cube.
+
+ Parameters
+ ----------
+ *reference:
+ The index of the data layer in the layout of the EO data cube.
+ extent : :obj:`xarray.DataArray`
+ Spatio-temporal extent in which the data should be retrieved. Should be
+ given as an array with a temporal dimension and two spatial dimensions,
+ such as returned by
+ :func:`parse_extent `.
+ The retrieved subset of the EO data cube will have the same extent.
+
+ Returns
+ -------
+ :obj:`xarray.DataArray`
+ The retrieved subset of the EO data cube.
+
+ """
+ # Solve the reference by obtaining the corresponding metadata object.
+ metadata = self.lookup(*reference)
+ # Load the data values from the EO data cube.
+ data = self._load(metadata, extent)
+
+
+ # Format loaded data.
+ data = self._format(data, metadata, extent)
+ # Mask invalid data.
+ data = self._mask(data, metadata)
+
+ if self.config["trim"]:
+ raise NotImplementedError("Not supported with Dask lazy computation.")
+
+ return data
+
+  def _load(self, metadata, extent):
+    # Validate the extent: a temporal and at least one spatial dimension required.
+    if TIME not in extent.dims:
+      raise exceptions.MissingDimensionError(
+        "Cannot retrieve data in an extent without a temporal dimension"
+      )
+    if X not in extent.dims and Y not in extent.dims:
+      raise exceptions.MissingDimensionError(
+        "Cannot retrieve data in an extent without spatial dimensions"
+      )
+
+    # Retrieve spatial bounds, resolution & epsg of the extent.
+    # Round to avoid binary format <-> floating-point number inconsistencies.
+    s_bounds = tuple(np.array(extent.rio.bounds()).round(8))
+    res = tuple(np.abs(extent.rio.resolution()).round(8))
+    epsg = int(str(extent.rio.crs)[5:])  # assumes str(crs) == "EPSG:<code>" - TODO confirm for non-EPSG CRS
+
+    # Retrieve resampler name (odc.stac accepts string names).
+    resampler_name = self.config["resamplers"][metadata["type"]]
+
+    # Retrieve layer-specific dtype and nodata value.
+    lyr_dtype, lyr_na = self._get_dtype_na(metadata)
+
+    # Subset the item collection temporally and spatially.
+    if "spatial_feats" in extent.coords:
+      extent = extent.drop_vars("spatial_feats")
+    t_bounds = extent.sq.tz_convert(self.tz).time.values
+    item_coll = DaskCube.filter_spatio_temporal(
+      self.src,
+      extent.rio.bounds(),
+      epsg,
+      t_bounds[0],
+      t_bounds[1]
+    )
+
+    # Subset according to layer key; items without any keyed asset are kept too.
+    filtered_items = []
+    for item in item_coll:
+      has_no_key = True
+      has_conformant_key = False
+      for asset_key, asset in item.assets.items():
+        if 'semantique:key' in asset.extra_fields:
+          has_no_key = False
+          asset_key = asset.extra_fields['semantique:key']  # NOTE(review): rebinds the loop variable; presumably a list of key parts - confirm
+          ref_key = metadata['reference']
+          if "_".join(asset_key) == "_".join(ref_key):
+            has_conformant_key = True
+            break
+        else:
+          continue  # NOTE(review): redundant - the loop continues anyway
+      if has_no_key or has_conformant_key:
+        filtered_items.append(item)
+    item_coll = filtered_items
+
+    # Return the extent array filled with NaN in case of no matching items.
+    if not len(item_coll):
+      empty_arr = xr.full_like(extent, np.nan)
+      return empty_arr
+
+    # Optionally re-sign/re-authenticate each item before loading.
+    if self.config["reauth_individual"]:
+      item_coll = DaskCube._sign_metadata(item_coll)
+
+    # Determine the groupby strategy for the time dimension.
+    groupby = "solar_day" if self.config["group_by_solar_day"] else "time"
+
+    # Assemble odc.stac.load parameters.
+    chunk_size = self.config["dask_chunk_size"]
+    odc_stac_inputs = {
+      "bands": [metadata["name"]],
+      "resampling": resampler_name,
+      # Bounding box in input CRS ('bbox' would be EPSG:4326 only).
+      "x": (s_bounds[0], s_bounds[2]),
+      "y": (s_bounds[1], s_bounds[3]),
+      "crs": f"EPSG:{epsg}",
+      "resolution": res[0],  # odc.stac uses a single resolution value
+      "nodata": lyr_na,
+      "dtype": lyr_dtype,
+      "chunks": {"x": chunk_size, "y": chunk_size},  # time chunking left to odc.stac defaults
+      "groupby": groupby,
+      "fail_on_error": False,
+    }
+
+    # Auth via token: set GDAL environment variables read by the raster driver.
+    if self.config["access_token"]:
+      os.environ["GDAL_HTTP_AUTH"] = "BEARER"
+      os.environ["GDAL_HTTP_BEARER"] = self.config["access_token"]
+
+    data = odc.stac.load(  # lazy: builds the dask graph, no pixel reads here
+      item_coll,
+      **odc_stac_inputs
+    )
+
+    return data
+
+ def _get_dtype_na(self, metadata):
+ # retrieve dtype
+ try:
+ lyr_dtype = np.dtype(metadata["dtype"])
+ except:
+ lyr_dtype = "float32"
+ # retrieve na_value
+ try:
+ lyr_na = np.array([metadata["na_value"]], dtype=lyr_dtype)[0]
+ except:
+ if isinstance(np.array([1], dtype=lyr_dtype)[0], np.floating):
+ lyr_na = np.nan
+ else:
+ lyr_na = 0
+ # return both
+ return lyr_dtype, lyr_na
+
+  def _format(self, data, metadata, extent):
+    # Step I: Extract the band variable from the Dataset.
+    # odc.stac.load returns a Dataset with band names as variables.
+    band_name = metadata["name"]
+    data = data[band_name]
+    data.name = band_name
+    # Step II: Drop unnecessary (non-dimension) coordinates.
+    keep_coords = ["time", data.rio.x_dim, data.rio.y_dim]
+    drop_coords = [x for x in list(data.coords) if x not in keep_coords]
+    data = data.drop_vars(drop_coords)
+    # Step III: Format temporal coordinates.
+    # --> Make sure the time dimension has the canonical name.
+    # --> Convert time coordinates back into the original timezone.
+    data = data.sq.rename_dims({"time": TIME})
+    data = data.sq.write_tz(self.tz)
+    data = data.sq.tz_convert(extent.sq.tz)
+    # Step IV: Format spatial coordinates.
+    # --> Make sure X and Y dims have the canonical names.
+    # --> Store resolution as an attribute of the spatial coordinate dimensions.
+    # --> Add spatial feature indices as a non-dimension coordinate.
+    data = data.sq.rename_dims({data.rio.y_dim: Y, data.rio.x_dim: X})
+    data = data.sq.write_spatial_resolution(extent.sq.spatial_resolution)
+    data.coords["spatial_feats"] = ([Y, X], extent["spatial_feats"].data)  # assumes loaded grid matches extent grid shape - TODO confirm
+    # Step V: Write semantique-specific attributes.
+    # --> Value types for the data and all dimension coordinates.
+    # --> Mapping from category labels to indices for all categorical data.
+    data.sq.value_type = self.config["value_type_mapping"][metadata["type"]]
+    if isinstance(metadata["values"], list):
+      value_labels = {}
+      for x in metadata["values"]:
+        try:
+          label = x["label"]
+        except KeyError:
+          label = None  # categories may legitimately lack a label
+        value_labels[x["id"]] = label
+      data.sq.value_labels = value_labels
+    data[TIME].sq.value_type = "datetime"
+    data[Y].sq.value_type = "continuous"
+    data[X].sq.value_type = "continuous"
+    data["spatial_feats"].sq.value_type = extent["spatial_feats"].sq.value_type
+    data["spatial_feats"].sq.value_labels = extent["spatial_feats"].sq.value_labels
+    return data
+
+  def _mask(self, data, metadata):
+    # Step I: Mask nodata values (layout-declared and rio-declared).
+    _, lyr_na = self._get_dtype_na(metadata)
+    data = data.where(data != lyr_na)  # masked cells become NaN
+    data = data.where(data != data.rio.nodata)  # NOTE(review): rio.nodata may be None here - confirm harmless
+    # Step II: Mask values outside of the spatial extent.
+    # Needed since data are initially loaded for the bbox of the extent.
+    data = data.where(data["spatial_feats"].notnull())
+    return data
+
+  @staticmethod
+  def _divide_chunks(lst, k):
+    return [lst[i : i + k] for i in range(0, len(lst), k)]  # consecutive chunks of k items; last may be shorter
+
+  @staticmethod
+  def filter_spatio_temporal(item_collection, bbox, bbox_crs, start_datetime, end_datetime):
+    """
+    Filter item collection by spatio-temporal extent.
+
+    Args:
+      item_collection (pystac.ItemCollection): The item collection to filter.
+      bbox (tuple): Bounding box to filter by, given in the CRS of ``bbox_crs``.
+      bbox_crs (str or int): The CRS of the bounding box (e.g. an EPSG code).
+      start_datetime (np.datetime64): Inclusive start of the time interval.
+      end_datetime (np.datetime64): Exclusive end of the time interval.
+    """
+    min_lon, min_lat, max_lon, max_lat = bbox  # NOTE(review): names suggest lon/lat but values are in bbox_crs units
+    spatial_filter = box(min_lon, min_lat, max_lon, max_lat)
+    source_crs = pyproj.CRS("EPSG:4326")
+    target_crs = pyproj.CRS(bbox_crs)  # item geometries (stored in WGS84) get reprojected into this CRS
+    transformer = (
+      pyproj.Transformer
+      .from_crs(source_crs, target_crs, always_xy=True)
+      .transform
+    )
+    filtered_items = []
+    for item in item_collection:
+      item_geom = shape(item.geometry)
+      item_geom = transform(transformer, item_geom)
+      item_datetime = np.datetime64(item.datetime)
+      if not spatial_filter.intersects(item_geom):
+        continue
+      if not (start_datetime <= item_datetime < end_datetime):  # NOTE(review): half-open; excludes items at exactly end_datetime - confirm intended
+        continue
+      filtered_items.append(item)
+    return filtered_items
+
+ @staticmethod
+ def _sign_metadata(items):
+ # retrieve collections root & item ids
+ roots = [x.get_root_link().href for x in items]
+ # create dictionary grouped by collection
+ curr_colls = {}
+ for c, item in zip(roots, items):
+ if c not in curr_colls:
+ curr_colls[c] = {"items": []}
+ curr_colls[c]["items"].append(item)
+ # define collections requiring authentication
+ # dict with collection and modifier
+ auth_colls = {}
+ auth_colls = {
+ "https://planetarycomputer.microsoft.com/api/stac/v1": pc.sign_inplace
+ }
+ # update signature for items
+ updated_items = []
+ for coll in curr_colls.keys():
+ if coll in auth_colls.keys():
+ # perform search again to renew authentification
+ retry = Retry(
+ total=5,
+ backoff_factor=1,
+ status_forcelist=[408, 502, 503, 504],
+ allowed_methods=None,
+ )
+ client = pystac_client.Client.open(
+ coll,
+ modifier=auth_colls[coll],
+ stac_io=StacApiIO(max_retries=retry, timeout=1800),
+ )
+ item_chunks = STACCube._divide_chunks(curr_colls[coll]["items"], 100)
+ for chunk in item_chunks:
+ item_search = client.search(
+ ids=[x.id for x in chunk],
+ collections=[x.get_collection() for x in chunk],
+ )
+ for item in item_search.items():
+ original_item = next(
+ (i for i in chunk if i.id == item.id), None
+ )
+ if original_item is not None:
+ # create a deep copy of the original item
+ # aim: keep original attributes and assets
+ new_item = original_item.clone()
+ # imprinting of the updated hrefs with new tokens
+ for asset_key in item.assets:
+ if asset_key in new_item.assets:
+ new_href = item.assets[asset_key].href
+ new_item.assets[asset_key].href = new_href
+ updated_items.append(new_item)
+ else:
+ updated_items.extend(curr_colls[coll]["items"])
+ # return signed items
+ return pystac.ItemCollection(updated_items)
diff --git a/semantique/processor/arrays.py b/semantique/processor/arrays.py
index caf703a0..ec108b2b 100644
--- a/semantique/processor/arrays.py
+++ b/semantique/processor/arrays.py
@@ -768,7 +768,7 @@ def delineate(self, track_types = True, **kwargs):
"""
# Get and check array.
- obj = xr.apply_ufunc(utils.null_as_zero, self._obj)
+ obj = utils.apply_ufunc(utils.null_as_zero, self._obj)
if track_types:
vtype = obj.sq.value_type
if vtype is not None and vtype != "binary":
@@ -816,7 +816,7 @@ def delineate(self, track_types = True, **kwargs):
f"and/or '{SPACE}', not: {list(dims)}"
)
# Delineate.
- out = xr.apply_ufunc(lambda x, y: ndimage.label(x, y)[0], obj, nb)
+ out = utils.apply_ufunc(lambda x, y: ndimage.label(x, y)[0], obj, nb)
# Post-process.
out = out.where(pd.notnull(self._obj)) # Preserve nan.
if track_types:
diff --git a/semantique/processor/core.py b/semantique/processor/core.py
index 10a307e5..78b69792 100644
--- a/semantique/processor/core.py
+++ b/semantique/processor/core.py
@@ -1086,14 +1086,15 @@ def call_verb(self, name, params):
verb = getattr(obj.sq, name)
out = verb(**params)
# Warn when output array is empty.
- try:
- is_empty = out.sq.is_empty
- except AttributeError:
- is_empty = out.is_empty
- if is_empty:
- warnings.warn(
- f"Verb '{name}' returned an empty array"
- )
+ # Removed for Dask. TODO: Put behind config flag.
+ # try:
+ # is_empty = out.sq.is_empty
+ # except AttributeError:
+ # is_empty = out.is_empty
+ # if is_empty:
+ # warnings.warn(
+ # f"Verb '{name}' returned an empty array"
+ # )
logger.debug(f"Applied verb {name}:\n{out}")
return out
diff --git a/semantique/processor/operators.py b/semantique/processor/operators.py
index dbc8f7c2..d5864ec1 100644
--- a/semantique/processor/operators.py
+++ b/semantique/processor/operators.py
@@ -1,3 +1,4 @@
+import dask.array as da
import pandas as pd
import numpy as np
import xarray as xr
@@ -49,7 +50,7 @@ def not_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "not")
promoter.check()
f = lambda x: np.where(pd.notnull(x), np.logical_not(x), np.nan)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -105,7 +106,7 @@ def is_missing_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "is_missing")
promoter.check()
f = lambda x: pd.isnull(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -148,7 +149,7 @@ def not_missing_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "not_missing")
promoter.check()
f = lambda x: pd.notnull(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -191,7 +192,7 @@ def absolute_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "absolute")
promoter.check()
f = lambda x: np.absolute(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -236,7 +237,7 @@ def ceiling_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "ceiling")
promoter.check()
f = lambda x: np.ceil(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -281,7 +282,7 @@ def cosine_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "cosine")
promoter.check()
f = lambda x: np.cos(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -329,7 +330,7 @@ def f(x):
sin = np.sin(x)
sin_nozero = np.where(np.equal(sin, 0), np.nan, sin)
return np.divide(1, sin_nozero)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -377,7 +378,7 @@ def f(x):
tan = np.tan(x)
tan_nozero = np.where(np.equal(tan, 0), np.nan, tan)
return np.divide(1, tan_nozero)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -420,7 +421,7 @@ def cube_root_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "cube_root")
promoter.check()
f = lambda x: np.cbrt(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -467,7 +468,7 @@ def exponential_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "exponential")
promoter.check()
f = lambda x: np.exp(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -512,7 +513,7 @@ def floor_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "floor")
promoter.check()
f = lambda x: np.floor(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -559,7 +560,7 @@ def natural_logarithm_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "natural_logarithm")
promoter.check()
f = lambda x: np.where(np.equal(x, 0), np.nan, np.log(x))
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -607,7 +608,7 @@ def f(x):
cos = np.cos(x)
cos_nozero = np.where(np.equal(cos, 0), np.nan, cos)
return np.divide(1, cos_nozero)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -652,7 +653,7 @@ def sine_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "sine")
promoter.check()
f = lambda x: np.sin(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -695,7 +696,7 @@ def square_root_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "square_root")
promoter.check()
f = lambda x: np.sqrt(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -740,7 +741,7 @@ def tangent_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "tangent")
promoter.check()
f = lambda x: np.tan(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -785,7 +786,7 @@ def to_degrees_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "to_degrees")
promoter.check()
f = lambda x: np.rad2deg(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -830,7 +831,7 @@ def to_radians_(x, track_types = True, **kwargs):
promoter = TypePromoter(x, function = "to_radians")
promoter.check()
f = lambda x: np.deg2rad(x)
- out = xr.apply_ufunc(f, x, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -888,7 +889,7 @@ def add_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.add(x, y)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -942,7 +943,7 @@ def divide_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.divide(x, np.where(np.equal(y, 0), np.nan, y))
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -996,7 +997,7 @@ def multiply_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.multiply(x, y)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1050,7 +1051,7 @@ def power_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.power(x, y)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1104,7 +1105,7 @@ def subtract_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.subtract(x, y)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1162,7 +1163,7 @@ def normalized_difference_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.divide(np.subtract(x, y), np.add(x, y))
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1222,7 +1223,7 @@ def f(x, y):
y = utils.null_as_zero(y)
return np.where(pd.notnull(x), np.logical_and(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1285,7 +1286,7 @@ def f(x, y):
y = utils.null_as_zero(y)
return np.where(pd.notnull(x), np.logical_or(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1348,7 +1349,7 @@ def f(x, y):
y = utils.null_as_zero(y)
return np.where(pd.notnull(x), np.logical_xor(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1406,11 +1407,12 @@ def equal_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.equal(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
+
def in_(x, y, track_types = True, **kwargs):
"""Test if x is a member of set y.
@@ -1458,10 +1460,19 @@ def f(x, y):
if isinstance(y, Interval):
a = np.greater_equal(x, y.lower)
b = np.less_equal(x, y.upper)
- return np.where(pd.notnull(x), np.logical_and(a, b), np.nan)
+
+ return np.where(utils.notnull(x), np.logical_and(a, b), np.nan)
else:
- return np.where(pd.notnull(x), np.isin(x, y), np.nan)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ return np.where(utils.notnull(x), np.isin(x, y), np.nan)
+
+ out = utils.apply_ufunc(
+ f,
+ x,
+ keep_attrs=True,
+ kwargs={'y': y},
+ output_dtypes=[x.dtype],
+ )
+
if track_types:
out = promoter.promote(out)
return out
@@ -1515,7 +1526,7 @@ def not_equal_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.not_equal(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1570,7 +1581,7 @@ def f(x, y):
return np.where(pd.notnull(x), np.logical_or(a, b), np.nan)
else:
return np.where(pd.notnull(x), np.isin(x, y, invert = True), np.nan)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1628,7 +1639,7 @@ def greater_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.greater(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1682,7 +1693,7 @@ def greater_equal_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.greater_equal(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1736,7 +1747,7 @@ def less_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.less(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1790,7 +1801,7 @@ def less_equal_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.less_equal(x, y), np.nan)
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1928,7 +1939,7 @@ def after_(x, y, track_types = True, **kwargs):
promoter = TypePromoter(x, y, function = "after")
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.greater(x, np.nanmax(y)), np.nan)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -1991,7 +2002,7 @@ def before_(x, y, track_types = True, **kwargs):
promoter = TypePromoter(x, y, function = "before")
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), np.less(x, np.nanmin(y)), np.nan)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -2054,7 +2065,7 @@ def f(x, y):
a = np.greater_equal(x, np.nanmin(y))
b = np.less_equal(x, np.nanmax(y))
return np.where(pd.notnull(x), np.logical_and(a, b), np.nan)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -2114,7 +2125,7 @@ def assign_(x, y, track_types = True, **kwargs):
promoter.check()
f = lambda x, y: np.where(pd.notnull(x), y, utils.get_null(y))
y = xr.DataArray(y).sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
@@ -2173,7 +2184,7 @@ def assign_at_(x, y, z, track_types = True, **kwargs):
f = lambda x, y, z: np.where(np.logical_and(pd.notnull(z), z), y, x)
y = xr.DataArray(y).sq.align_with(x)
z = z.sq.align_with(x)
- out = xr.apply_ufunc(f, x, y, z, keep_attrs = True)
+ out = utils.apply_ufunc(f, x, y, z, keep_attrs = True)
if track_types:
out = promoter.promote(out)
return out
diff --git a/semantique/processor/reducers.py b/semantique/processor/reducers.py
index e8a16ae0..0dcef06a 100644
--- a/semantique/processor/reducers.py
+++ b/semantique/processor/reducers.py
@@ -769,12 +769,29 @@ def percentage_(x, track_types = True, **kwargs):
if track_types:
promoter = TypePromoter(x, function = "percentage")
promoter.check()
- def f(x, axis = None):
- part = np.count_nonzero(utils.null_as_zero(x), axis)
- part = np.where(utils.allnull(x, axis), np.nan, part)
- whole = np.sum(pd.notnull(x), axis)
- return np.multiply(np.divide(part, whole), 100)
+ def f(x, axis=None):
+    # TODO: Simplify; every operation must stay lazy (no .compute()) so dask graphs are preserved.
+ # All operations stay as dask/numpy arrays - no compute() calls
+ part = np.count_nonzero(utils.null_as_zero(x), axis=axis)
+
+ # Check if all values are null along axis
+ all_null_mask = utils.allnull(x, axis=axis)
+ part = np.where(all_null_mask, np.nan, part)
+
+    # NOTE(review): ~np.isnan assumes float dtype; pd.notnull also handled object/datetime — confirm inputs are float.
+ whole = np.sum(~np.isnan(x), axis=axis)
+
+ # Avoid division by zero - set to nan where whole == 0
+ with np.errstate(divide='ignore', invalid='ignore'):
+ result = np.divide(part, whole)
+ result = np.multiply(result, 100)
+
+ return result
+ # print(f"percentage_ args: {kwargs}")
out = x.reduce(f, **kwargs)
+ # print(f"percentage_ out: {out}")
+ # print("percentage_ done.")
+
if track_types:
out = promoter.promote(out)
return out
diff --git a/semantique/processor/utils.py b/semantique/processor/utils.py
index 96e12447..c2ca030e 100644
--- a/semantique/processor/utils.py
+++ b/semantique/processor/utils.py
@@ -1,5 +1,8 @@
+import logging
import rioxarray
+import dask.array as da
+from dask.callbacks import Callback
import numpy as np
import pandas as pd
import xarray as xr
@@ -7,6 +10,8 @@
from semantique import components
from semantique.dimensions import TIME, X, Y
+logger = logging.getLogger(__name__)
+
def get_null(x):
"""Return the appropriate nodata value for an array.
@@ -27,6 +32,45 @@ def get_null(x):
else:
return None
+
+def isnull(x):
+ """Return a boolean array whose elements indicate whether the corresponding
+  input element is NaN, None, etc.
+
+ Parameters
+ ----------
+ x : :obj:`xarray.DataArray` or :obj:`numpy.array`
+ The input array.
+
+ """
+
+ if DASK_LAZY:
+ # Doesn't trigger a computation, but may not work for all use cases
+ return da.isnull(x)
+ else:
+ # Continue to use this for backwards compatibility, for now
+ return pd.isnull(x)
+
+
+def notnull(x):
+ """Return a boolean array whose elements indicate whether the corresponding
+  input element is *not* NaN, None, etc.
+
+ Parameters
+ ----------
+ x : :obj:`xarray.DataArray` or :obj:`numpy.array`
+ The input array.
+
+ """
+
+ if DASK_LAZY:
+ # Doesn't trigger a computation, but may not work for all use cases
+ return da.notnull(x)
+ else:
+ # Continue to use this for backwards compatibility, for now
+ return pd.notnull(x)
+
+
def allnull(x, axis):
"""Test whether all elements along a given axis in an array are null.
@@ -42,7 +86,9 @@ def allnull(x, axis):
:obj:`numpy.array`
"""
- return np.equal(np.sum(pd.notnull(x), axis = axis), 0)
+
+ return np.equal(np.sum(notnull(x), axis = axis), 0)
+
def null_as_zero(x):
"""Convert all null values in an array to 0.
@@ -57,7 +103,8 @@ def null_as_zero(x):
:obj:`numpy.array`
"""
- return np.where(pd.isnull(x), 0, x)
+ return np.where(isnull(x), 0, x)
+
def inf_as_null(x):
"""Convert all infinite values in an array to null values.
@@ -78,6 +125,7 @@ def inf_as_null(x):
return x
return np.where(is_inf, get_null(x), x)
+
def datetime64_as_unix(x):
"""Convert datetime64 values in an array to unix time values.
@@ -285,4 +333,37 @@ def parse_datetime_component(name, obj):
}
else:
obj.sq.value_type = "discrete"
- return obj
\ No newline at end of file
+ return obj
+
+
+# TODO: Replace this module-level flag with explicit configuration; mutable global state is fragile.
+DASK_LAZY = False
+
+def set_global_dask_lazy(dask_lazy):
+ """Sets the global flag to enable or disable lazy dask computation."""
+ global DASK_LAZY
+ if dask_lazy:
+ logger.info("Lazy dask computation enabled.")
+ else:
+ logger.info("Lazy dask computation disabled.")
+ DASK_LAZY = dask_lazy
+
+
+def apply_ufunc(func, *args, **kwargs):
+ logger.debug(f"Applying {func} with args:\n{args}")
+
+ if DASK_LAZY:
+ return xr.apply_ufunc(func, *args, dask="parallelized", **kwargs)
+ else:
+ return xr.apply_ufunc(func, *args, **kwargs)
+
+class ComputeTracer(Callback):
+ def __init__(self):
+ self.compute_count = 0
+
+ def _start(self, dsk):
+ self.compute_count += 1
+ print(f"⚠️ COMPUTE #{self.compute_count} triggered!")
+ import traceback
+ traceback.print_stack() # Shows where compute was called
+ print(f" Graph size: {len(dsk)} tasks\n")
diff --git a/semantique/recipe.py b/semantique/recipe.py
index 34d1d12a..47586d55 100644
--- a/semantique/recipe.py
+++ b/semantique/recipe.py
@@ -1,6 +1,12 @@
+import logging
+
+from semantique.datacube import DaskCube
+from semantique.processor.utils import ComputeTracer, set_global_dask_lazy
from semantique.processor.core import QueryProcessor, FakeProcessor
from semantique.visualiser.visualise import show
+logger = logging.getLogger(__name__)
+
class QueryRecipe(dict):
"""Dict-like container to store instructions of a query recipe.
@@ -100,7 +106,20 @@ def execute(self, datacube, mapping, space, time, run_preview = False,
cache=cache,
**config
)
- return qp.optimize().execute()
+
+ dask_lazy = isinstance(datacube, DaskCube)
+ if dask_lazy:
+ set_global_dask_lazy(True)
+ else:
+ set_global_dask_lazy(False)
+
+ if dask_lazy and logger.isEnabledFor(logging.DEBUG):
+ with ComputeTracer():
+ result = qp.optimize().execute()
+ else:
+ result = qp.optimize().execute()
+
+ return result
def visualise(self):
"""Visualise the recipe in a web browser.
diff --git a/setup.py b/setup.py
index 09e3b654..e817cb7b 100644
--- a/setup.py
+++ b/setup.py
@@ -10,6 +10,7 @@
'geocube>=0.4.1',
'geopandas>=0.11,<1',
'numpy>=1.21',
+ 'odc-stac>=0.3.9',
'pandas>=2.0',
'planetary-computer',
'pyproj>=3.0',