diff --git a/docs/internal_api/index.rst b/docs/internal_api/index.rst index c77efca7e..0f37d774c 100644 --- a/docs/internal_api/index.rst +++ b/docs/internal_api/index.rst @@ -142,6 +142,9 @@ Geometry grid.geometry._populate_face_latlon_bound grid.geometry._populate_bounds grid.geometry._construct_hole_edge_indices + grid.geometry._point_to_plane + grid.geometry._point_to_sphere + grid.geometry._ray_casting_plane Coordinates ----------- diff --git a/docs/user-guide/remapping.ipynb b/docs/user-guide/remapping.ipynb index 683f4c01d..88d96697e 100644 --- a/docs/user-guide/remapping.ipynb +++ b/docs/user-guide/remapping.ipynb @@ -21,7 +21,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "id": "7449507f-3d79-4e86-a775-3b9137153adc", "metadata": {}, "outputs": [], @@ -37,7 +37,7 @@ "\n", "warnings.filterwarnings(\"ignore\")\n", "\n", - "hv.extension(\"bokeh\")\n", + "# hv.extension(\"bokeh\")\n", "\n", "features = gf.coastline(projection=ccrs.PlateCarree(), scale=\"50m\")" ] @@ -60,10 +60,108 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "id": "a2dddad7-872a-4b4f-a2b0-7de698ef6a38", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": {}, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.holoviews_exec.v0+json": "", + "text/html": [ + "
[HoloViews CSS/HTML boilerplate stripped]" + ], + "text/plain": [ + ":Layout\n", + " .DynamicMap.I :DynamicMap []\n", + " :Overlay\n", + " .Image.I :Image [Longitude,Latitude] (Longitude_Latitude t2m)\n", + " .Path.I :Path [Longitude,Latitude]\n", + " .Path.I :Path [Longitude,Latitude]" + ] + }, + "execution_count": 2, + "metadata": { + "application/vnd.holoviews_exec.v0+json": { + "id": "p1011" + } + }, + "output_type": "execute_result" + } + ], "source": [ "grid_path = \"../../test/meshfiles/ugrid/quad-hexagon/grid.nc\"\n", "data_path = \"../../test/meshfiles/ugrid/quad-hexagon/data.nc\"\n", @@ -125,10 +223,749 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "id": "4d73a380-349d-473d-8e57-10c52102adca", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "[BokehJS autoload scripts, PyViz comm-manager JavaScript, and HoloViews HTML/CSS boilerplate stripped]" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ],
"source": [ "hv.extension(\"matplotlib\")\n", "\n", @@ -157,7 +994,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "fe358933-bc7c-422c-88af-a697ffb4e45a", "metadata": {}, "outputs": [], @@ -644,6 +1481,247 @@ "\n", "Conversely, during upsampling, the effects are much more pronounced. This is logical, considering that upsampling involves transitioning from a grid with fewer faces, where each face represents a larger area, to one where more faces are present, representing a smaller area per face. Therefore, adjusting from 2 neighbors to 128 leads to substantial changes because the additional faces encompass a much larger area, which in turn produces much more drastic changes in values." ] + }, + { + "cell_type": "markdown", + "id": "f1f33631-19b7-4b73-8452-7dc1e3fa48a2", + "metadata": {}, + "source": [ + "### Bilinear" + ] + }, + { + "cell_type": "markdown", + "id": "6bec26ce-67b6-4300-a310-63cbac2b289a", + "metadata": {}, + "source": [ + "Bilinear remapping breaks the source grid down into triangles and then finds the triangle that contains each point on the destination grid. It then uses the values stored at each vertex to bilinearly interpolate a value for the point, depending on its position inside the triangle." + ] + }, + { + "cell_type": "markdown", + "id": "db618e64-76a5-4432-a3c5-31a956607089", + "metadata": {}, + "source": [ + "#### Upsampling" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "81b1ad60-3c79-4d36-9492-254304ff93e1", + "metadata": {}, + "outputs": [], + "source": [ + "upsampling_b = uxds_480[\"bottomDepth\"].remap.bilinear(\n", + " uxds_120.uxgrid, remap_to=\"face centers\", coord_type=\"spherical\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "e59684d7-db46-46a2-94a7-b33fd822f891", + "metadata": {}, + "outputs": [ + { + "data": {}, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.holoviews_exec.v0+json": "", + "text/html": [ + "[HoloViews HTML/CSS output stripped]
\n", + "" + ], + "text/plain": [ + ":Layout\n", + " .DynamicMap.I :DynamicMap []\n", + " :Overlay\n", + " .Image.I :Image [Longitude,Latitude] (Longitude_Latitude bottomDepth)\n", + " .Coastline.I :Feature [Longitude,Latitude]\n", + " .DynamicMap.II :DynamicMap []\n", + " :Overlay\n", + " .Image.I :Image [Longitude,Latitude] (Longitude_Latitude bottomDepth)\n", + " .Coastline.I :Feature [Longitude,Latitude]\n", + " .DynamicMap.III :DynamicMap []\n", + " :Overlay\n", + " .Image.I :Image [Longitude,Latitude] (Longitude_Latitude bottomDepth)\n", + " .Coastline.I :Feature [Longitude,Latitude]\n", + " .DynamicMap.IV :DynamicMap []\n", + " :Overlay\n", + " .Image.I :Image [Longitude,Latitude] (Longitude_Latitude bottomDepth)\n", + " .Coastline.I :Feature [Longitude,Latitude]" + ] + }, + "execution_count": 8, + "metadata": { + "application/vnd.holoviews_exec.v0+json": { + "id": "p1037" + } + }, + "output_type": "execute_result" + } + ], + "source": [ + "(\n", + " uxds_480[\"bottomDepth\"].plot(\n", + " title=\"Bottom Depth (480km)\", cmap=ux.cmaps.sequential_blue\n", + " )\n", + " * features\n", + " + upsampling_b.plot(\n", + " title=\"Remapped Bottom Depth (480km to 120km)\",\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + " + uxds_480[\"bottomDepth\"].plot(\n", + " title=\"Zoomed (480km)\",\n", + " xlim=(-10, 10),\n", + " ylim=(-5, 5),\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + " + upsampling_b.plot(\n", + " title=\"Zoomed Remap (480km to 120km)\",\n", + " xlim=(-10, 10),\n", + " ylim=(-5, 5),\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + ").opts(fig_size=300).cols(1)" + ] + }, + { + "cell_type": "markdown", + "id": "467252cb-9e07-42bd-8734-15666f612387", + "metadata": {}, + "source": [ + "#### Downsampling" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9268e8c5-fc66-48c9-854e-4fdc482bf815", + "metadata": {}, + "outputs": [], + "source": [ + "downsampling_b = uxds_120[\"bottomDepth\"].remap.bilinear(\n", + " uxds_480.uxgrid, remap_to=\"face centers\", coord_type=\"spherical\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8449c7f4-3fd8-4042-89f7-bc72e38e5fdd", + "metadata": {}, + "outputs": [], + "source": [ + "(\n", + " uxds_120[\"bottomDepth\"].plot(\n", + " title=\"Bottom Depth (120km)\", cmap=ux.cmaps.sequential_blue\n", + " )\n", + " * features\n", + " + downsampling_b.plot(\n", + " title=\"Remapped Bottom Depth (120km to 480km)\",\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + " + uxds_120[\"bottomDepth\"].plot(\n", + " title=\"Zoomed (120km)\",\n", + " xlim=(-10, 10),\n", + " ylim=(-5, 5),\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + " + downsampling_b.plot(\n", + " title=\"Zoomed Remap (120km to 480km)\",\n", + " xlim=(-10, 10),\n", + " ylim=(-5, 5),\n", + " cmap=ux.cmaps.sequential_blue,\n", + " )\n", + " * features\n", + ").opts(fig_size=300).cols(1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ec76a7fc-a5ec-412e-841a-ab8abeceb2ab", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -662,7 +1740,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/test/test_geometry.py b/test/test_geometry.py index 05e0930fc..b21eb2ebf 100644 --- a/test/test_geometry.py +++ b/test/test_geometry.py @@ -12,8 +12,8 @@ from uxarray.grid.coordinates 
import _populate_node_latlon, _lonlat_rad_to_xyz, _normalize_xyz, _xyz_to_lonlat_rad from uxarray.grid.arcs import extreme_gca_latitude, _extreme_gca_latitude_cartesian from uxarray.grid.utils import _get_cartesian_face_edge_nodes, _get_lonlat_rad_face_edge_nodes -from uxarray.grid.geometry import _populate_face_latlon_bound, _populate_bounds, _pole_point_inside_polygon_cartesian, stereographic_projection, inverse_stereographic_projection +from uxarray.grid.geometry import _populate_face_latlon_bound, _populate_bounds, _pole_point_inside_polygon_cartesian, stereographic_projection, inverse_stereographic_projection current_path = Path(os.path.dirname(os.path.realpath(__file__))) @@ -30,6 +30,7 @@ grid_quad_hex = current_path / "meshfiles" / "ugrid" / "quad-hexagon" / "grid.nc" grid_geoflow = current_path / "meshfiles" / "ugrid" / "geoflow-small" / "grid.nc" grid_mpas = current_path / "meshfiles" / "mpas" / "QU" / "oQU480.231010.nc" +grid_mpas_2 = current_path / "meshfiles" / "mpas" / "QU" / "mesh.QU.1920km.151026.nc" # List of grid files to test grid_files_latlonBound = [grid_quad_hex, grid_geoflow, gridfile_CSne8, grid_mpas] @@ -81,7 +82,7 @@ def test_pole_point_inside_polygon_from_vertice_north(self): [vertices[1], vertices[2]], [vertices[2], vertices[3]], [vertices[3], vertices[0]]]) - + print(face_edge_cart) # Check if the North pole is inside the polygon result = _pole_point_inside_polygon_cartesian( 'North', face_edge_cart) diff --git a/test/test_remap.py b/test/test_remap.py index 6fb9b7535..6175db487 100644 --- a/test/test_remap.py +++ b/test/test_remap.py @@ -22,6 +22,7 @@ dsfile_v2_geoflow = current_path / "meshfiles" / "ugrid" / "geoflow-small" / "v2.nc" dsfile_v3_geoflow = current_path / "meshfiles" / "ugrid" / "geoflow-small" / "v3.nc" mpasfile_QU = current_path / "meshfiles" / "mpas" / "QU" / "mesh.QU.1920km.151026.nc" +mpasfile_QU_2 = current_path / "meshfiles" / "mpas" / "QU" / "oQU480.231010.nc" class TestNearestNeighborRemap(TestCase): @@ -224,7 +225,6 @@ def test_preserve_coordinates(self): assert "time" in res.coords - class TestInverseDistanceWeightedRemapping(TestCase): """Testing for inverse distance weighted remapping.""" @@ -429,3 +429,13 @@ def test_value_errors(self): destination_grid=destination_grid, remap_to="nodes", power=6 ) + + +class TestBilinearRemapping(TestCase): + + def test_uxda_remap(self): + source_uxds = ux.open_dataset(mpasfile_QU, mpasfile_QU) + destination = ux.open_dataset(mpasfile_QU_2, mpasfile_QU_2) + + bilinear_remap = source_uxds['latVertex'].remap.bilinear(destination.uxgrid, remap_to='face centers', + coord_type='spherical') diff --git a/uxarray/grid/geometry.py b/uxarray/grid/geometry.py index 92a018011..e191f75fc 100644 --- a/uxarray/grid/geometry.py +++ b/uxarray/grid/geometry.py @@ -20,6 +20,7 @@ ) from uxarray.grid.arcs import extreme_gca_latitude, point_within_gca from uxarray.grid.coordinates import _xyz_to_lonlat_rad + from uxarray.grid.intersections import gca_gca_intersection from uxarray.grid.utils import ( _get_cartesian_face_edge_nodes, @@ -1236,11 +1237,13 @@ def _populate_face_latlon_bound( face_latlon_array = insert_pt_in_latlonbox( face_latlon_array, np.array([lat_min, node1_lon_rad]) ) + face_latlon_array[0, 1] = math.pi / 2 # Upper latitude bound else: face_latlon_array = insert_pt_in_latlonbox( face_latlon_array, np.array([lat_max, node1_lon_rad]) ) + face_latlon_array[0, 0] = -math.pi / 2 # Lower latitude bound # Adjust longitude bounds globally if the pole is centrally inside the polygon diff --git 
a/uxarray/remap/bilinear.py b/uxarray/remap/bilinear.py new file mode 100644 index 000000000..a25116d8d --- /dev/null +++ b/uxarray/remap/bilinear.py @@ -0,0 +1,416 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + +from uxarray.grid.coordinates import _xyz_to_lonlat_deg + +if TYPE_CHECKING: + from uxarray.core.dataset import UxDataset + from uxarray.core.dataarray import UxDataArray + +import numpy as np +import uxarray.core.dataarray +import uxarray.core.dataset +from uxarray.grid import Grid + + +def _bilinear( + source_uxda: UxDataArray, + destination_grid: Grid, + remap_to: str = "face centers", + coord_type: str = "spherical", +) -> np.ndarray: + """Bilinear Remapping between two grids, mapping data that resides on the + corner nodes, edge centers, or face centers on the source grid to the + corner nodes, edge centers, or face centers of the destination grid. + + Parameters + --------- + source_uxda : UxDataArray + Source UxDataArray + remap_to : str, default="nodes" + Location of where to map data, either "nodes", "edge centers", or "face centers" + coord_type: str, default="spherical" + Coordinate type to use for bilinear query, either "spherical" or "Cartesian" + + Returns + ------- + destination_data : np.ndarray + Data mapped to destination grid + """ + + # ensure array is a np.ndarray + source_data = np.asarray(source_uxda.data) + source_grid = source_uxda.uxgrid + + n_elements = source_data.shape[-1] + + # Find where the source data is located + if n_elements == source_grid.n_node: + source_data_mapping = "nodes" + elif n_elements == source_grid.n_face: + source_data_mapping = "face centers" + elif n_elements == source_grid.n_edge: + # Since currently `topological_mean` is not supported for edge centers raise a `ValueError` + raise ValueError( + "'edges' is currently an unsupported source data dimension for bilinear remapping" + ) + else: + raise ValueError( + f"Invalid source_data shape. The final dimension should be either match the number of corner " + f"nodes ({source_grid.n_node}), edge centers ({source_grid.n_edge}), or face centers ({source_grid.n_face}) in the" + f" source grid, but received: {source_data.shape}" + ) + + # If the data isn't face centered, take a `topological_mean` so the data will be face centered for the dual + if source_data_mapping != "face centers": + source_uxda = source_uxda.topological_mean(destination="face") + + # Reload the data array after topological mean + source_data = np.asarray(source_uxda.data) + # Construct dual for searching + dual = source_uxda.get_dual() + + if coord_type == "spherical": + # get destination coordinate pairs + if remap_to == "nodes": + lon, lat = ( + destination_grid.node_lon.values, + destination_grid.node_lat.values, + ) + data_size = destination_grid.n_node + elif remap_to == "edge centers": + lon, lat = ( + destination_grid.edge_lon.values, + destination_grid.edge_lat.values, + ) + data_size = destination_grid.n_edge + elif remap_to == "face centers": + lon, lat = ( + destination_grid.face_lon.values, + destination_grid.face_lat.values, + ) + data_size = destination_grid.n_face + else: + raise ValueError( + f"Invalid remap_to. 
Expected 'nodes', 'edge centers', or 'face centers', " + f"but received: {remap_to}" + ) + + values = np.ndarray(data_size) + + tree = dual.uxgrid.get_ball_tree( + coordinates="face centers", coordinate_system="spherical" + ) + + for i in range(len(lon)): + # Find polygon containing point + weights, data = find_polygon_containing_point( + [lon[i], lat[i]], dual, source_data, tree + ) + + values[i] = np.sum(weights * data, axis=-1) + + elif coord_type == "cartesian": + # get destination coordinates + if remap_to == "nodes": + cart_x, cart_y, cart_z = ( + destination_grid.node_x.values, + destination_grid.node_y.values, + destination_grid.node_z.values, + ) + data_size = destination_grid.n_node + elif remap_to == "edge centers": + cart_x, cart_y, cart_z = ( + destination_grid.edge_x.values, + destination_grid.edge_y.values, + destination_grid.edge_z.values, + ) + data_size = destination_grid.n_edge + elif remap_to == "face centers": + cart_x, cart_y, cart_z = ( + destination_grid.face_x.values, + destination_grid.face_y.values, + destination_grid.face_z.values, + ) + data_size = destination_grid.n_face + else: + raise ValueError( + f"Invalid remap_to. Expected 'nodes', 'edge centers', or 'face centers', " + f"but received: {remap_to}" + ) + + values = np.ndarray(data_size) + # tree = dual.uxgrid.get_ball_tree( + # coordinates="face centers", coordinate_system="spherical" + # ) + for i in range(len(cart_x)): + # Get point + point = np.array([cart_x[i], cart_y[i], cart_z[i]]) + + # Find the index of the polygon containing the point + polygon_ind = dual.get_polygons_containing_point(point) + + # Convert point to lonlat for barycentric calculation + point = _xyz_to_lonlat_deg(*point) + + if len(polygon_ind) == 0: + raise ValueError("No polygon found containing the point") + + # Inside the polygon or on an edge + elif len(polygon_ind) < 3: + # Get the index of the face that holds the point + node_ind = dual.face_node_connectivity[polygon_ind[0]].values + + # Create the polygon from the `face_node_connectivity` + nodes_per_face = dual.n_nodes_per_face[polygon_ind[0]].values + polygon = np.empty([nodes_per_face, 3]) + data = np.empty([nodes_per_face]) + for node in range(nodes_per_face): + polygon[i] = [ + dual.node_lon.values[node_ind[node]], + dual.node_lat.values[node_ind[node]], + ] + + # Create the data array that is on the polygon + data[i] = source_data[node] + + # If the face is a triangle, use barycentric coordinates, otherwise break the face into triangles + # and then use barycentric coordinates + polygon_len = len(polygon) + if polygon_len == 3: + weights = barycentric_coordinates(point, polygon) + + values[i] = np.sum(weights * data, axis=-1) + else: + reference_vertex = polygon[0] + triangles = [] + triangle_data = [] + for j in range(1, polygon_len - 1): + triangles.append([reference_vertex, polygon[j], polygon[j + 1]]) + triangle_data.append([data[0], data[j], data[j + 1]]) + + for d, triangle in enumerate(triangles): + if point_in_polygon(point, triangle): + weights = barycentric_coordinates(point, triangle) + + values[i] = np.sum(weights * triangle_data[d], axis=-1) + + # On a node + else: + node_ind_1 = dual.face_node_connectivity[polygon_ind[0]].values + node_ind_2 = dual.face_node_connectivity[polygon_ind[1]].values + for ind, x in enumerate(node_ind_1): + if x == node_ind_2[ind]: + values[i] = source_data[x] + + else: + raise ValueError( + f"Invalid coord_type. 
Expected either 'spherical' or 'cartesian', but received {coord_type}" + ) + + return values + + +def _bilinear_uxda( + source_uxda: UxDataArray, + destination_grid: Grid, + remap_to: str = "face centers", + coord_type: str = "spherical", +): + """Bilinear Remapping implementation for ``UxDataArray``. + + Parameters + --------- + source_uxda : UxDataArray + Source UxDataArray for remapping + destination_grid : Grid + Destination Grid for remapping + remap_to : str, default="face centers" + Location of where to map data, either "nodes", "edge centers", or "face centers" + coord_type : str, default="spherical" + Indicates whether to remap using spherical or Cartesian coordinates for computations when + remapping. + """ + + # prepare dimensions + if remap_to == "nodes": + destination_dim = "n_node" + elif remap_to == "edge centers": + destination_dim = "n_edge" + else: + destination_dim = "n_face" + + destination_dims = list(source_uxda.dims) + destination_dims[-1] = destination_dim + + # perform remapping + destination_data = _bilinear(source_uxda, destination_grid, remap_to, coord_type) + # construct data array for remapping variable + uxda_remap = uxarray.core.dataarray.UxDataArray( + data=destination_data, + name=source_uxda.name, + coords=source_uxda.coords, + dims=destination_dims, + uxgrid=destination_grid, + ) + + return uxda_remap + + +def _bilinear_uxds( + source_uxds: UxDataset, + destination_grid: Grid, + remap_to: str = "face centers", + coord_type: str = "spherical", +): + """Bilinear Remapping implementation for ``UxDataset``. + + Parameters + --------- + source_uxds : UxDataset + Source UxDataset for remapping + destination_grid : Grid + Destination Grid for remapping + remap_to : str, default="face centers" + Location of where to map data, either "nodes", "edge centers", or "face centers" + coord_type : str, default="spherical" + Indicates whether to remap using spherical or Cartesian coordinates + """ + + destination_uxds = uxarray.core.dataset.UxDataset(uxgrid=destination_grid) + + for var_name in source_uxds.data_vars: + destination_uxds[var_name] = _bilinear_uxda( + source_uxds[var_name], destination_grid, remap_to, coord_type + ) + + return destination_uxds + + +# @njit(cache=True) +def barycentric_coordinates(point, triangle): + """Calculates the barycentric weights for a point inside a triangle. + + Args: + point: A 2D point (x, y) inside the triangle. + triangle: A 2D triangle with three vertices as [(x0, y0), (x1, y1), (x2, y2)]. + + Returns: + An array with 3 weights for each node of the triangle. 
+ """ + x1, y1 = triangle[0][0], triangle[0][1] + x2, y2 = triangle[1][0], triangle[1][1] + x3, y3 = triangle[2][0], triangle[2][1] + + px, py = point + + # Compute the denominator (2 * the signed area of the full triangle) + denom = (y2 - y3) * (x1 - x3) + (x3 - x2) * (y1 - y3) + + if np.abs(denom) < 1e-10: + raise ValueError("The triangle points are too close to being collinear.") + + # Compute barycentric weights (dA, dB, dC) + weight_a = ((y2 - y3) * (px - x3) + (x3 - x2) * (py - y3)) / denom + weight_b = ((y3 - y1) * (px - x3) + (x1 - x3) * (py - y3)) / denom + weight_c = 1.0 - weight_a - weight_b # Third barycentric coordinate + + return np.array([weight_a, weight_b, weight_c], dtype=np.float64) + + +# def find_polygon_containing_point(point, dual, source_data, tree): +# """Finds the polygon that contains a point.""" +# +# # Create arrays to hold the lat/lon of first face +# triangle = np.zeros( +# (3, 2), dtype=np.float64 +# ) # Array to store 3 vertices (lat, lon) +# +# xyz = np.zeros( +# (3, 3), dtype=np.float64 +# ) # Array to store 3 vertices (lat, lon) +# +# # If the mesh is not partial +# if dual.uxgrid.boundary_edge_indices.size == 0: +# # First check the nearest face +# ind = tree.query(point, k=1, return_distance=False) +# data = [] +# polygon = np.zeros([len(dual.uxgrid.face_node_connectivity[ind].values), 3]) +# for j, node in enumerate(dual.uxgrid.face_node_connectivity[ind]): +# if node != INT_FILL_VALUE: +# lon = dual.uxgrid.node_lon[node.values].values # Longitude for the node +# lat = dual.uxgrid.node_lat[node.values].values # Latitude for the node +# polygon[j] = [dual.uxgrid.node_x[node.values].values, +# dual.uxgrid.node_y[node.values].values, +# dual.uxgrid.node_z[node.values].values] +# +# tolerance = 1e-0 +# if abs(lat - point[1]) <= tolerance and abs(lon - point[0]) <= tolerance: +# return 1, source_data[node] +# +# triangle[j] = [lon, lat] # Store the (lon, lat) pair in the triangle +# data.append(source_data[node]) +# polygon2 = [ +# _lonlat_rad_to_xyz(np.deg2rad(vertex[0]), np.deg2rad(vertex[1])) +# for vertex in triangle +# ] +# +# face_ind = (ind + 2) % len(dual.uxgrid.node_x.values) +# ref_point = np.array([0, 0, 1]) +# point_cart = np.array(_lonlat_rad_to_xyz(np.deg2rad(point[0]), np.deg2rad(point[1]))) +# +# point_found = point_in_polygon(polygon2, point_cart, ref_point) +# +# # If found in first face, return weights +# if point_found: +# return barycentric_coordinates(point=point, triangle=triangle), data +# else: +# +# # Find the largest face radius +# max_distance = get_max_face_radius(dual) +# +# # If the nearest face doesn't contain the point, continue to check nearest faces +# for i in range(2, dual.uxgrid.n_face): +# triangle = np.zeros( +# (3, 2), dtype=np.float64 +# ) # Array to store 3 vertices (lat, lon) +# +# # Query the tree for increasingly more neighbors if the polygon isn't found +# d, ind = tree.query(point, k=i, return_distance=True, sort_results=True) +# data = [] +# +# # If the distance is outside the max distance the point could be in, the point is outside the partial +# # grid +# if d[i - 1] > max_distance: +# return INT_FILL_VALUE, 0 +# +# # Get the lat/lon for the face +# for j, node in enumerate(dual.uxgrid.face_node_connectivity[ind[0]]): +# if node != INT_FILL_VALUE: +# lat = dual.uxgrid.node_lat[node.values].values # Latitude for the node +# lon = dual.uxgrid.node_lon[node.values].values # Longitude for the node +# x = dual.uxgrid.node_x[node.values].values +# y = dual.uxgrid.node_y[node.values].values +# z = 
diff --git a/uxarray/remap/dataarray_accessor.py b/uxarray/remap/dataarray_accessor.py
index a3c44105d..f412d5ee3 100644
--- a/uxarray/remap/dataarray_accessor.py
+++ b/uxarray/remap/dataarray_accessor.py
@@ -1,20 +1,21 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional, Union
+from warnings import warn
+from uxarray.remap.bilinear import _bilinear_uxda
 from uxarray.remap.nearest_neighbor import _nearest_neighbor_uxda
 from uxarray.remap.inverse_distance_weighted import (
     _inverse_distance_weighted_remap_uxda,
 )
 
 if TYPE_CHECKING:
+    from uxarray.core.dataset import UxDataset
     from uxarray.core.dataarray import UxDataArray
     from uxarray.grid import Grid
 
 
 class UxDataArrayRemapAccessor:
-    """Remapping accessor for ``UxDataArray``"""
-
     def __init__(self, uxda: UxDataArray):
         self.uxda = uxda
@@ -31,40 +32,64 @@ def __repr__(self):
 
     def nearest_neighbor(
         self,
-        destination_grid: Grid,
+        destination_grid: Optional[Grid] = None,
+        destination_obj: Optional[Union[Grid, UxDataArray, UxDataset]] = None,
         remap_to: str = "face centers",
         coord_type: str = "spherical",
     ):
-        """Nearest Neighbor Remapping between a source ``UxDataArray`` and
-        destination ``Grid``
+        """Nearest Neighbor Remapping between a source ``UxDataArray`` and a
+        destination.
 
         Parameters
         ---------
         destination_grid : Grid
             Destination Grid for remapping
+        destination_obj : Grid, UxDataArray, UxDataset
+            Optional destination for remapping; deprecated, use ``destination_grid`` instead
         remap_to : str, default="nodes"
             Location of where to map data, either "nodes" or "face centers"
         coord_type : str, default="spherical"
             Indicates whether to remap using on spherical or cartesian coordinates
         """
+        if destination_grid is not None and destination_obj is not None:
+            raise ValueError(
+                "Only one destination allowed, "
+                "please remove either `destination_grid` or `destination_obj`."
+            )
+        elif destination_grid is None and destination_obj is None:
+            raise ValueError("Destination needed for remap.")
 
-        return _nearest_neighbor_uxda(self.uxda, destination_grid, remap_to, coord_type)
+        if destination_grid is not None:
+            return _nearest_neighbor_uxda(
+                self.uxda, destination_grid, remap_to, coord_type
+            )
+        elif destination_obj is not None:
+            warn(
+                "destination_obj will be deprecated in a future release. Please use destination_grid instead.",
+                DeprecationWarning,
+            )
+            return _nearest_neighbor_uxda(
+                self.uxda, destination_obj, remap_to, coord_type
+            )
 
     def inverse_distance_weighted(
         self,
-        destination_grid: Grid,
+        destination_grid: Optional[Grid] = None,
+        destination_obj: Optional[Union[Grid, UxDataArray, UxDataset]] = None,
         remap_to: str = "face centers",
         coord_type: str = "spherical",
         power=2,
         k=8,
     ):
-        """Inverse Distance Weighted Remapping between a source ``UxDataArray``
-        and destination ``Grid``
+        """Inverse Distance Weighted Remapping between a source
+        ``UxDataArray`` and a destination.
 
         Parameters
         ---------
         destination_grid : Grid
             Destination Grid for remapping
+        destination_obj : Grid, UxDataArray, UxDataset
+            Optional destination for remapping; deprecated, use ``destination_grid`` instead
         remap_to : str, default="nodes"
             Location of where to map data, either "nodes" or "face centers"
         coord_type : str, default="spherical"
@@ -75,7 +100,46 @@ def inverse_distance_weighted(
         k : int, default=8
             Number of nearest neighbors to consider in the weighted calculation.
         """
+        if destination_grid is not None and destination_obj is not None:
+            raise ValueError(
+                "Only one destination allowed, "
+                "please remove either `destination_grid` or `destination_obj`."
+            )
+        elif destination_grid is None and destination_obj is None:
+            raise ValueError("Destination needed for remap.")
 
-        return _inverse_distance_weighted_remap_uxda(
-            self.uxda, destination_grid, remap_to, coord_type, power, k
-        )
+        if destination_grid is not None:
+            return _inverse_distance_weighted_remap_uxda(
+                self.uxda, destination_grid, remap_to, coord_type, power, k
+            )
+        elif destination_obj is not None:
+            warn(
+                "destination_obj will be deprecated in a future release. Please use destination_grid instead.",
+                DeprecationWarning,
+            )
+            return _inverse_distance_weighted_remap_uxda(
+                self.uxda, destination_obj, remap_to, coord_type, power, k
+            )
+
+    def bilinear(
+        self,
+        destination_grid: Optional[Grid] = None,
+        remap_to: str = "face centers",
+        coord_type: str = "spherical",
+    ):
+        """Bilinear Remapping between a source ``UxDataArray`` and a
+        destination ``Grid``.
+
+        Parameters
+        ---------
+        destination_grid : Grid
+            Destination Grid for remapping
+        remap_to : str, default="face centers"
+            Location of where to map data, either "nodes", "edge centers", or "face centers"
+        coord_type : str, default="spherical"
+            Indicates whether to remap using spherical or cartesian coordinates
+        """
+        if destination_grid is None:
+            raise ValueError("Destination needed for remap.")
+
+        return _bilinear_uxda(self.uxda, destination_grid, remap_to, coord_type)
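The DataArray accessor changes above can be exercised as follows. This is a sketch: the file paths and the "t2m" variable name are placeholders, and the behaviour follows the branches added in this hunk.

    import uxarray as ux

    uxds = ux.open_dataset("grid.nc", "data.nc")
    destination_grid = ux.open_grid("destination_grid.nc")

    # Preferred: pass the destination grid explicitly
    remapped = uxds["t2m"].remap.nearest_neighbor(destination_grid=destination_grid)

    # Still accepted, but now emits a DeprecationWarning pointing at destination_grid
    remapped = uxds["t2m"].remap.nearest_neighbor(destination_obj=destination_grid)

    # Supplying both destinations (or neither) raises a ValueError
    try:
        uxds["t2m"].remap.nearest_neighbor(
            destination_grid=destination_grid, destination_obj=destination_grid
        )
    except ValueError as err:
        print(err)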
diff --git a/uxarray/remap/dataset_accessor.py b/uxarray/remap/dataset_accessor.py
index 59a8694cb..3cf1dda86 100644
--- a/uxarray/remap/dataset_accessor.py
+++ b/uxarray/remap/dataset_accessor.py
@@ -1,6 +1,8 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional, Union
+from warnings import warn
+from uxarray.remap.bilinear import _bilinear_uxds
 from uxarray.remap.nearest_neighbor import _nearest_neighbor_uxds
 from uxarray.remap.inverse_distance_weighted import (
     _inverse_distance_weighted_remap_uxds,
@@ -8,13 +10,12 @@
 
 if TYPE_CHECKING:
     from uxarray.core.dataset import UxDataset
+    from uxarray.core.dataarray import UxDataArray
     from uxarray.grid import Grid
 
 
 class UxDatasetRemapAccessor:
-    """Remapping accessor for ``UxDataset``"""
-
     def __init__(self, uxds: UxDataset):
         self.uxds = uxds
 
@@ -31,40 +32,65 @@ def __repr__(self):
 
     def nearest_neighbor(
         self,
-        destination_grid: Grid,
+        destination_grid: Optional[Grid] = None,
+        destination_obj: Optional[Union[Grid, UxDataArray, UxDataset]] = None,
         remap_to: str = "face centers",
         coord_type: str = "spherical",
     ):
-        """Nearest Neighbor Remapping between a source ``UxDataset`` and
-        destination ``Grid``
+        """Nearest Neighbor Remapping between a source ``UxDataset`` and a
+        destination.
 
         Parameters
         ---------
         destination_grid : Grid
             Destination Grid for remapping
+        destination_obj : Grid, UxDataArray, UxDataset
+            Optional destination for remapping; deprecated, use ``destination_grid`` instead
         remap_to : str, default="nodes"
             Location of where to map data, either "nodes", "edge centers", or "face centers"
         coord_type : str, default="spherical"
             Indicates whether to remap using on spherical or cartesian coordinates
         """
-        return _nearest_neighbor_uxds(self.uxds, destination_grid, remap_to, coord_type)
+        if destination_grid is not None and destination_obj is not None:
+            raise ValueError(
+                "Only one destination allowed, "
+                "please remove either `destination_grid` or `destination_obj`."
+            )
+        elif destination_grid is None and destination_obj is None:
+            raise ValueError("Destination needed for remap.")
+
+        if destination_grid is not None:
+            return _nearest_neighbor_uxds(
+                self.uxds, destination_grid, remap_to, coord_type
+            )
+        elif destination_obj is not None:
+            warn(
+                "destination_obj will be deprecated in a future release. Please use destination_grid instead.",
+                DeprecationWarning,
+            )
+            return _nearest_neighbor_uxds(
+                self.uxds, destination_obj, remap_to, coord_type
+            )
 
     def inverse_distance_weighted(
         self,
-        destination_grid: Grid,
+        destination_grid: Optional[Grid] = None,
+        destination_obj: Optional[Union[Grid, UxDataArray, UxDataset]] = None,
         remap_to: str = "face centers",
         coord_type: str = "spherical",
         power=2,
         k=8,
     ):
-        """Inverse Distance Weighted Remapping between a source ``UxDataset``
-        and destination ``Grid``
+        """Inverse Distance Weighted Remapping between a source ``UxDataset``
+        and a destination.
 
         Parameters
         ---------
         destination_grid : Grid
             Destination Grid for remapping
+        destination_obj : Grid, UxDataArray, UxDataset
+            Optional destination for remapping; deprecated, use ``destination_grid`` instead
         remap_to : str, default="nodes"
             Location of where to map data, either "nodes", "edge centers", or "face centers"
         coord_type : str, default="spherical"
@@ -76,6 +102,46 @@ def inverse_distance_weighted(
             Number of nearest neighbors to consider in the weighted calculation.
         """
 
-        return _inverse_distance_weighted_remap_uxds(
-            self.uxds, destination_grid, remap_to, coord_type, power, k
-        )
+        if destination_grid is not None and destination_obj is not None:
+            raise ValueError(
+                "Only one destination allowed, "
+                "please remove either `destination_grid` or `destination_obj`."
+            )
+        elif destination_grid is None and destination_obj is None:
+            raise ValueError("Destination needed for remap.")
+
+        if destination_grid is not None:
+            return _inverse_distance_weighted_remap_uxds(
+                self.uxds, destination_grid, remap_to, coord_type, power, k
+            )
+        elif destination_obj is not None:
+            warn(
+                "destination_obj will be deprecated in a future release. Please use destination_grid instead.",
+                DeprecationWarning,
+            )
+            return _inverse_distance_weighted_remap_uxds(
+                self.uxds, destination_obj, remap_to, coord_type, power, k
+            )
+
+    def bilinear(
+        self,
+        destination_grid: Optional[Grid] = None,
+        remap_to: str = "face centers",
+        coord_type: str = "spherical",
+    ):
+        """Bilinear Remapping between a source ``UxDataset`` and a
+        destination ``Grid``.
+
+        Parameters
+        ---------
+        destination_grid : Grid
+            Destination Grid for remapping
+        remap_to : str, default="face centers"
+            Location of where to map data, either "nodes", "edge centers", or "face centers"
+        coord_type : str, default="spherical"
+            Indicates whether to remap using spherical or cartesian coordinates
+        """
+        if destination_grid is None:
+            raise ValueError("Destination needed for remap.")
+
+        return _bilinear_uxds(self.uxds, destination_grid, remap_to, coord_type)
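Finally, the new dataset-level ``bilinear`` entry point added above would be driven like this. This is a sketch with placeholder file names; the destination grid is required, mirroring the ValueError check in the method.

    import uxarray as ux

    source_uxds = ux.open_dataset("source_grid.nc", "source_data.nc")
    destination_grid = ux.open_grid("destination_grid.nc")

    # Remap every data variable in the dataset onto the destination face centers
    remapped_uxds = source_uxds.remap.bilinear(
        destination_grid=destination_grid,
        remap_to="face centers",
        coord_type="spherical",
    )

    # Omitting destination_grid raises ValueError("Destination needed for remap.")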