diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json
index 31a43921d..b108ec396 100644
--- a/benchmarks/asv.conf.json
+++ b/benchmarks/asv.conf.json
@@ -94,7 +94,7 @@
         "setuptools_scm": [""],
         "xarray": [""],
         "netcdf4": [""],
-        "pip+pyfma": [""]
+        "pip+git+https://github.com/philipc2/pyfma.git": [""]
     },
diff --git a/benchmarks/face_bounds.py b/benchmarks/face_bounds.py
index b249e7b99..db3106fcb 100644
--- a/benchmarks/face_bounds.py
+++ b/benchmarks/face_bounds.py
@@ -28,3 +28,13 @@ def time_face_bounds(self, grid_path):
     def peakmem_face_bounds(self, grid_path):
         """Peak memory usage obtain ``Grid.face_bounds."""
         face_bounds = self.uxgrid.bounds
+
+class Bounds:
+    def setup(self):
+        self.uxgrid = ux.open_grid(current_path.parent / "test" / "meshfiles" / "mpas" / "QU" / "oQU480.231010.nc")
+
+    def teardown(self):
+        del self.uxgrid
+
+    def time_bounds(self):
+        self.uxgrid.bounds
diff --git a/benchmarks/mpas_ocean.py b/benchmarks/mpas_ocean.py
index d2baaf86b..b4d50da4b 100644
--- a/benchmarks/mpas_ocean.py
+++ b/benchmarks/mpas_ocean.py
@@ -27,55 +27,57 @@
                   "120km": [current_path / grid_filename_120, current_path / data_filename_120]}
-class Gradient:
-    param_names = ['resolution']
-    params = ['480km', '120km']
+class DatasetBenchmark:
+    """Class used as a template for benchmarks requiring a ``UxDataset`` in
+    this module across both resolutions."""
+    param_names = ['resolution',]
+    params = [['480km', '120km'],]
+
+
+    def setup(self, resolution, *args, **kwargs):
-    def setup(self, resolution):
         self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1])
-    def teardown(self, resolution):
+    def teardown(self, resolution, *args, **kwargs):
         del self.uxds
-    def time_gradient(self, resolution):
-        self.uxds[data_var].gradient()
+class GridBenchmark:
+    """Class used as a template for benchmarks requiring a ``Grid`` in this
+    module across both resolutions."""
+    param_names = ['resolution', ]
+    params = [['480km', '120km'], ]
-    
def peakmem_gradient(self, resolution): - grad = self.uxds[data_var].gradient() + def setup(self, resolution, *args, **kwargs): -class Integrate: + self.uxgrid = ux.open_grid(file_path_dict[resolution][0]) - param_names = ['resolution'] - params = ['480km', '120km'] + def teardown(self, resolution, *args, **kwargs): + del self.uxgrid - def setup(self, resolution): - self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1]) - def teardown(self, resolution): - del self.uxds +class Gradient(DatasetBenchmark): + def time_gradient(self, resolution): + self.uxds[data_var].gradient() + def peakmem_gradient(self, resolution): + grad = self.uxds[data_var].gradient() + +class Integrate(DatasetBenchmark): def time_integrate(self, resolution): self.uxds[data_var].integrate() def peakmem_integrate(self, resolution): integral = self.uxds[data_var].integrate() -class GeoDataFrame: - param_names = ['resolution', 'exclude_antimeridian'] - params = [['480km', '120km'], - [True, False]] +class GeoDataFrame(DatasetBenchmark): + param_names = DatasetBenchmark.param_names + ['exclude_antimeridian'] + params = DatasetBenchmark.params + [[True, False]] - def setup(self, resolution, exclude_antimeridian): - self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1]) - - def teardown(self, resolution, exclude_antimeridian): - del self.uxds - def time_to_geodataframe(self, resolution, exclude_antimeridian): self.uxds[data_var].to_geodataframe(exclude_antimeridian=exclude_antimeridian) @@ -83,19 +85,7 @@ def peakmem_to_geodataframe(self, resolution, exclude_antimeridian): gdf = self.uxds[data_var].to_geodataframe(exclude_antimeridian=exclude_antimeridian) -class ConnectivityConstruction: - - param_names = ['resolution'] - params = ['480km', '120km'] - - - def setup(self, resolution): - self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1]) - - - def teardown(self, resolution): - del self.uxds - 
+class ConnectivityConstruction(DatasetBenchmark): def time_n_nodes_per_face(self, resolution): self.uxds.uxgrid.n_nodes_per_face @@ -103,30 +93,15 @@ def time_face_face_connectivity(self, resolution): ux.grid.connectivity._populate_face_face_connectivity(self.uxds.uxgrid) -class MatplotlibConversion: - param_names = ['resolution', 'periodic_elements'] - params = (['480km', '120km'], ['include', 'exclude', 'split']) - - def setup(self, resolution, periodic_elements): - self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1]) - - def teardown(self, resolution, periodic_elements): - del self.uxds +class MatplotlibConversion(DatasetBenchmark): + param_names = DatasetBenchmark.param_names + ['periodic_elements'] + params = DatasetBenchmark.params + [['include', 'exclude', 'split']] def time_dataarray_to_polycollection(self, resolution, periodic_elements): self.uxds[data_var].to_polycollection() -class ConstructTreeStructures: - param_names = ['resolution'] - params = ['480km', '120km'] - - def setup(self, resolution): - self.uxds = ux.open_dataset(file_path_dict[resolution][0], file_path_dict[resolution][1]) - - def teardown(self, resolution): - del self.uxds - +class ConstructTreeStructures(DatasetBenchmark): def time_kd_tree(self, resolution): self.uxds.uxgrid.get_kd_tree() @@ -164,3 +139,8 @@ def time_nearest_neighbor_remapping(self): def time_inverse_distance_weighted_remapping(self): self.uxds_480["bottomDepth"].remap.inverse_distance_weighted(self.uxds_120.uxgrid) + + +class HoleEdgeIndices(DatasetBenchmark): + def time_construct_hole_edge_indices(self, resolution): + ux.grid.geometry._construct_hole_edge_indices(self.uxds.uxgrid.edge_face_connectivity) diff --git a/docs/internal_api/index.rst b/docs/internal_api/index.rst index b263f4ec8..6dd40b2c0 100644 --- a/docs/internal_api/index.rst +++ b/docs/internal_api/index.rst @@ -141,6 +141,7 @@ Geometry grid.geometry._insert_pt_in_latlonbox grid.geometry._populate_face_latlon_bound 
grid.geometry._populate_bounds + grid.geometry._construct_hole_edge_indices Coordinates ----------- diff --git a/docs/user_api/index.rst b/docs/user_api/index.rst index ec3278b6d..57e40d832 100644 --- a/docs/user_api/index.rst +++ b/docs/user_api/index.rst @@ -54,9 +54,16 @@ Remapping --------- .. autosummary:: :toctree: generated/ + :template: autosummary/accessor.rst + + UxDataArray.remap + +.. autosummary:: + :toctree: generated/ + :template: autosummary/accessor.rst - UxDataset.nearest_neighbor_remap - UxDataset.inverse_distance_weighted_remap + UxDataset.remap.nearest_neighbor + UxDataset.remap.inverse_distance_weighted Plotting -------- @@ -132,10 +139,16 @@ Remapping --------- .. autosummary:: :toctree: generated/ + :template: autosummary/accessor.rst + + UxDataArray.remap + +.. autosummary:: + :toctree: generated/ + :template: autosummary/accessor_method.rst - UxDataArray.nearest_neighbor_remap - UxDataArray.inverse_distance_weighted_remap - UxDataArray.nodal_average + UxDataArray.remap.nearest_neighbor + UxDataArray.remap.inverse_distance_weighted Plotting -------- @@ -210,6 +223,7 @@ IO Grid.to_polycollection Grid.to_linecollection Grid.validate + Grid.hole_edge_indices Methods diff --git a/test/test_arcs.py b/test/test_arcs.py index 17d04fe57..fa6bc8499 100644 --- a/test/test_arcs.py +++ b/test/test_arcs.py @@ -30,13 +30,6 @@ class TestIntersectionPoint(TestCase): def test_pt_within_gcr(self): # The GCR that's eexactly 180 degrees will have Value Error raised - gcr_180degree_cart = [ - _lonlat_rad_to_xyz(0.0, 0.0), - _lonlat_rad_to_xyz(np.pi, 0.0) - ] - pt_same_lon_in = _lonlat_rad_to_xyz(0.0, 0.0) - with self.assertRaises(ValueError): - point_within_gca(np.asarray(pt_same_lon_in), np.asarray(gcr_180degree_cart)) gcr_180degree_cart = [ _lonlat_rad_to_xyz(0.0, np.pi / 2.0), @@ -45,7 +38,7 @@ def test_pt_within_gcr(self): pt_same_lon_in = _lonlat_rad_to_xyz(0.0, 0.0) with self.assertRaises(ValueError): - point_within_gca(np.asarray(pt_same_lon_in), 
np.asarray(gcr_180degree_cart)) + point_within_gca(pt_same_lon_in, gcr_180degree_cart) # Test when the point and the GCA all have the same longitude gcr_same_lon_cart = [ @@ -53,19 +46,19 @@ def test_pt_within_gcr(self): _lonlat_rad_to_xyz(0.0, -1.5) ] pt_same_lon_in = _lonlat_rad_to_xyz(0.0, 0.0) - self.assertTrue(point_within_gca(np.asarray(pt_same_lon_in), np.asarray(gcr_same_lon_cart))) + self.assertTrue(point_within_gca(pt_same_lon_in, gcr_same_lon_cart)) pt_same_lon_out = _lonlat_rad_to_xyz(0.0, 1.500000000000001) - res = point_within_gca(np.asarray(pt_same_lon_out), np.asarray(gcr_same_lon_cart)) + res = point_within_gca(pt_same_lon_out, gcr_same_lon_cart) self.assertFalse(res) pt_same_lon_out_2 = _lonlat_rad_to_xyz(0.1, 1.0) - res = point_within_gca(np.asarray(pt_same_lon_out_2), np.asarray(gcr_same_lon_cart)) + res = point_within_gca(pt_same_lon_out_2, gcr_same_lon_cart) self.assertFalse(res) # And if we increase the digital place by one, it should be true again pt_same_lon_out_add_one_place = _lonlat_rad_to_xyz(0.0, 1.5000000000000001) - res = point_within_gca(np.asarray(pt_same_lon_out_add_one_place), np.asarray(gcr_same_lon_cart)) + res = point_within_gca(pt_same_lon_out_add_one_place, gcr_same_lon_cart) self.assertTrue(res) # Normal case @@ -76,7 +69,7 @@ def test_pt_within_gcr(self): -0.997]]) pt_cart_within = np.array( [0.25616109352676675, 0.9246590335292105, -0.010021496695000144]) - self.assertTrue(point_within_gca(np.asarray(pt_cart_within), np.asarray(gcr_cart_2), True)) + self.assertTrue(point_within_gca(pt_cart_within, gcr_cart_2, True)) # Test other more complicate cases : The anti-meridian case @@ -87,16 +80,16 @@ def test_pt_within_gcr_antimeridian(self): gcr_cart = np.array([[0.351, -0.724, 0.593], [0.617, 0.672, 0.410]]) pt_cart = np.array( [0.9438777657502077, 0.1193199333436068, 0.922714737029319]) - self.assertTrue(point_within_gca(np.asarray(pt_cart), np.asarray(gcr_cart), is_directed=True)) + self.assertTrue(point_within_gca(pt_cart, 
gcr_cart, is_directed=True)) # If we swap the gcr, it should throw a value error since it's larger than 180 degree gcr_cart_flip = np.array([[0.617, 0.672, 0.410], [0.351, -0.724, 0.593]]) with self.assertRaises(ValueError): - point_within_gca(np.asarray(pt_cart), np.asarray(gcr_cart_flip), is_directed=True) + point_within_gca(pt_cart, gcr_cart_flip, is_directed=True) # If we flip the gcr in the undirected mode, it should still work self.assertTrue( - point_within_gca(np.asarray(pt_cart), np.asarray(gcr_cart_flip), is_directed=False)) + point_within_gca(pt_cart, gcr_cart_flip, is_directed=False)) # 2nd anti-meridian case # GCR vertex0 in radian : [4.104711496596806, 0.5352983676533828], @@ -107,9 +100,9 @@ def test_pt_within_gcr_antimeridian(self): pt_cart_within = np.array( [0.6136726305712109, 0.28442243941920053, -0.365605190899831]) self.assertFalse( - point_within_gca(np.asarray(pt_cart_within), np.asarray(gcr_cart_1), is_directed=True)) + point_within_gca(pt_cart_within, gcr_cart_1, is_directed=True)) self.assertFalse( - point_within_gca(np.asarray(pt_cart_within), np.asarray(gcr_cart_1), is_directed=False)) + point_within_gca(pt_cart_within, gcr_cart_1, is_directed=False)) # The first case should not work and the second should work v1_rad = [0.1, 0.0] @@ -119,10 +112,10 @@ def test_pt_within_gcr_antimeridian(self): gcr_cart = np.array([v1_cart, v2_cart]) pt_cart = _lonlat_rad_to_xyz(0.01, 0.0) with self.assertRaises(ValueError): - point_within_gca(np.asarray(pt_cart), np.asarray(gcr_cart), is_directed=True) + point_within_gca(pt_cart, gcr_cart, is_directed=True) gcr_car_flipped = np.array([v2_cart, v1_cart]) self.assertTrue( - point_within_gca(np.asarray(pt_cart), np.asarray(gcr_car_flipped), is_directed=True)) + point_within_gca(pt_cart, gcr_car_flipped, is_directed=True)) def test_pt_within_gcr_cross_pole(self): gcr_cart = np.array([[0.351, 0.0, 0.3], [-0.351, 0.0, 0.3]]) diff --git a/test/test_dataarray.py b/test/test_dataarray.py index 
9d0408411..7cc438a83 100644 --- a/test/test_dataarray.py +++ b/test/test_dataarray.py @@ -113,29 +113,3 @@ def test_geodataframe_caching(self): # override will recompute the grid assert gdf_start is not gdf_end - - def test_nodal_average(self): - - # test on a node-centered dataset - uxds = ux.open_dataset(gridfile_geoflow, dsfile_v1_geoflow) - - v1_nodal_average = uxds['v1'].nodal_average() - - # final dimension should match number of faces - self.assertEqual(v1_nodal_average.shape[-1], uxds.uxgrid.n_face) - - # all other dimensions should remain unchanged - self.assertEqual(uxds['v1'].shape[0:-1], v1_nodal_average.shape[0:-1]) - - # test on a sample mesh with 4 verts - verts = [[[-170, 40], [180, 30], [165, 25], [-170, 20]]] - data = [1, 2, 3, 4] - - uxgrid = ux.open_grid(verts, latlon=True) - - uxda = ux.UxDataArray(uxgrid=uxgrid, data=data, dims=('n_node')) - - uxda_nodal_average = uxda.nodal_average() - - # resulting data should be the mean of the corner nodes of the single face - self.assertEqual(uxda_nodal_average, np.mean(data)) diff --git a/test/test_geometry.py b/test/test_geometry.py index 953942293..0f25f6f07 100644 --- a/test/test_geometry.py +++ b/test/test_geometry.py @@ -27,6 +27,13 @@ grid_files = [gridfile_CSne8, gridfile_geoflow] data_files = [datafile_CSne30, datafile_geoflow] +grid_quad_hex = current_path/ "meshfiles" / "ugrid" / "quad-hexagon" / "grid.nc" +grid_geoflow = current_path/ "meshfiles" / "ugrid" / "geoflow-small" / "grid.nc" +grid_mpas = current_path/ "meshfiles" / "mpas" / "QU" / "oQU480.231010.nc" + + +# List of grid files to test +grid_files_latlonBound = [grid_quad_hex, grid_geoflow, gridfile_CSne8, grid_mpas] class TestAntimeridian(TestCase): @@ -56,6 +63,8 @@ def test_linecollection_execution(self): lines = uxgrid.to_linecollection() + + class TestPredicate(TestCase): def test_pole_point_inside_polygon_from_vertice_north(self): @@ -366,6 +375,19 @@ def test_extreme_gca_latitude_max(self): expected_max_latitude, 
delta=ERROR_TOLERANCE) + def test_extreme_gca_latitude_max_short(self): + # Define a great circle arc in 3D space that has a small span + gca_cart = np.array( [[ 0.65465367, -0.37796447, -0.65465367], [ 0.6652466, -0.33896007, -0.6652466 ]]) + + # Calculate the maximum latitude + max_latitude = ux.grid.arcs.extreme_gca_latitude(gca_cart, 'max') + + # Check if the maximum latitude is correct + expected_max_latitude = self._max_latitude_rad_iterative(gca_cart) + self.assertAlmostEqual(max_latitude, + expected_max_latitude, + delta=ERROR_TOLERANCE) + def test_extreme_gca_latitude_min(self): # Define a great circle arc that is symmetrical around 0 degrees longitude gca_cart = np.array([ @@ -621,6 +643,103 @@ def test_populate_bounds_antimeridian(self): bounds = _populate_face_latlon_bound(face_edges_connectivity_cartesian, face_edges_connectivity_lonlat) nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + def test_populate_bounds_equator(self): + # the face is touching the equator + face_edges_cart = np.array([ + [[0.99726469, -0.05226443, -0.05226443], [0.99862953, 0.0, -0.05233596]], + [[0.99862953, 0.0, -0.05233596], [1.0, 0.0, 0.0]], + [[1.0, 0.0, 0.0], [0.99862953, -0.05233596, 0.0]], + [[0.99862953, -0.05233596, 0.0], [0.99726469, -0.05226443, -0.05226443]] + ] + ) + # Apply the inverse transformation to get the lat lon coordinates + face_edges_lonlat = np.array( + [[_xyz_to_lonlat_rad(*edge[0]), _xyz_to_lonlat_rad(*edge[1])] for edge in face_edges_cart]) + + bounds = _populate_face_latlon_bound(face_edges_cart, face_edges_lonlat) + expected_bounds = np.array([[-0.05235988, 0], [6.23082543, 0]]) + nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + + def test_populate_bounds_southSphere(self): + # The face is near the south pole but doesn't contains the pole + face_edges_cart = np.array([ + [[-1.04386773e-01, -5.20500333e-02, -9.93173799e-01], [-1.04528463e-01, -1.28010448e-17, -9.94521895e-01]], + [[-1.04528463e-01, 
-1.28010448e-17, -9.94521895e-01], [-5.23359562e-02, -6.40930613e-18, -9.98629535e-01]], + [[-5.23359562e-02, -6.40930613e-18, -9.98629535e-01], [-5.22644277e-02, -5.22644277e-02, -9.97264689e-01]], + [[-5.22644277e-02, -5.22644277e-02, -9.97264689e-01], [-1.04386773e-01, -5.20500333e-02, -9.93173799e-01]] + ]) + + # Apply the inverse transformation to get the lat lon coordinates + face_edges_lonlat = np.array( + [[_xyz_to_lonlat_rad(*edge[0]), _xyz_to_lonlat_rad(*edge[1])] for edge in face_edges_cart]) + + bounds = _populate_face_latlon_bound(face_edges_cart, face_edges_lonlat) + expected_bounds = np.array([[-1.51843645, -1.45388627], [3.14159265, 3.92699082]]) + nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + + def test_populate_bounds_near_pole(self): + # The face is near the south pole but doesn't contains the pole + face_edges_cart = np.array([ + [[3.58367950e-01, 0.00000000e+00, -9.33580426e-01], [3.57939780e-01, 4.88684203e-02, -9.32465008e-01]], + [[3.57939780e-01, 4.88684203e-02, -9.32465008e-01], [4.06271283e-01, 4.78221112e-02, -9.12500241e-01]], + [[4.06271283e-01, 4.78221112e-02, -9.12500241e-01], [4.06736643e-01, 2.01762691e-16, -9.13545458e-01]], + [[4.06736643e-01, 2.01762691e-16, -9.13545458e-01], [3.58367950e-01, 0.00000000e+00, -9.33580426e-01]] + ]) + + # Apply the inverse transformation to get the lat lon coordinates + face_edges_lonlat = np.array( + [[_xyz_to_lonlat_rad(*edge[0]), _xyz_to_lonlat_rad(*edge[1])] for edge in face_edges_cart]) + + bounds = _populate_face_latlon_bound(face_edges_cart, face_edges_lonlat) + expected_bounds = np.array([[-1.20427718, -1.14935491], [0,0.13568803]]) + nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + + def test_populate_bounds_near_pole2(self): + # The face is near the south pole but doesn't contains the pole + face_edges_cart = np.array([ + [[3.57939780e-01, -4.88684203e-02, -9.32465008e-01], [3.58367950e-01, 0.00000000e+00, -9.33580426e-01]], + [[3.58367950e-01, 
0.00000000e+00, -9.33580426e-01], [4.06736643e-01, 2.01762691e-16, -9.13545458e-01]], + [[4.06736643e-01, 2.01762691e-16, -9.13545458e-01], [4.06271283e-01, -4.78221112e-02, -9.12500241e-01]], + [[4.06271283e-01, -4.78221112e-02, -9.12500241e-01], [3.57939780e-01, -4.88684203e-02, -9.32465008e-01]] + ]) + + + # Apply the inverse transformation to get the lat lon coordinates + face_edges_lonlat = np.array( + [[_xyz_to_lonlat_rad(*edge[0]), _xyz_to_lonlat_rad(*edge[1])] for edge in face_edges_cart]) + + bounds = _populate_face_latlon_bound(face_edges_cart, face_edges_lonlat) + expected_bounds = np.array([[-1.20427718, -1.14935491], [6.147497,4.960524e-16]]) + nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + + def test_populate_bounds_long_face(self): + """Test case where one of the face edges is a longitude GCA.""" + face_edges_cart = np.array([ + [[9.9999946355819702e-01, -6.7040475551038980e-04, 8.0396590055897832e-04], + [9.9999439716339111e-01, -3.2541253603994846e-03, -8.0110825365409255e-04]], + [[9.9999439716339111e-01, -3.2541253603994846e-03, -8.0110825365409255e-04], + [9.9998968839645386e-01, -3.1763643492013216e-03, -3.2474612817168236e-03]], + [[9.9998968839645386e-01, -3.1763643492013216e-03, -3.2474612817168236e-03], + [9.9998861551284790e-01, -8.2993711112067103e-04, -4.7004125081002712e-03]], + [[9.9998861551284790e-01, -8.2993711112067103e-04, -4.7004125081002712e-03], + [9.9999368190765381e-01, 1.7522916896268725e-03, -3.0944822356104851e-03]], + [[9.9999368190765381e-01, 1.7522916896268725e-03, -3.0944822356104851e-03], + [9.9999833106994629e-01, 1.6786820488050580e-03, -6.4892979571595788e-04]], + [[9.9999833106994629e-01, 1.6786820488050580e-03, -6.4892979571595788e-04], + [9.9999946355819702e-01, -6.7040475551038980e-04, 8.0396590055897832e-04]] + ]) + + face_edges_lonlat = np.array( + [[_xyz_to_lonlat_rad(*edge[0]), _xyz_to_lonlat_rad(*edge[1])] for edge in face_edges_cart]) + + bounds = 
_populate_face_latlon_bound(face_edges_cart, face_edges_lonlat) + + # The expected bounds should not contains the south pole [0,-0.5*np.pi] + self.assertTrue(bounds[1][0] != 0.0) + + + + def test_populate_bounds_node_on_pole(self): # Generate a normal face that is crossing the antimeridian vertices_lonlat = [[10.0, 90.0], [10.0, 10.0], [50.0, 10.0], [50.0, 60.0]] @@ -1116,6 +1235,8 @@ def test_populate_bounds_normal(self): is_GCA_list=[True, False, True, False]) nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) + + def test_populate_bounds_antimeridian(self): # Generate a normal face that is crossing the antimeridian vertices_lonlat = [[350, 60.0], [350, 10.0], [50.0, 10.0], [50.0, 60.0]] @@ -1173,7 +1294,7 @@ def test_populate_bounds_node_on_pole(self): nt.assert_allclose(bounds, expected_bounds, atol=ERROR_TOLERANCE) def test_populate_bounds_edge_over_pole(self): - # Generate a normal face that is crossing the antimeridian + # Generate a normal face that is around the north pole vertices_lonlat = [[210.0, 80.0], [350.0, 60.0], [10.0, 60.0], [30.0, 80.0]] vertices_lonlat = np.array(vertices_lonlat) @@ -1288,6 +1409,31 @@ def test_populate_bounds_GCAList_mix(self): nt.assert_allclose(face_bounds[i], expected_bounds[i], atol=ERROR_TOLERANCE) +# Test class +class TestLatlonBoundsFiles: + + def test_face_bounds(self): + """Test to ensure ``Grid.face_bounds`` works correctly for all grid + files.""" + for grid_path in grid_files_latlonBound: + try: + # Open the grid file + self.uxgrid = ux.open_grid(grid_path) + + # Test: Ensure the bounds are obtained + bounds = self.uxgrid.bounds + assert bounds is not None, f"Grid.face_bounds should not be None for {grid_path}" + + except Exception as e: + # Print the failing grid file and re-raise the exception + print(f"Test failed for grid file: {grid_path}") + raise e + + finally: + # Clean up the grid object + del self.uxgrid + + class TestGeoDataFrame(TestCase): def test_to_gdf(self): diff --git 
a/test/test_grid.py b/test/test_grid.py index 57eb3f3b8..6b80aa87a 100644 --- a/test/test_grid.py +++ b/test/test_grid.py @@ -31,6 +31,7 @@ gridfile_fesom = current_path / "meshfiles" / "ugrid" / "fesom" / "fesom.mesh.diag.nc" gridfile_geoflow = current_path / "meshfiles" / "ugrid" / "geoflow-small" / "grid.nc" gridfile_mpas = current_path / 'meshfiles' / "mpas" / "QU" / 'mesh.QU.1920km.151026.nc' +gridfile_mpas_holes = current_path / 'meshfiles' / "mpas" / "QU" / 'oQU480.231010.nc' dsfile_vortex_CSne30 = current_path / "meshfiles" / "ugrid" / "outCSne30" / "outCSne30_vortex.nc" dsfile_var2_CSne30 = current_path / "meshfiles" / "ugrid" / "outCSne30" / "outCSne30_var2.nc" @@ -48,6 +49,14 @@ def test_validate(self): grid_mpas = ux.open_grid(gridfile_mpas) assert (grid_mpas.validate()) + def test_grid_with_holes(self): + """Test _holes_in_mesh function.""" + grid_without_holes = ux.open_grid(gridfile_mpas) + grid_with_holes = ux.open_grid(gridfile_mpas_holes) + + self.assertTrue(grid_with_holes.hole_edge_indices.size != 0) + self.assertTrue(grid_without_holes.hole_edge_indices.size == 0) + def test_encode_as(self): """Reads a ugrid file and encodes it as `xarray.Dataset` in various types.""" @@ -658,8 +667,8 @@ def test_build_face_edges_connectivity_mpas(self): # construct edge nodes edge_nodes_output, _, _ = _build_edge_node_connectivity(mpas_grid_ux.face_node_connectivity.values, - mpas_grid_ux.n_face, - mpas_grid_ux.n_max_face_nodes) + mpas_grid_ux.n_face, + mpas_grid_ux.n_max_face_nodes) # _populate_face_edge_connectivity(mpas_grid_ux) # edge_nodes_output = mpas_grid_ux._ds['edge_node_connectivity'].values @@ -932,6 +941,7 @@ def test_from_face_vertices(self): class TestLatlonBounds(TestCase): gridfile_mpas = current_path / "meshfiles" / "mpas" / "QU" / "oQU480.231010.nc" + def test_populate_bounds_GCA_mix(self): face_1 = [[10.0, 60.0], [10.0, 10.0], [50.0, 10.0], [50.0, 60.0]] face_2 = [[350, 60.0], [350, 10.0], [50.0, 10.0], [50.0, 60.0]] @@ -941,11 +951,10 @@ 
def test_populate_bounds_GCA_mix(self): faces = [face_1, face_2, face_3, face_4] # Hand calculated bounds for the above faces in radians - expected_bounds = [[[0.17453293, 1.07370494],[0.17453293, 0.87266463]], - [[0.17453293, 1.10714872],[6.10865238, 0.87266463]], - [[1.04719755, 1.57079633],[3.66519143, 0.52359878]], - [[1.04719755,1.57079633],[0., 6.28318531]]] - + expected_bounds = [[[0.17453293, 1.07370494], [0.17453293, 0.87266463]], + [[0.17453293, 1.10714872], [6.10865238, 0.87266463]], + [[1.04719755, 1.57079633], [3.66519143, 0.52359878]], + [[1.04719755, 1.57079633], [0., 6.28318531]]] grid = ux.Grid.from_face_vertices(faces, latlon=True) bounds_xarray = grid.bounds diff --git a/test/test_integrate.py b/test/test_integrate.py index 078f355cf..8fa5b2eb8 100644 --- a/test/test_integrate.py +++ b/test/test_integrate.py @@ -8,8 +8,10 @@ import numpy.testing as nt import uxarray as ux +from uxarray.constants import ERROR_TOLERANCE, INT_FILL_VALUE from uxarray.grid.coordinates import _lonlat_rad_to_xyz -from uxarray.grid.integrate import _get_zonal_face_interval, _process_overlapped_intervals, _get_zonal_faces_weight_at_constLat +from uxarray.grid.integrate import _get_zonal_face_interval, _process_overlapped_intervals, _get_zonal_faces_weight_at_constLat,_get_faces_constLat_intersection_info + current_path = Path(os.path.dirname(os.path.realpath(__file__))) @@ -60,9 +62,169 @@ def test_multi_dim(self): class TestFaceWeights(TestCase): + gridfile_ne30 = current_path / "meshfiles" / "ugrid" / "outCSne30" / "outCSne30.ug" + dsfile_var2_ne30 = current_path / "meshfiles" / "ugrid" / "outCSne30" / "outCSne30_var2.nc" + + def test_get_faces_constLat_intersection_info_one_intersection(self): + face_edges_cart = np.array([ + [[-5.4411371445381629e-01, -4.3910468172333759e-02, -8.3786164521844386e-01], + [-5.4463903501502697e-01, -6.6699045092185599e-17, -8.3867056794542405e-01]], + + [[-5.4463903501502697e-01, -6.6699045092185599e-17, -8.3867056794542405e-01], + 
[-4.9999999999999994e-01, -6.1232339957367648e-17, -8.6602540378443871e-01]], + + [[-4.9999999999999994e-01, -6.1232339957367648e-17, -8.6602540378443871e-01], + [-4.9948581138450826e-01, -4.5339793804534498e-02, -8.6513480297773349e-01]], + + [[-4.9948581138450826e-01, -4.5339793804534498e-02, -8.6513480297773349e-01], + [-5.4411371445381629e-01, -4.3910468172333759e-02, -8.3786164521844386e-01]] + ]) + + latitude_cart = -0.8660254037844386 + is_directed=False + is_latlonface=False + is_GCA_list=None + unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed) + # The expected unique_intersections length is 1 + self.assertEqual(len(unique_intersections), 1) + + def test_get_faces_constLat_intersection_info_encompass_pole(self): + face_edges_cart = np.array([ + [[0.03982285692494229, 0.00351700770436231, 0.9992005658140627], + [0.00896106681877875, 0.03896060263227105, 0.9992005658144913]], + + [[0.00896106681877875, 0.03896060263227105, 0.9992005658144913], + [-0.03428461218295055, 0.02056197086916728, 0.9992005658132106]], + + [[-0.03428461218295055, 0.02056197086916728, 0.9992005658132106], + [-0.03015012448894485, -0.02625260499902213, 0.9992005658145248]], + + [[-0.03015012448894485, -0.02625260499902213, 0.9992005658145248], + [0.01565081128889155, -0.03678697293262131, 0.9992005658167203]], + + [[0.01565081128889155, -0.03678697293262131, 0.9992005658167203], + [0.03982285692494229, 0.00351700770436231, 0.9992005658140627]] + ]) + + latitude_cart = 0.9993908270190958 + # Convert the latitude to degrees + latitude_rad = np.arcsin(latitude_cart) + latitude_deg = np.rad2deg(latitude_rad) + print(latitude_deg) + is_directed=False + is_latlonface=False + is_GCA_list=None + unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed) + # The expected unique_intersections 
length should be no greater than 2* n_edges + self.assertLessEqual(len(unique_intersections), 2*len(face_edges_cart)) + + def test_get_faces_constLat_intersection_info_on_pole(self): + face_edges_cart = np.array([ + [[-5.2264427688714095e-02, -5.2264427688714102e-02, -9.9726468863423734e-01], + [-5.2335956242942412e-02, -6.4093061293235361e-18, -9.9862953475457394e-01]], + + [[-5.2335956242942412e-02, -6.4093061293235361e-18, -9.9862953475457394e-01], + [6.1232339957367660e-17, 0.0000000000000000e+00, -1.0000000000000000e+00]], + + [[6.1232339957367660e-17, 0.0000000000000000e+00, -1.0000000000000000e+00], + [3.2046530646617680e-18, -5.2335956242942412e-02, -9.9862953475457394e-01]], + + [[3.2046530646617680e-18, -5.2335956242942412e-02, -9.9862953475457394e-01], + [-5.2264427688714095e-02, -5.2264427688714102e-02, -9.9726468863423734e-01]] + ]) + latitude_cart = -0.9998476951563913 + is_directed=False + is_latlonface=False + is_GCA_list=None + unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed) + # The expected unique_intersections length is 2 + self.assertEqual(len(unique_intersections), 2) + + + def test_get_faces_constLat_intersection_info_near_pole(self): + face_edges_cart = np.array([ + [[-5.1693346290592648e-02, 1.5622531297347531e-01, -9.8636780641686628e-01], + [-5.1195320928843470e-02, 2.0763904784932552e-01, -9.7686491641537532e-01]], + [[-5.1195320928843470e-02, 2.0763904784932552e-01, -9.7686491641537532e-01], + [1.2730919333264125e-17, 2.0791169081775882e-01, -9.7814760073380580e-01]], + [[1.2730919333264125e-17, 2.0791169081775882e-01, -9.7814760073380580e-01], + [9.5788483443923397e-18, 1.5643446504023048e-01, -9.8768834059513777e-01]], + [[9.5788483443923397e-18, 1.5643446504023048e-01, -9.8768834059513777e-01], + [-5.1693346290592648e-02, 1.5622531297347531e-01, -9.8636780641686628e-01]] + ]) + + latitude_cart = -0.9876883405951378 + 
latitude_rad = np.arcsin(latitude_cart)
+        latitude_deg = np.rad2deg(latitude_rad)
+        is_directed=False
+        is_latlonface=False
+        is_GCA_list=None
+        unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed)
+        # The expected unique_intersections length is 1
+        self.assertEqual(len(unique_intersections), 1)
+
+
+    def test_get_faces_constLat_intersection_info_2(self):
+        """This might test the case where the calculated intersection points
+        might suffer from floating point errors If not handled properly, the
+        function might return more than 2 unique intersections."""
+
+        face_edges_cart = np.array([[[0.6546536707079771, -0.37796447300922714, -0.6546536707079772],
+                                     [0.6652465971273088, -0.33896007142593115, -0.6652465971273087]],
+
+                                    [[0.6652465971273088, -0.33896007142593115, -0.6652465971273087],
+                                     [0.6949903639307233, -0.3541152775760984, -0.6257721344312508]],
+
+                                    [[0.6949903639307233, -0.3541152775760984, -0.6257721344312508],
+                                     [0.6829382762718700, -0.39429459764546304, -0.6149203859609873]],
+
+                                    [[0.6829382762718700, -0.39429459764546304, -0.6149203859609873],
+                                     [0.6546536707079771, -0.37796447300922714, -0.6546536707079772]]]])
+
+        latitude_cart = -0.6560590289905073
+        is_directed=False
+        is_latlonface=False
+        is_GCA_list=None
+        unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed)
+        # The expected unique_intersections length is 2
+        self.assertEqual(len(unique_intersections), 2)
+
+
+    def test_get_faces_constLat_intersection_info_3(self):
+        """This might test the case where the calculated intersection points
+        might suffer from floating point errors If not handled properly, the
+        function might return more than 2 unique intersections."""
+
+        face_edges_cart = np.array([[[0.6546536707079771, -0.37796447300922714, -0.6546536707079772],
+                                     [0.6652465971273088,
-0.33896007142593115, -0.6652465971273087]], + + [[0.6652465971273088, -0.33896007142593115, -0.6652465971273087], + [0.6949903639307233, -0.3541152775760984, -0.6257721344312508]], + + [[0.6949903639307233, -0.3541152775760984, -0.6257721344312508], + [0.6829382762718700, -0.39429459764546304, -0.6149203859609873]], + + [[0.6829382762718700, -0.39429459764546304, -0.6149203859609873], + [0.6546536707079771, -0.37796447300922714, -0.6546536707079772]]]) + + latitude_cart = -0.6560590289905073 + is_directed=False + is_latlonface=False + is_GCA_list=None + unique_intersections, pt_lon_min, pt_lon_max = _get_faces_constLat_intersection_info(face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed) + # The expected unique_intersections length is 2 + self.assertEqual(len(unique_intersections), 2) def test_get_zonal_face_interval(self): - """Test that the zonal face weights are correct.""" + """Test the _get_zonal_face_interval function for correct interval + computation. + + This test verifies that the _get_zonal_face_interval function + accurately computes the zonal face intervals given a set of face + edge nodes and a constant latitude value (constZ). The expected + intervals are compared against the calculated intervals to + ensure correctness. + """ vertices_lonlat = [[1.6 * np.pi, 0.25 * np.pi], [1.6 * np.pi, -0.25 * np.pi], [0.4 * np.pi, -0.25 * np.pi], @@ -94,8 +256,117 @@ def test_get_zonal_face_interval(self): # Asserting almost equal arrays nt.assert_array_almost_equal(actual_values_sorted, expected_values_sorted, decimal=13) + + def test_get_zonal_face_interval_empty_interval(self): + """Test the _get_zonal_face_interval function for cases where the + interval is empty. + + This test verifies that the _get_zonal_face_interval function + correctly returns an empty interval when the latitude only + touches the face but does not intersect it. 
+ """ + face_edges_cart = np.array([ + [[-5.4411371445381629e-01, -4.3910468172333759e-02, -8.3786164521844386e-01], + [-5.4463903501502697e-01, -6.6699045092185599e-17, -8.3867056794542405e-01]], + + [[-5.4463903501502697e-01, -6.6699045092185599e-17, -8.3867056794542405e-01], + [-4.9999999999999994e-01, -6.1232339957367648e-17, -8.6602540378443871e-01]], + + [[-4.9999999999999994e-01, -6.1232339957367648e-17, -8.6602540378443871e-01], + [-4.9948581138450826e-01, -4.5339793804534498e-02, -8.6513480297773349e-01]], + + [[-4.9948581138450826e-01, -4.5339793804534498e-02, -8.6513480297773349e-01], + [-5.4411371445381629e-01, -4.3910468172333759e-02, -8.3786164521844386e-01]] + ]) + + latitude_cart = -0.8660254037844386 + face_latlon_bounds = np.array([ + [-1.04719755, -0.99335412], + [3.14159265, 3.2321175] + ]) + + res = _get_zonal_face_interval(face_edges_cart, latitude_cart, face_latlon_bounds, is_directed=False) + expected_res = pd.DataFrame({"start": [0.0], "end": [0.0]}) + pd.testing.assert_frame_equal(res, expected_res) + + def test_get_zonal_face_interval_encompass_pole(self): + """Test the _get_zonal_face_interval function for cases where the face + encompasses the pole inside.""" + face_edges_cart = np.array([ + [[0.03982285692494229, 0.00351700770436231, 0.9992005658140627], + [0.00896106681877875, 0.03896060263227105, 0.9992005658144913]], + + [[0.00896106681877875, 0.03896060263227105, 0.9992005658144913], + [-0.03428461218295055, 0.02056197086916728, 0.9992005658132106]], + + [[-0.03428461218295055, 0.02056197086916728, 0.9992005658132106], + [-0.03015012448894485, -0.02625260499902213, 0.9992005658145248]], + + [[-0.03015012448894485, -0.02625260499902213, 0.9992005658145248], + [0.01565081128889155, -0.03678697293262131, 0.9992005658167203]], + + [[0.01565081128889155, -0.03678697293262131, 0.9992005658167203], + [0.03982285692494229, 0.00351700770436231, 0.9992005658140627]] + ]) + + latitude_cart = 0.9993908270190958 + + face_latlon_bounds = 
np.array([ + [np.arcsin(0.9992005658145248), 0.5*np.pi], + [0, 2*np.pi] + ]) + # Expected result DataFrame + expected_df = pd.DataFrame({ + 'start': [0.000000, 1.101091, 2.357728, 3.614365, 4.871002, 6.127640], + 'end': [0.331721, 1.588358, 2.844995, 4.101632, 5.358270, 6.283185] + }) + + # Call the function to get the result + res = _get_zonal_face_interval(face_edges_cart, latitude_cart, face_latlon_bounds, is_directed=False) + + # Assert the result matches the expected DataFrame + pd.testing.assert_frame_equal(res, expected_df) + + + + def test_get_zonal_face_interval_FILL_VALUE(self): + """Test the _get_zonal_face_interval function for cases where there are + dummy nodes.""" + dummy_node = [INT_FILL_VALUE, INT_FILL_VALUE, INT_FILL_VALUE] + vertices_lonlat = [[1.6 * np.pi, 0.25 * np.pi], + [1.6 * np.pi, -0.25 * np.pi], + [0.4 * np.pi, -0.25 * np.pi], + [0.4 * np.pi, 0.25 * np.pi]] + vertices = [_lonlat_rad_to_xyz(*v) for v in vertices_lonlat] + + face_edge_nodes = np.array([[vertices[0], vertices[1]], + [vertices[1], vertices[2]], + [vertices[2], vertices[3]], + [vertices[3], vertices[0]], + [dummy_node,dummy_node]]) + + constZ = np.sin(0.20) + # The latlon bounds for the latitude is not necessarily correct below since we don't use the latitudes bound anyway + interval_df = _get_zonal_face_interval(face_edge_nodes, constZ, + np.array([[-0.25 * np.pi, 0.25 * np.pi], [1.6 * np.pi, + 0.4 * np.pi]]), + is_directed=False) + expected_interval_df = pd.DataFrame({ + 'start': [1.6 * np.pi, 0.0], + 'end': [2.0 * np.pi, 00.4 * np.pi] + }) + # Sort both DataFrames by 'start' column before comparison + expected_interval_df_sorted = expected_interval_df.sort_values(by='start').reset_index(drop=True) + + # Converting the sorted DataFrames to NumPy arrays + actual_values_sorted = interval_df[['start', 'end']].to_numpy() + expected_values_sorted = expected_interval_df_sorted[['start', 'end']].to_numpy() + + # Asserting almost equal arrays + 
nt.assert_array_almost_equal(actual_values_sorted, expected_values_sorted, decimal=13) + + def test_get_zonal_face_interval_GCA_constLat(self): - """Test that the zonal face weights are correct.""" vertices_lonlat = [[-0.4 * np.pi, 0.25 * np.pi], [-0.4 * np.pi, -0.25 * np.pi], [0.4 * np.pi, -0.25 * np.pi], @@ -127,8 +398,10 @@ def test_get_zonal_face_interval_GCA_constLat(self): # Asserting almost equal arrays nt.assert_array_almost_equal(actual_values_sorted, expected_values_sorted, decimal=13) + def test_get_zonal_face_interval_equator(self): - """Test that the zonal face weights are correct.""" + """Test that the face interval is correctly computed when the latitude + is at the equator.""" vertices_lonlat = [[-0.4 * np.pi, 0.25 * np.pi], [-0.4 * np.pi, 0.0], [0.4 * np.pi, 0.0], [0.4 * np.pi, 0.25 * np.pi]] @@ -177,8 +450,9 @@ def test_get_zonal_face_interval_equator(self): # Asserting almost equal arrays nt.assert_array_almost_equal(actual_values_sorted, expected_values_sorted, decimal=13) - def test_process_overlapped_intervals(self): - # Example data that has overlapping intervals and gap + + def test_process_overlapped_intervals_overlap_and_gap(self): + # Test intervals data that has overlapping intervals and gap intervals_data = [ { 'start': 0.0, @@ -226,6 +500,7 @@ def test_process_overlapped_intervals(self): nt.assert_array_equal(overlap_contributions, expected_overlap_contributions) + def test_process_overlapped_intervals_antimerdian(self): intervals_data = [ { @@ -273,6 +548,7 @@ def test_process_overlapped_intervals_antimerdian(self): nt.assert_array_equal(overlap_contributions, expected_overlap_contributions) + def test_get_zonal_faces_weight_at_constLat_equator(self): face_0 = [[1.7 * np.pi, 0.25 * np.pi], [1.7 * np.pi, 0.0], [0.3 * np.pi, 0.0], [0.3 * np.pi, 0.25 * np.pi]] @@ -329,13 +605,11 @@ def test_get_zonal_faces_weight_at_constLat_equator(self): weight_df = _get_zonal_faces_weight_at_constLat(np.array([ face_0_edge_nodes, face_1_edge_nodes, 
face_2_edge_nodes, face_3_edge_nodes - ]), - 0.0, - latlon_bounds, - is_directed=False) + ]), 0.0, latlon_bounds, is_directed=False) nt.assert_array_almost_equal(weight_df, expected_weight_df, decimal=3) + def test_get_zonal_faces_weight_at_constLat_regular(self): face_0 = [[1.7 * np.pi, 0.25 * np.pi], [1.7 * np.pi, 0.0], [0.3 * np.pi, 0.0], [0.3 * np.pi, 0.25 * np.pi]] @@ -388,19 +662,123 @@ def test_get_zonal_faces_weight_at_constLat_regular(self): 'weight': [0.375, 0.0625, 0.3125, 0.25] }) - - # Assert the results is the same to the 3 decimal places weight_df = _get_zonal_faces_weight_at_constLat(np.array([ face_0_edge_nodes, face_1_edge_nodes, face_2_edge_nodes, face_3_edge_nodes - ]), - np.sin(0.1 * np.pi), - latlon_bounds, - is_directed=False) + ]), np.sin(0.1 * np.pi), latlon_bounds, is_directed=False) nt.assert_array_almost_equal(weight_df, expected_weight_df, decimal=3) + def test_get_zonal_faces_weight_at_constLat_on_pole_one_face(self): + #The face is touching the poleļ¼Œ so the weight should be 1.0 since there's only 1 face + face_edges_cart = np.array([[ + [[-5.22644277e-02, -5.22644277e-02, -9.97264689e-01], + [-5.23359562e-02, -6.40930613e-18, -9.98629535e-01]], + + [[-5.23359562e-02, -6.40930613e-18, -9.98629535e-01], + [6.12323400e-17, 0.00000000e+00, -1.00000000e+00]], + + [[6.12323400e-17, 0.00000000e+00, -1.00000000e+00], + [3.20465306e-18, -5.23359562e-02, -9.98629535e-01]], + + [[3.20465306e-18, -5.23359562e-02, -9.98629535e-01], + [-5.22644277e-02, -5.22644277e-02, -9.97264689e-01]] + ]]) + + # Corrected face_bounds + face_bounds = np.array([ + [-1.57079633, -1.4968158], + [3.14159265, 0.] 
+ ]) + constLat_cart = -1 + + weight_df = _get_zonal_faces_weight_at_constLat(face_edges_cart, constLat_cart, face_bounds, is_directed=False) + # Define the expected DataFrame + expected_weight_df = pd.DataFrame({"face_index": [0], "weight": [1.0]}) + + # Assert that the resulting should have weight is 1.0 + pd.testing.assert_frame_equal(weight_df, expected_weight_df) + + + def test_get_zonal_faces_weight_at_constLat_on_pole_faces(self): + #there will be 4 faces touching the pole, so the weight should be 0.25 for each face + face_edges_cart = np.array([ + [ + [[5.22644277e-02, -5.22644277e-02, 9.97264689e-01], [5.23359562e-02, 0.00000000e+00, 9.98629535e-01]], + [[5.23359562e-02, 0.00000000e+00, 9.98629535e-01], [6.12323400e-17, 0.00000000e+00, 1.00000000e+00]], + [[6.12323400e-17, 0.00000000e+00, 1.00000000e+00], [3.20465306e-18, -5.23359562e-02, 9.98629535e-01]], + [[3.20465306e-18, -5.23359562e-02, 9.98629535e-01], [5.22644277e-02, -5.22644277e-02, 9.97264689e-01]] + ], + [ + [[5.23359562e-02, 0.00000000e+00, 9.98629535e-01], [5.22644277e-02, 5.22644277e-02, 9.97264689e-01]], + [[5.22644277e-02, 5.22644277e-02, 9.97264689e-01], [3.20465306e-18, 5.23359562e-02, 9.98629535e-01]], + [[3.20465306e-18, 5.23359562e-02, 9.98629535e-01], [6.12323400e-17, 0.00000000e+00, 1.00000000e+00]], + [[6.12323400e-17, 0.00000000e+00, 1.00000000e+00], [5.23359562e-02, 0.00000000e+00, 9.98629535e-01]] + ], + [ + [[3.20465306e-18, -5.23359562e-02, 9.98629535e-01], [6.12323400e-17, 0.00000000e+00, 1.00000000e+00]], + [[6.12323400e-17, 0.00000000e+00, 1.00000000e+00], [-5.23359562e-02, -6.40930613e-18, 9.98629535e-01]], + [[-5.23359562e-02, -6.40930613e-18, 9.98629535e-01], + [-5.22644277e-02, -5.22644277e-02, 9.97264689e-01]], + [[-5.22644277e-02, -5.22644277e-02, 9.97264689e-01], [3.20465306e-18, -5.23359562e-02, 9.98629535e-01]] + ], + [ + [[6.12323400e-17, 0.00000000e+00, 1.00000000e+00], [3.20465306e-18, 5.23359562e-02, 9.98629535e-01]], + [[3.20465306e-18, 5.23359562e-02, 
9.98629535e-01], [-5.22644277e-02, 5.22644277e-02, 9.97264689e-01]], + [[-5.22644277e-02, 5.22644277e-02, 9.97264689e-01], [-5.23359562e-02, -6.40930613e-18, 9.98629535e-01]], + [[-5.23359562e-02, -6.40930613e-18, 9.98629535e-01], [6.12323400e-17, 0.00000000e+00, 1.00000000e+00]] + ] + ]) + + face_bounds = np.array([ + [[1.4968158, 1.57079633], [4.71238898, 0.0]], + [[1.4968158, 1.57079633], [0.0, 1.57079633]], + [[1.4968158, 1.57079633], [3.14159265, 0.0]], + [[1.4968158, 1.57079633], [0.0, 3.14159265]] + ]) + + constLat_cart = 1.0 + + weight_df = _get_zonal_faces_weight_at_constLat(face_edges_cart, constLat_cart, face_bounds, is_directed=False) + # Define the expected DataFrame + expected_weight_df = pd.DataFrame({ + 'face_index': [0, 1, 2, 3], + 'weight': [0.25, 0.25, 0.25, 0.25] + }) + + # Assert that the DataFrame matches the expected DataFrame + pd.testing.assert_frame_equal(weight_df, expected_weight_df) + + + def test_get_zonal_face_interval_pole(self): + #The face is touching the pole + face_edges_cart = np.array([ + [[-5.22644277e-02, -5.22644277e-02, -9.97264689e-01], + [-5.23359562e-02, -6.40930613e-18, -9.98629535e-01]], + + [[-5.23359562e-02, -6.40930613e-18, -9.98629535e-01], + [6.12323400e-17, 0.00000000e+00, -1.00000000e+00]], + + [[6.12323400e-17, 0.00000000e+00, -1.00000000e+00], + [3.20465306e-18, -5.23359562e-02, -9.98629535e-01]], + + [[3.20465306e-18, -5.23359562e-02, -9.98629535e-01], + [-5.22644277e-02, -5.22644277e-02, -9.97264689e-01]] + ]) + + # Corrected face_bounds + face_bounds = np.array([ + [-1.57079633, -1.4968158], + [3.14159265, 0.] 
+ ]) + constLat_cart = -0.9986295347545738 + + weight_df = _get_zonal_face_interval(face_edges_cart, constLat_cart, face_bounds, is_directed=False) + # No Nan values should be present in the weight_df + self.assertFalse(weight_df.isnull().values.any()) + + def test_get_zonal_faces_weight_at_constLat_latlonface(self): face_0 = [[np.deg2rad(350), np.deg2rad(40)], [np.deg2rad(350), np.deg2rad(20)], [np.deg2rad(10), np.deg2rad(20)], [np.deg2rad(10), np.deg2rad(40)]] @@ -449,10 +827,7 @@ def test_get_zonal_faces_weight_at_constLat_latlonface(self): # Assert the results is the same to the 3 decimal places weight_df = _get_zonal_faces_weight_at_constLat(np.array([ face_0_edge_nodes, face_1_edge_nodes, face_2_edge_nodes - ]), - np.sin(np.deg2rad(20)), - latlon_bounds, - is_directed=False, is_latlonface=True) + ]), np.sin(np.deg2rad(20)), latlon_bounds, is_directed=False, is_latlonface=True) nt.assert_array_almost_equal(weight_df, expected_weight_df, decimal=3) @@ -463,8 +838,5 @@ def test_get_zonal_faces_weight_at_constLat_latlonface(self): # It's edges are all GCA with self.assertRaises(ValueError): _get_zonal_faces_weight_at_constLat(np.array([ - face_0_edge_nodes, face_1_edge_nodes, face_2_edge_nodes - ]), - np.deg2rad(20), - latlon_bounds, - is_directed=False) + face_0_edge_nodes, face_1_edge_nodes, face_2_edge_nodes + ]), np.deg2rad(20), latlon_bounds, is_directed=False) diff --git a/test/test_intersections.py b/test/test_intersections.py index b134d9029..769859b51 100644 --- a/test/test_intersections.py +++ b/test/test_intersections.py @@ -13,10 +13,6 @@ class TestGCAGCAIntersection(TestCase): def test_get_GCA_GCA_intersections_antimeridian(self): # Test the case where the two GCAs are on the antimeridian - - - - GCA1 = _lonlat_rad_to_xyz(np.deg2rad(170.0), np.deg2rad(89.99)) GCR1_cart = np.array([ _lonlat_rad_to_xyz(np.deg2rad(170.0), @@ -45,6 +41,7 @@ def test_get_GCA_GCA_intersections_antimeridian(self): ]) res_cart = gca_gca_intersection(GCR1_cart, GCR2_cart) + 
res_cart = res_cart[0] # Test if the result is normalized self.assertTrue( @@ -70,7 +67,10 @@ def test_get_GCA_GCA_intersections_parallel(self): _lonlat_rad_to_xyz(-0.5 * np.pi - 0.01, 0.0) ]) res_cart = gca_gca_intersection(GCR1_cart, GCR2_cart) - self.assertTrue(np.array_equal(res_cart, np.array([]))) + res_cart = res_cart[0] + expected_res = np.array(_lonlat_rad_to_xyz(0.5 * np.pi, 0.0)) + # Test if two results are equal within the error tolerance + self.assertAlmostEqual(np.linalg.norm(res_cart - expected_res), 0.0, delta=ERROR_TOLERANCE) def test_get_GCA_GCA_intersections_perpendicular(self): # Test the case where the two GCAs are perpendicular to each other @@ -85,7 +85,7 @@ def test_get_GCA_GCA_intersections_perpendicular(self): _lonlat_rad_to_xyz(*[-0.5 * np.pi - 0.01, 0.0]) ]) res_cart = gca_gca_intersection(GCR1_cart, GCR2_cart) - + res_cart = res_cart[0] # Test if the result is normalized self.assertTrue( np.allclose(np.linalg.norm(res_cart, axis=0), @@ -139,3 +139,15 @@ def test_GCA_constLat_intersections_two_pts(self): res = gca_constLat_intersection(GCR1_cart, np.sin(query_lat), verbose=False) self.assertTrue(res.shape[0] == 2) + + + def test_GCA_constLat_intersections_no_convege(self): + # It should return an one single point and a warning about unable to be converged should be raised + GCR1_cart = np.array([[-0.59647278, 0.59647278, -0.53706651], + [-0.61362973, 0.61362973, -0.49690755]]) + + constZ = -0.5150380749100542 + + with self.assertWarns(UserWarning): + res = gca_constLat_intersection(GCR1_cart, constZ, verbose=False) + self.assertTrue(res.shape[0] == 1) diff --git a/test/test_mpas.py b/test/test_mpas.py index a4a678de7..5833e0f85 100644 --- a/test/test_mpas.py +++ b/test/test_mpas.py @@ -132,8 +132,11 @@ def test_set_attrs(self): for mpas_attr in expected_attrs: assert mpas_attr in uxgrid._ds.attrs - def test_face_mask(self): - primal_uxgrid = ux.open_grid(self.mpas_ocean_mesh, use_dual=False) - dual_uxgrid = 
ux.open_grid(self.mpas_ocean_mesh, use_dual=True) + def test_face_area(self): + """Tests the parsing of face areas for MPAS grids.""" - pass + uxgrid_primal = ux.open_grid(self.mpas_grid_path, use_dual=False) + uxgrid_dual = ux.open_grid(self.mpas_grid_path, use_dual=True) + + assert "face_areas" in uxgrid_primal._ds + assert "face_areas" in uxgrid_dual._ds diff --git a/test/test_plot.py b/test/test_plot.py index 9a503c8ea..c46806e18 100644 --- a/test/test_plot.py +++ b/test/test_plot.py @@ -80,7 +80,7 @@ def test_node_centered_data(self): uxds['v1'][0][0].plot.points(backend=backend) - uxds['v1'][0][0].nodal_average().plot.polygons(backend=backend) + uxds['v1'][0][0].topological_mean(destination='face').plot.polygons(backend=backend) def test_clabel(self): @@ -104,7 +104,6 @@ def test_dataset(self): # plot.hist() is an xarray method assert hasattr(uxds['v1'].plot, 'hist') - def test_dataarray(self): """Tests whether a Xarray Dataset method can be called through the UxDataset plotting accessor.""" diff --git a/test/test_remap.py b/test/test_remap.py index b4dc27d56..e3b6aacf3 100644 --- a/test/test_remap.py +++ b/test/test_remap.py @@ -92,7 +92,7 @@ def test_nn_remap(self): uxgrid = ux.open_grid(gridfile_ne30) uxda = uxds['v1'] - out_da = uxda.remap.nearest_neighbor(destination_obj=uxgrid, remap_to="nodes") + out_da = uxda.remap.nearest_neighbor(destination_grid=uxgrid, remap_to="nodes") # Assert the remapping was successful and the variable is populated self.assertTrue(len(out_da) != 0) @@ -112,20 +112,6 @@ def test_remap_return_types(self): assert isinstance(remap_uxda_to_grid, UxDataArray) - remap_uxda_to_uxda = source_uxds['v1'].remap.nearest_neighbor( - destination_uxds['psi']) - - # Dataset with two vars: original "psi" and remapped "v1" - assert isinstance(remap_uxda_to_uxda, UxDataset) - assert len(remap_uxda_to_uxda.data_vars) == 2 - - remap_uxda_to_uxds = source_uxds['v1'].remap.nearest_neighbor( - destination_uxds) - - # Dataset with two vars: original 
"psi" and remapped "v1" - assert isinstance(remap_uxda_to_uxds, UxDataset) - assert len(remap_uxda_to_uxds.data_vars) == 2 - remap_uxds_to_grid = source_uxds.remap.nearest_neighbor( destination_uxds.uxgrid) @@ -133,20 +119,6 @@ def test_remap_return_types(self): assert isinstance(remap_uxds_to_grid, UxDataset) assert len(remap_uxds_to_grid.data_vars) == 3 - remap_uxds_to_uxda = source_uxds.remap.nearest_neighbor( - destination_uxds['psi']) - - # Dataset with four vars: original "psi" and remapped "v1, v2, v3" - assert isinstance(remap_uxds_to_uxda, UxDataset) - assert len(remap_uxds_to_uxda.data_vars) == 4 - - remap_uxds_to_uxds = source_uxds.remap.nearest_neighbor( - destination_uxds) - - # Dataset with four vars: original "psi" and remapped "v1, v2, v3" - assert isinstance(remap_uxds_to_uxds, UxDataset) - assert len(remap_uxds_to_uxds.data_vars) == 4 - def test_edge_centers_remapping(self): """Tests the ability to remap on edge centers using Nearest Neighbor Remapping.""" @@ -155,11 +127,11 @@ def test_edge_centers_remapping(self): source_grid = ux.open_dataset(gridfile_geoflow, dsfile_v1_geoflow) destination_grid = ux.open_dataset(mpasfile_QU, mpasfile_QU) - remap_to_edge_centers = source_grid['v1'].remap.nearest_neighbor(destination_obj=destination_grid, + remap_to_edge_centers = source_grid['v1'].remap.nearest_neighbor(destination_grid=destination_grid.uxgrid, remap_to="edge centers") # Assert the data variable lies on the "edge centers" - self.assertTrue(remap_to_edge_centers['v1']._edge_centered()) + self.assertTrue(remap_to_edge_centers._edge_centered()) def test_overwrite(self): """Tests that the remapping no longer overwrites the dataset.""" @@ -169,7 +141,7 @@ def test_overwrite(self): destination_dataset = ux.open_dataset(gridfile_geoflow, dsfile_v1_geoflow) # Perform remapping - remap_to_edge_centers = source_grid['v1'].remap.nearest_neighbor(destination_obj=destination_dataset, + remap_to_edge_centers = 
source_grid['v1'].remap.nearest_neighbor(destination_grid=destination_dataset.uxgrid, remap_to="nodes") # Assert the remapped data is different from the original data @@ -254,20 +226,6 @@ def test_remap_return_types(self): assert isinstance(remap_uxda_to_grid, UxDataArray) assert len(remap_uxda_to_grid) == 1 - remap_uxda_to_uxda = source_uxds['v1'].remap.inverse_distance_weighted( - destination_uxds['psi'], power=3, k=10) - - # Dataset with two vars: original "psi" and remapped "v1" - assert isinstance(remap_uxda_to_uxda, UxDataset) - assert len(remap_uxda_to_uxda.data_vars) == 2 - - remap_uxda_to_uxds = source_uxds['v1'].remap.inverse_distance_weighted( - destination_uxds, power=3, k=10) - - # Dataset with two vars: original "psi" and remapped "v1" - assert isinstance(remap_uxda_to_uxds, UxDataset) - assert len(remap_uxda_to_uxds.data_vars) == 2 - remap_uxds_to_grid = source_uxds.remap.inverse_distance_weighted( destination_uxds.uxgrid) @@ -275,20 +233,6 @@ def test_remap_return_types(self): assert isinstance(remap_uxds_to_grid, UxDataset) assert len(remap_uxds_to_grid.data_vars) == 3 - remap_uxds_to_uxda = source_uxds.remap.inverse_distance_weighted( - destination_uxds['psi']) - - # Dataset with four vars: original "psi" and remapped "v1, v2, v3" - assert isinstance(remap_uxds_to_uxda, UxDataset) - assert len(remap_uxds_to_uxda.data_vars) == 4 - - remap_uxds_to_uxds = source_uxds.remap.inverse_distance_weighted( - destination_uxds) - - # Dataset with four vars: original "psi" and remapped "v1, v2, v3" - assert isinstance(remap_uxds_to_uxds, UxDataset) - assert len(remap_uxds_to_uxds.data_vars) == 4 - def test_edge_remapping(self): """Tests the ability to remap on edge centers using Inverse Distance Weighted Remapping.""" @@ -299,11 +243,11 @@ def test_edge_remapping(self): # Perform remapping to the edge centers of the dataset - remap_to_edge_centers = source_grid['v1'].remap.inverse_distance_weighted(destination_obj=destination_grid, + remap_to_edge_centers = 
source_grid['v1'].remap.inverse_distance_weighted(destination_grid=destination_grid.uxgrid, remap_to="edge centers") # Assert the data variable lies on the "edge centers" - self.assertTrue(remap_to_edge_centers['v1']._edge_centered()) + self.assertTrue(remap_to_edge_centers._edge_centered()) def test_overwrite(self): """Tests that the remapping no longer overwrites the dataset.""" @@ -313,7 +257,7 @@ def test_overwrite(self): destination_dataset = ux.open_dataset(gridfile_geoflow, dsfile_v1_geoflow) # Perform Remapping - remap_to_edge_centers = source_grid['v1'].remap.inverse_distance_weighted(destination_obj=destination_dataset, + remap_to_edge_centers = source_grid['v1'].remap.inverse_distance_weighted(destination_grid=destination_dataset.uxgrid, remap_to="nodes") # Assert the remapped data is different from the original data diff --git a/uxarray/constants.py b/uxarray/constants.py index a8baa9c1c..d55e25b49 100644 --- a/uxarray/constants.py +++ b/uxarray/constants.py @@ -10,7 +10,11 @@ # half of the working precision is already the most optimal value for the error tolerance, # more tailored values will be used in the future. -ERROR_TOLERANCE = 1.0e-8 +ERROR_TOLERANCE = np.float64(1.0e-8) + +# The below value is the machine epsilon for the float64 data type, it will be used in the most basic operations as a +# error tolerance, mainly in the intersection calculations. 
+MACHINE_EPSILON = np.float64(np.finfo(float).eps) ENABLE_JIT_CACHE = True ENABLE_JIT = True diff --git a/uxarray/conventions/descriptors.py b/uxarray/conventions/descriptors.py index 2cd0eb2a7..1d020b913 100644 --- a/uxarray/conventions/descriptors.py +++ b/uxarray/conventions/descriptors.py @@ -25,3 +25,9 @@ "cf_role": "edge_node_distances", "long_name": "Distances between the nodes that make up " "each edge.", } + +HOLE_EDGE_INDICES_DIMS = ["n_edge"] +HOLE_EDGE_INDICES_ATTRS = { + "cf_role": "hole_edge_indices", + "long_name": "Indices of edges that border a region of the grid not covered by any geometry", +} diff --git a/uxarray/core/dataarray.py b/uxarray/core/dataarray.py index a0b61931c..df64b6f65 100644 --- a/uxarray/core/dataarray.py +++ b/uxarray/core/dataarray.py @@ -4,7 +4,7 @@ import numpy as np -from typing import TYPE_CHECKING, Callable, Optional, Union, Hashable, Literal +from typing import TYPE_CHECKING, Callable, Optional, Hashable, Literal from uxarray.constants import GRID_DIMS from uxarray.formatting_html import array_repr @@ -21,8 +21,6 @@ from xarray.core.utils import UncachedAccessor -from warnings import warn - from uxarray.core.gradient import ( _calculate_grad_on_edge_from_faces, @@ -286,65 +284,6 @@ def to_dataset( return uxds - def nearest_neighbor_remap( - self, - destination_obj: Union[Grid, UxDataArray, UxDataset], - remap_to: str = "nodes", - coord_type: str = "spherical", - ): - """Nearest Neighbor Remapping between a source (``UxDataArray``) and - destination.`. - - Parameters - --------- - destination_obj : Grid, UxDataArray, UxDataset - Destination for remapping - remap_to : str, default="nodes" - Location of where to map data, either "nodes" or "face centers" - coord_type : str, default="spherical" - Indicates whether to remap using on spherical or cartesian coordinates - """ - warn( - "This usage of remapping will be deprecated in a future release. 
It is advised to use uxds.remap.nearest_neighbor() instead.", - DeprecationWarning, - ) - - return self.remap.nearest_neighbor(destination_obj, remap_to, coord_type) - - def inverse_distance_weighted_remap( - self, - destination_obj: Union[Grid, UxDataArray, UxDataset], - remap_to: str = "nodes", - coord_type: str = "spherical", - power=2, - k=8, - ): - """Inverse Distance Weighted Remapping between a source - (``UxDataArray``) and destination.`. - - Parameters - --------- - destination_obj : Grid, UxDataArray, UxDataset - Destination for remapping - remap_to : str, default="nodes" - Location of where to map data, either "nodes" or "face centers" - coord_type : str, default="spherical" - Indicates whether to remap using on spherical or cartesian coordinates - power : int, default=2 - Power parameter for inverse distance weighting. This controls how local or global the remapping is, a higher - power causes points that are further away to have less influence - k : int, default=8 - Number of nearest neighbors to consider in the weighted calculation. - """ - warn( - "This usage of remapping will be deprecated in a future release. It is advised to use uxds.remap.inverse_distance_weighted() instead.", - DeprecationWarning, - ) - - return self.remap.inverse_distance_weighted( - destination_obj, remap_to, coord_type, power, k - ) - def integrate( self, quadrature_rule: Optional[str] = "triangular", order: Optional[int] = 4 ) -> UxDataArray: @@ -400,20 +339,6 @@ def integrate( return uxda - def nodal_average(self): - """Computes the Nodal Average of a Data Variable, which is the mean of - the nodes that surround each face. - - Can be used for remapping node-centered data to each face. - """ - - warnings.warn( - "This function will be deprecated in a future release. 
Please use uxda.mean(destination=`face`) instead.", - DeprecationWarning, - ) - - return self.topological_mean(destination="face") - def topological_mean( self, destination: Literal["node", "edge", "face"], @@ -887,7 +812,7 @@ def gradient( Face-centered variable >>> uxds['var'].gradient() Node-centered variable - >>> uxds['var'].nodal_average().gradient() + >>> uxds['var'].topological_mean(destination="face").gradient() """ if not self._face_centered(): diff --git a/uxarray/core/dataset.py b/uxarray/core/dataset.py index 2489c23ab..21fc8f76a 100644 --- a/uxarray/core/dataset.py +++ b/uxarray/core/dataset.py @@ -5,7 +5,7 @@ import sys -from typing import Callable, Optional, IO, Union +from typing import Callable, Optional, IO from uxarray.constants import GRID_DIMS from uxarray.grid import Grid @@ -373,62 +373,3 @@ def neighborhood_filter( destination_uxds[var_name] = uxda.transpose(*remember_dim_order) return destination_uxds - - def nearest_neighbor_remap( - self, - destination_obj: Union[Grid, UxDataArray, UxDataset], - remap_to: str = "nodes", - coord_type: str = "spherical", - ): - """Nearest Neighbor Remapping between a source (``UxDataset``) and - destination.`. - - Parameters - --------- - destination_obj : Grid, UxDataArray, UxDataset - Destination for remapping - remap_to : str, default="nodes" - Location of where to map data, either "nodes", "edge centers", or "face centers" - coord_type : str, default="spherical" - Indicates whether to remap using on spherical or cartesian coordinates - """ - warn( - "This usage of remapping will be deprecated in a future release. 
It is advised to use uxds.remap.nearest_neighbor() instead.", - DeprecationWarning, - ) - - return self.remap.nearest_neighbor(destination_obj, remap_to, coord_type) - - def inverse_distance_weighted_remap( - self, - destination_obj: Union[Grid, UxDataArray, UxDataset], - remap_to: str = "nodes", - coord_type: str = "spherical", - power=2, - k=8, - ): - """Inverse Distance Weighted Remapping between a source (``UxDataset``) - and destination.`. - - Parameters - --------- - destination_obj : Grid, UxDataArray, UxDataset - Destination for remapping - remap_to : str, default="nodes" - Location of where to map data, either "nodes", "edge centers", or "face centers" - coord_type : str, default="spherical" - Indicates whether to remap using on spherical or cartesian coordinates - power : int, default=2 - Power parameter for inverse distance weighting. This controls how local or global the remapping is, a higher - power causes points that are further away to have less influence - k : int, default=8 - Number of nearest neighbors to consider in the weighted calculation. - """ - warn( - "This usage of remapping will be deprecated in a future release. 
It is advised to use uxds.remap.inverse_distance_weighted() instead.", - DeprecationWarning, - ) - - return self.remap.inverse_distance_weighted( - destination_obj, remap_to, coord_type, power, k - ) diff --git a/uxarray/grid/arcs.py b/uxarray/grid/arcs.py index 63344473e..2bffa2177 100644 --- a/uxarray/grid/arcs.py +++ b/uxarray/grid/arcs.py @@ -2,10 +2,13 @@ # from uxarray.grid.coordinates import node_xyz_to_lonlat_rad, normalize_in_place -from uxarray.grid.coordinates import _xyz_to_lonlat_rad_no_norm, _normalize_xyz_scalar -from uxarray.constants import ERROR_TOLERANCE +from uxarray.grid.coordinates import ( + _xyz_to_lonlat_rad_scalar, + _normalize_xyz_scalar, +) +from uxarray.constants import ERROR_TOLERANCE, MACHINE_EPSILON -from uxarray.utils.computing import isclose, cross, dot +from uxarray.utils.computing import isclose, cross, dot, allclose from numba import njit @@ -26,24 +29,33 @@ def _point_within_gca_body( angle, gca_cart, pt, GCRv0_lonlat, GCRv1_lonlat, pt_lonlat, is_directed ): angle = _angle_of_2_vectors(gca_cart[0], gca_cart[1]) - if isclose(angle, np.pi, rtol=0.0, atol=ERROR_TOLERANCE): + if allclose(angle, np.pi, rtol=0.0, atol=MACHINE_EPSILON): raise ValueError( "The input Great Circle Arc is exactly 180 degree, this Great Circle Arc can have multiple planes. 
" "Consider breaking the Great Circle Arc" "into two Great Circle Arcs" ) - if not isclose( - dot(cross(np.asarray(gca_cart[0]), np.asarray(gca_cart[1])), pt), + # See if the point is on the plane of the GCA, because we are dealing with floating point numbers with np.dot now + # just using the rtol=MACHINE_EPSILON, atol=MACHINE_EPSILON, but consider using the more proper error tolerance + # in the future + cross_product = cross(np.asarray(gca_cart[0]), np.asarray(gca_cart[1])) + + if not allclose( + dot(np.asarray(cross_product), np.asarray(pt)), # Custom dot function 0, - rtol=0.0, - atol=ERROR_TOLERANCE, + rtol=MACHINE_EPSILON, + atol=MACHINE_EPSILON, ): return False - if isclose(GCRv0_lonlat[0], GCRv1_lonlat[0], rtol=0.0, atol=ERROR_TOLERANCE): + if isclose( + GCRv0_lonlat[0], GCRv1_lonlat[0], rtol=MACHINE_EPSILON, atol=MACHINE_EPSILON + ): # If the pt and the GCA are on the same longitude (the y coordinates are the same) - if isclose(GCRv0_lonlat[0], pt_lonlat[0], rtol=0.0, atol=ERROR_TOLERANCE): + if isclose( + GCRv0_lonlat[0], pt_lonlat[0], rtol=MACHINE_EPSILON, atol=MACHINE_EPSILON + ): # Now use the latitude to determine if the pt falls between the interval return in_between(GCRv0_lonlat[1], pt_lonlat[1], GCRv1_lonlat[1]) else: @@ -51,24 +63,61 @@ def _point_within_gca_body( return False # If the longnitude span is exactly 180 degree, then the GCA goes through the pole point - if isclose( - abs(GCRv1_lonlat[0] - GCRv0_lonlat[0]), np.pi, rtol=0.0, atol=ERROR_TOLERANCE + # Or if one of the endpoints is on the pole point, then the GCA goes through the pole point + if ( + isclose( + abs(GCRv1_lonlat[0] - GCRv0_lonlat[0]), + np.pi, + rtol=0.0, + atol=MACHINE_EPSILON, + ) + or isclose( + abs(GCRv0_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=ERROR_TOLERANCE + ) + or isclose( + abs(GCRv1_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=ERROR_TOLERANCE + ) ): # Special case, if the pt is on the pole point, then set its longitude to the GCRv0_lonlat[0] - if 
isclose(abs(pt_lonlat[1]), np.pi / 2, rtol=0.0, atol=ERROR_TOLERANCE): + # Since the point is our calculated properly, we use the atol=ERROR_TOLERANCE and rtol=ERROR_TOLERANCE + if isclose( + abs(pt_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=ERROR_TOLERANCE + ): pt_lonlat[0] = GCRv0_lonlat[0] + + # Special case, if one of the GCA endpoints is on the pole point, and another endpoint is not + # then we need to check if the pt is on the GCA + if isclose( + abs(GCRv0_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=0.0 + ) or isclose(abs(GCRv1_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=0.0): + # Identify the non-pole endpoint + non_pole_endpoint = None + if not isclose( + abs(GCRv0_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=0.0 + ): + non_pole_endpoint = GCRv0_lonlat + elif not isclose( + abs(GCRv1_lonlat[1]), np.pi / 2, rtol=ERROR_TOLERANCE, atol=0.0 + ): + non_pole_endpoint = GCRv1_lonlat + + if non_pole_endpoint is not None and not isclose( + non_pole_endpoint[0], pt_lonlat[0], rtol=ERROR_TOLERANCE, atol=0.0 + ): + return False + if not isclose( - GCRv0_lonlat[0], pt_lonlat[0], rtol=0.0, atol=ERROR_TOLERANCE + GCRv0_lonlat[0], pt_lonlat[0], rtol=ERROR_TOLERANCE, atol=0.0 ) and not isclose( - GCRv1_lonlat[0], pt_lonlat[0], rtol=0.0, atol=ERROR_TOLERANCE + GCRv1_lonlat[0], pt_lonlat[0], rtol=ERROR_TOLERANCE, atol=0.0 ): return False else: # Determine the pole latitude and latitude extension - if (GCRv0_lonlat[1] > 0 and GCRv1_lonlat[1] > 0) or ( - GCRv0_lonlat[1] < 0 and GCRv1_lonlat[1] < 0 + if (GCRv0_lonlat[1] > 0.0 and GCRv1_lonlat[1] > 0.0) or ( + GCRv0_lonlat[1] < 0.0 and GCRv1_lonlat[1] < 0.0 ): - pole_lat = np.pi / 2 if GCRv0_lonlat[1] > 0 else -np.pi / 2 + pole_lat = np.pi / 2 if GCRv0_lonlat[1] > 0.0 else -np.pi / 2 lat_extend = ( abs(np.pi / 2 - abs(GCRv0_lonlat[1])) + np.pi / 2 @@ -104,7 +153,7 @@ def _point_within_gca_body( # The necessary condition: the pt longitude is on the opposite side of the anti-meridian # Case 2: The 
anti-meridian case where 180 -->x0 --> 0 lon --> x1 --> 180 lon - elif 2 * np.pi > GCRv0_lonlat[0] > np.pi > GCRv1_lonlat[0] > 0: + elif 2 * np.pi > GCRv0_lonlat[0] > np.pi > GCRv1_lonlat[0] > 0.0: return in_between( GCRv0_lonlat[0], pt_lonlat[0], 2 * np.pi ) or in_between(0, pt_lonlat[0], GCRv1_lonlat[0]) @@ -167,12 +216,18 @@ def point_within_gca(pt, gca_cart, is_directed=False): Please ensure that the input coordinates are in radians and adhere to the ERROR_TOLERANCE value for floating-point comparisons. """ # Convert the cartesian coordinates to lonlat coordinates - pt_lonlat = np.array(_xyz_to_lonlat_rad_no_norm(pt[0], pt[1], pt[2])) + pt_lonlat = np.array( + _xyz_to_lonlat_rad_scalar(pt[0], pt[1], pt[2], normalize=False) + ) GCRv0_lonlat = np.array( - _xyz_to_lonlat_rad_no_norm(gca_cart[0][0], gca_cart[0][1], gca_cart[0][2]) + _xyz_to_lonlat_rad_scalar( + gca_cart[0][0], gca_cart[0][1], gca_cart[0][2], normalize=False + ) ) GCRv1_lonlat = np.array( - _xyz_to_lonlat_rad_no_norm(gca_cart[1][0], gca_cart[1][1], gca_cart[1][2]) + _xyz_to_lonlat_rad_scalar( + gca_cart[1][0], gca_cart[1][1], gca_cart[1][2], normalize=False + ) ) gca_cart = np.asarray(gca_cart) @@ -312,9 +367,9 @@ def extreme_gca_latitude(gca_cart, extreme_type): or isclose(d_a_max, 1, atol=ERROR_TOLERANCE) else d_a_max ) - - _, lat_n1 = _xyz_to_lonlat_rad_no_norm(n1[0], n1[1], n1[2]) - _, lat_n2 = _xyz_to_lonlat_rad_no_norm(n2[0], n2[1], n2[2]) + # Before we make sure the grid coordinates are normalized, do not try to skip the normalization steps! 
+ _, lat_n1 = _xyz_to_lonlat_rad_scalar(n1[0], n1[1], n1[2], normalize=True) + _, lat_n2 = _xyz_to_lonlat_rad_scalar(n2[0], n2[1], n2[2], normalize=True) if 0 < d_a_max < 1: node3 = (1 - d_a_max) * n1 + d_a_max * n2 diff --git a/uxarray/grid/coordinates.py b/uxarray/grid/coordinates.py index d34f0fc20..f25553321 100644 --- a/uxarray/grid/coordinates.py +++ b/uxarray/grid/coordinates.py @@ -68,6 +68,55 @@ def _xyz_to_lonlat_rad_no_norm( return lon, lat +@njit +def _xyz_to_lonlat_rad_scalar( + x: Union[np.ndarray, float], + y: Union[np.ndarray, float], + z: Union[np.ndarray, float], + normalize: bool = True, +): + """Converts a Cartesian x,y,z coordinates into Spherical latitude and + longitude without normalization, decorated with Numba. + + Parameters + ---------- + x : float + Cartesian x coordinate + y: float + Cartesiain y coordinate + z: float + Cartesian z coordinate + + + Returns + ------- + lon : float + Longitude in radians + lat: float + Latitude in radians + """ + + if normalize: + x, y, z = _normalize_xyz_scalar(x, y, z) + denom = np.abs(x * x + y * y + z * z) + x /= denom + y /= denom + z /= denom + + lon = math.atan2(y, x) + lat = math.asin(z) + + # set longitude range to [0, pi] + lon = np.mod(lon, 2 * np.pi) + + z_mask = np.abs(z) > 1.0 - ERROR_TOLERANCE + + lat = np.where(z_mask, np.sign(z) * np.pi / 2, lat) + lon = np.where(z_mask, 0.0, lon) + + return lon, lat + + def _xyz_to_lonlat_rad( x: Union[np.ndarray, float], y: Union[np.ndarray, float], @@ -103,8 +152,8 @@ def _xyz_to_lonlat_rad( y /= denom z /= denom - lon = np.arctan2(y, x, dtype=np.float64) - lat = np.arcsin(z, dtype=np.float64) + lon = np.arctan2(y, x) + lat = np.arcsin(z) # set longitude range to [0, pi] lon = np.mod(lon, 2 * np.pi) diff --git a/uxarray/grid/geometry.py b/uxarray/grid/geometry.py index 08229d1a0..ac3624e38 100644 --- a/uxarray/grid/geometry.py +++ b/uxarray/grid/geometry.py @@ -6,8 +6,7 @@ _get_cartesian_face_edge_nodes, _get_lonlat_rad_face_edge_nodes, ) - -from 
uxarray.utils.computing import isclose, allclose, all +from uxarray.utils.computing import allclose, isclose import warnings import pandas as pd import xarray as xr @@ -25,6 +24,57 @@ REFERENCE_POINT_EQUATOR = np.array([1.0, 0.0, 0.0]) +def _unique_points(points, tolerance=ERROR_TOLERANCE): + """Identify unique intersection points from a list of points, considering + floating point precision errors. + + Parameters + ---------- + points : list of array-like + A list containing the intersection points, where each point is an array-like structure (e.g., list, tuple, or numpy array) containing x, y, and z coordinates. + tolerance : float, optional + The distance threshold within which two points are considered identical. Default is 1e-6. + + Returns + ------- + list of np.ndarray + A list of unique snapped points in Cartesian coordinates. + + Notes + ----- + Given the nature of the mathematical equations and the spherical surface, it is more reasonable to calculate the "error radius" of the results using the following formula. + In the equation below, \(\tilde{P}_x\) and \(\tilde{P}_y\) are the calculated results, and \(P_x\) and \(P_y\) are the actual intersection points for the \(x\) and \(y\) coordinates, respectively. + The \(z\) coordinate is always the input \(z_0\), the constant latitude, so it is not included in this error calculation. + + .. math:: + \begin{aligned} + &\frac{\sqrt{(\tilde{v}_x - v_x)^2 + (\tilde{v}_y - v_y)^2 + (\tilde{v}_z - v_z)^2}}{\sqrt{v_x^2 + v_y^2 + v_z^2}}\\ + &= \sqrt{(\tilde{v}_x - v_x)^2 + (\tilde{v}_y - v_y)^2 + (\tilde{v}_z - v_z)^2} + \end{aligned} + + This method ensures that small numerical inaccuracies do not lead to multiple close points being considered different. 
+ """ + unique_points = [] + points = [np.array(point) for point in points] # Ensure all points are numpy arrays + + def error_radius(p1, p2): + """Calculate the error radius between two points in 3D space.""" + numerator = np.sqrt( + (p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2 + (p1[2] - p2[2]) ** 2 + ) + denominator = np.sqrt(p2[0] ** 2 + p2[1] ** 2 + p2[2] ** 2) + return numerator / denominator + + for point in points: + if not any( + error_radius(point, unique_point) < tolerance + for unique_point in unique_points + ): + unique_points.append(point) + + return unique_points + + # General Helpers for Polygon Viz # ---------------------------------------------------------------------------------------------------------------------- @njit @@ -433,17 +483,27 @@ def _pole_point_inside_polygon(pole, face_edge_cart): ref_edge_south = np.array([-pole_point, REFERENCE_POINT_EQUATOR]) north_edges = face_edge_cart[np.any(face_edge_cart[:, :, 2] > 0, axis=1)] - south_edges = face_edge_cart[np.any(face_edge_cart[:, :, 2] < 0, axis=1)] - + south_edges = face_edge_cart[ + ~np.isin(face_edge_cart, north_edges).all(axis=(1, 2)) + ] + # The equator one is assigned to the south edges return ( _check_intersection(ref_edge_north, north_edges) + _check_intersection(ref_edge_south, south_edges) ) % 2 != 0 + elif ( + location == "North" + and pole == "South" + or location == "South" + and pole == "North" + ): + return False else: - warnings.warn( - "The given face should not contain both pole points.", UserWarning + raise ValueError( + "Invalid pole point query. Current location: {}, query pole point: {}".format( + location, pole + ) ) - return False def _check_intersection(ref_edge, edges): @@ -463,17 +523,36 @@ def _check_intersection(ref_edge, edges): Count of intersections. 
""" pole_point, ref_point = ref_edge - intersection_count = 0 + intersection_points = [] for edge in edges: intersection_point = gca_gca_intersection(ref_edge, edge) if intersection_point.size != 0: - if allclose(intersection_point, pole_point, atol=ERROR_TOLERANCE): - return True - intersection_count += 1 + # Handle both single point and multiple points case + if ( + intersection_point.ndim == 1 + ): # If there's only one point, make it a 2D array + intersection_point = [intersection_point] # Convert to list of points + + # for each intersection point, check if it is a pole point + for point in intersection_point: + if allclose(point, pole_point, atol=ERROR_TOLERANCE): + return True + intersection_points.append(point) - return intersection_count + # Only return the unique intersection points, the unique tolerance is set to ERROR_TOLERANCE + intersection_points = _unique_points(intersection_points, tolerance=ERROR_TOLERANCE) + + # If the unique intersection point is one and it is exactly one of the nodes of the face, return 0 + if len(intersection_points) == 1: + for edge in edges: + if allclose( + intersection_points[0], edge[0], atol=ERROR_TOLERANCE + ) or allclose(intersection_points[0], edge[1], atol=ERROR_TOLERANCE): + return 0 + + return len(intersection_points) def _classify_polygon_location(face_edge_cart): @@ -490,9 +569,9 @@ def _classify_polygon_location(face_edge_cart): Returns either 'North', 'South' or 'Equator' based on the polygon's location. 
""" z_coords = face_edge_cart[:, :, 2] - if all(z_coords > 0): + if np.all(z_coords > 0): return "North" - elif all(z_coords < 0): + elif np.all(z_coords < 0): return "South" else: return "Equator" @@ -586,7 +665,7 @@ def _insert_pt_in_latlonbox(old_box, new_pt, is_lon_periodic=True): >>> _insert_pt_in_latlonbox(np.array([[1.0, 2.0], [3.0, 4.0]]),np.array([1.5, 3.5])) array([[1.0, 2.0], [3.0, 4.0]]) """ - if all(new_pt == INT_FILL_VALUE): + if np.all(new_pt == INT_FILL_VALUE): return old_box latlon_box = np.copy(old_box) # Create a copy of the old box @@ -614,8 +693,9 @@ def _insert_pt_in_latlonbox(old_box, new_pt, is_lon_periodic=True): # Check for pole points and update latitudes is_pole_point = ( lon_pt == INT_FILL_VALUE - and isclose(new_pt[0], 0.5 * np.pi, atol=ERROR_TOLERANCE) - or isclose(new_pt[0], -0.5 * np.pi, atol=ERROR_TOLERANCE) + and isclose( + new_pt[0], np.asarray([0.5 * np.pi, -0.5 * np.pi]), atol=ERROR_TOLERANCE + ).any() ) if is_pole_point: @@ -938,7 +1018,7 @@ def _populate_bounds( intervals_tuple_list = [] intervals_name_list = [] - face_edges_cartesian = _get_cartesian_face_edge_nodes( + faces_edges_cartesian = _get_cartesian_face_edge_nodes( grid.face_node_connectivity.values, grid.n_face, grid.n_max_face_edges, @@ -947,7 +1027,7 @@ def _populate_bounds( grid.node_z.values, ) - face_edges_lonlat_rad = _get_lonlat_rad_face_edge_nodes( + faces_edges_lonlat_rad = _get_lonlat_rad_face_edge_nodes( grid.face_node_connectivity.values, grid.n_face, grid.n_max_face_edges, @@ -955,42 +1035,34 @@ def _populate_bounds( grid.node_lat.values, ) - face_node_connectivity = grid.face_node_connectivity.values - - # # TODO: vectorize dummy face check - s = face_edges_cartesian.shape - dummy_face_face_edges_cart = np.any( - face_edges_cartesian.reshape((s[0], s[1] * s[2] * s[3])) == INT_FILL_VALUE, - axis=1, - ) + for face_idx, face_nodes in enumerate(grid.face_node_connectivity): + face_edges_cartesian = faces_edges_cartesian[face_idx] - s = 
face_edges_lonlat_rad.shape - dummy_face_face_edges_latlon = np.any( - face_edges_lonlat_rad.reshape((s[0], s[1] * s[2] * s[3])) == INT_FILL_VALUE, - axis=1, - ) + # Remove the edge in the face that contains the fill value + face_edges_cartesian = face_edges_cartesian[ + np.all(face_edges_cartesian != INT_FILL_VALUE, axis=(1, 2)) + ] - dummy_faces = dummy_face_face_edges_cart | dummy_face_face_edges_latlon + face_edges_lonlat_rad = faces_edges_lonlat_rad[face_idx] - for face_idx, face_nodes in enumerate(face_node_connectivity): - if dummy_faces[face_idx]: - # skip dummy faces - continue + # Remove the edge in the face that contains the fill value + face_edges_lonlat_rad = face_edges_lonlat_rad[ + np.all(face_edges_lonlat_rad != INT_FILL_VALUE, axis=(1, 2)) + ] is_GCA_list = ( is_face_GCA_list[face_idx] if is_face_GCA_list is not None else None ) temp_latlon_array[face_idx] = _populate_face_latlon_bound( - face_edges_cartesian[face_idx], - face_edges_lonlat_rad[face_idx], + face_edges_cartesian, + face_edges_lonlat_rad, is_latlonface=is_latlonface, is_GCA_list=is_GCA_list, ) - # # do we need this ? 
- # assert temp_latlon_array[face_idx][0][0] != temp_latlon_array[face_idx][0][1] - # assert temp_latlon_array[face_idx][1][0] != temp_latlon_array[face_idx][1][1] + assert temp_latlon_array[face_idx][0][0] != temp_latlon_array[face_idx][0][1] + assert temp_latlon_array[face_idx][1][0] != temp_latlon_array[face_idx][1][1] lat_array = temp_latlon_array[face_idx][0] # Now store the latitude intervals in the tuples @@ -1010,7 +1082,7 @@ def _populate_bounds( attrs={ "cf_role": "face_latlon_bounds", "_FillValue": INT_FILL_VALUE, - "long_name": "Latitude and Longitude bounds for each face in radians.", + "long_name": "Provides the latitude and longitude bounds for each face in radians.", "start_index": INT_DTYPE(0), "latitude_intervalsIndex": intervalsIndex, "latitude_intervals_name_map": df_intervals_map, @@ -1021,3 +1093,12 @@ def _populate_bounds( return bounds else: grid._ds["bounds"] = bounds + + +def _construct_hole_edge_indices(edge_face_connectivity): + """Index the missing edges on a partial grid with holes, that is a region + of the grid that is not covered by any geometry.""" + + # If an edge only has one face saddling it than the mesh has holes in it + edge_with_holes = np.where(edge_face_connectivity[:, 1] == INT_FILL_VALUE)[0] + return edge_with_holes diff --git a/uxarray/grid/grid.py b/uxarray/grid/grid.py index 97b9f5d1f..b1ff3e43c 100644 --- a/uxarray/grid/grid.py +++ b/uxarray/grid/grid.py @@ -54,6 +54,7 @@ _grid_to_matplotlib_polycollection, _grid_to_matplotlib_linecollection, _populate_bounds, + _construct_hole_edge_indices, ) from uxarray.grid.neighbors import ( @@ -1085,6 +1086,16 @@ def face_jacobian(self): _ = self.face_areas return self._face_jacobian + @property + def hole_edge_indices(self): + """Indices of edges that border a region of the grid not covered by any + geometry, know as a hole, in a partial grid.""" + if "hole_edge_indices" not in self._ds: + self._ds["hole_edge_indices"] = _construct_hole_edge_indices( + 
self.edge_face_connectivity.values + ) + return self._ds["hole_edge_indices"] + def chunk(self, n_node="auto", n_edge="auto", n_face="auto"): """Converts all arrays to dask arrays with given chunks across grid dimensions in-place. diff --git a/uxarray/grid/integrate.py b/uxarray/grid/integrate.py index 16cc13194..a562fc2ca 100644 --- a/uxarray/grid/integrate.py +++ b/uxarray/grid/integrate.py @@ -4,13 +4,15 @@ from uxarray.grid.coordinates import _xyz_to_lonlat_rad import pandas as pd +from uxarray.utils.computing import isclose + DUMMY_EDGE_VALUE = [INT_FILL_VALUE, INT_FILL_VALUE, INT_FILL_VALUE] def _get_zonal_faces_weight_at_constLat( - faces_edges_cart, + faces_edges_cart_candidate, latitude_cart, - face_latlon_bound, + face_latlon_bound_candidate, is_directed=False, is_latlonface=False, is_face_GCA_list=None, @@ -20,14 +22,19 @@ def _get_zonal_faces_weight_at_constLat( Parameters ---------- - face_edges_cart : np.ndarray - A list of face polygon represented by edges in Cartesian coordinates. Shape: (n_faces, n_edges, 2, 3) + faces_edges_cart_candidate : np.ndarray + A list of the candidate face polygon represented by edges in Cartesian coordinates. + The faces must be selected in the previous step such that they will be intersected by the constant latitude. + It should have the same shape as the face_latlon_bound_candidate. + The input should not contain any 'INT_FILL_VALUE'. Shape: (n_faces(candidate), n_edges, 2, 3) latitude_cart : float The latitude in Cartesian coordinates (The normalized z coordinate) - face_latlon_bound : np.ndarray - The list of latitude and longitude bounds of faces. Shape: (n_faces,2, 2), + face_latlon_bound_candidate : np.ndarray + The list of latitude and longitude bounds of candidate faces. + It should have the same shape as the face_edges_cart_candidate. + Shape: (n_faces(candidate),,2, 2), [...,[lat_min, lat_max], [lon_min, lon_max],...] 
is_directed : bool, optional (default=False) @@ -52,11 +59,43 @@ def _get_zonal_faces_weight_at_constLat( - 'face_index': The index of the face (integer). - 'weight': The calculated weight of the face in radian (float). The DataFrame is indexed by the face indices, providing a mapping from each face to its corresponding weight. + + Notes + ----- + Special handling is implemented for the cases when the latitude_cart is close to 1 or -1, + which corresponds to the poles (90 and -90 degrees). In these cases, if a pole point is + inside a face, that face's value is the only value that should be considered. If the pole + point is not inside any face, it lies on the boundary of surrounding faces, and their weights + are considered evenly since they only contain points rather than intervals. + This treatment is hard-coded in the function and should be tested with appropriate test cases. """ + + # Special case if the latitude_cart is 1 or -1, meaning right at the pole + # If the latitude_cart is close to 1 or -1 (indicating the pole), handle it separately. + # The -90 and 90 treatment is hard-coded in the function, based on: + # If a pole point is inside a face, then this face's value is the only value that should be considered. + # If the pole point is not inside any face, then it's on the boundary of faces around it, so their weights are even. 
+ if isclose(latitude_cart, 1, atol=ERROR_TOLERANCE) or isclose( + latitude_cart, -1, atol=ERROR_TOLERANCE + ): + # Now all candidate faces( the faces around the pole) are considered as the same weight + # If the face encompases the pole, then the weight is 1 + weights = { + face_index: 1 / len(faces_edges_cart_candidate) + for face_index in range(len(faces_edges_cart_candidate)) + } + # Convert weights to DataFrame + weights_df = pd.DataFrame( + list(weights.items()), columns=["face_index", "weight"] + ) + return weights_df + intervals_list = [] # Iterate through all faces and their edges - for face_index, face_edges in enumerate(faces_edges_cart): + for face_index, face_edges in enumerate(faces_edges_cart_candidate): + # Remove the Int_fill_value from the face_edges + face_edges = face_edges[np.all(face_edges != INT_FILL_VALUE, axis=(1, 2))] if is_face_GCA_list is not None: is_GCA_list = is_face_GCA_list[face_index] else: @@ -64,28 +103,55 @@ def _get_zonal_faces_weight_at_constLat( face_interval_df = _get_zonal_face_interval( face_edges, latitude_cart, - face_latlon_bound[face_index], + face_latlon_bound_candidate[face_index], is_directed=is_directed, is_latlonface=is_latlonface, is_GCA_list=is_GCA_list, ) - for _, row in face_interval_df.iterrows(): - intervals_list.append( - {"start": row["start"], "end": row["end"], "face_index": face_index} - ) + # If any end of the interval is NaN + if face_interval_df.isnull().values.any(): + # Skip this face as it is just being touched by the constant latitude + continue + # Check if the DataFrame is empty (start and end are both 0) + if (face_interval_df["start"] == 0).all() and ( + face_interval_df["end"] == 0 + ).all(): + # Skip this face as it is just being touched by the constant latitude + continue + else: + for _, row in face_interval_df.iterrows(): + intervals_list.append( + {"start": row["start"], "end": row["end"], "face_index": face_index} + ) intervals_df = pd.DataFrame(intervals_list) - overlap_contributions, 
total_length = _process_overlapped_intervals(intervals_df) + try: + overlap_contributions, total_length = _process_overlapped_intervals( + intervals_df + ) - # Calculate weights for each face - weights = { - face_index: overlap_contributions.get(face_index, 0.0) / total_length - for face_index in range(len(faces_edges_cart)) - } + # Calculate weights for each face + weights = { + face_index: overlap_contributions.get(face_index, 0.0) / total_length + for face_index in range(len(faces_edges_cart_candidate)) + } - # Convert weights to DataFrame - weights_df = pd.DataFrame(list(weights.items()), columns=["face_index", "weight"]) - return weights_df + # Convert weights to DataFrame + weights_df = pd.DataFrame( + list(weights.items()), columns=["face_index", "weight"] + ) + return weights_df + + except ValueError: + print(f"Face index: {face_index}") + print(f"Face edges information: {face_edges}") + print(f"Constant z0: {latitude_cart}") + print( + f"Face latlon bound information: {face_latlon_bound_candidate[face_index]}" + ) + print(f"Face interval information: {face_interval_df}") + # Handle the exception or propagate it further if necessary + raise def _is_edge_gca(is_GCA_list, is_latlonface, edges_z): @@ -149,8 +215,8 @@ def _get_faces_constLat_intersection_info( tuple A tuple containing: - intersections_pts_list_cart (list): A list of intersection points where each point is where an edge intersects with the latitude. - - overlap_flag (bool): A boolean indicating if any overlap with the latitude was detected. - - overlap_edge (np.ndarray or None): The edge that overlaps with the latitude, if any; otherwise, None. + - pt_lon_min (float): The min longnitude of the interseted intercal in radian if any; otherwise, None.. + - pt_lon_max (float): The max longnitude of the interseted intercal in radian, if any; otherwise, None. 
""" valid_edges_mask = ~(np.any(face_edges_cart == DUMMY_EDGE_VALUE, axis=(1, 2))) @@ -181,6 +247,7 @@ def _get_faces_constLat_intersection_info( intersections = gca_constLat_intersection( edge, latitude_cart, is_directed=is_directed ) + if intersections.size == 0: continue elif intersections.shape[0] == 2: @@ -188,8 +255,9 @@ def _get_faces_constLat_intersection_info( else: intersections_pts_list_cart.append(intersections[0]) - # Handle unique intersections and check for convex or concave cases + # Find the unique intersection points unique_intersections = np.unique(intersections_pts_list_cart, axis=0) + if len(unique_intersections) == 2: # TODO: vectorize? unique_intersection_lonlat = np.array( @@ -199,10 +267,28 @@ def _get_faces_constLat_intersection_info( sorted_lonlat = np.sort(unique_intersection_lonlat, axis=0) pt_lon_min, pt_lon_max = sorted_lonlat[:, 0] return unique_intersections, pt_lon_min, pt_lon_max - elif len(unique_intersections) != 0: - raise ValueError( - "UXarray doesn't support concave face with intersections points as currently, please modify your grids accordingly" - ) + elif len(unique_intersections) == 1: + return unique_intersections, None, None + elif len(unique_intersections) != 0 and len(unique_intersections) != 1: + # If the unique intersections numbers is larger than n_edges * 2, then it means the face is concave + if len(unique_intersections) > len(valid_edges) * 2: + raise ValueError( + "UXarray doesn't support concave face with intersections points as currently, please modify your grids accordingly" + ) + else: + # Now return all the intersections points and the pt_lon_min, pt_lon_max + unique_intersection_lonlat = np.array( + [_xyz_to_lonlat_rad(pt[0], pt[1], pt[2]) for pt in unique_intersections] + ) + + sorted_lonlat = np.sort(unique_intersection_lonlat, axis=0) + # Extract the minimum and maximum longitudes + pt_lon_min, pt_lon_max = ( + np.min(sorted_lonlat[:, 0]), + np.max(sorted_lonlat[:, 0]), + ) + + return 
unique_intersections, pt_lon_min, pt_lon_max elif len(unique_intersections) == 0: raise ValueError( "No intersections are found for the face, please make sure the build_latlon_box generates the correct results" @@ -257,40 +343,87 @@ def _get_zonal_face_interval( face_lon_bound_left, face_lon_bound_right = face_latlon_bound[1] # Call the vectorized function to process all edges - ( - unique_intersections, - pt_lon_min, - pt_lon_max, - ) = _get_faces_constLat_intersection_info( - face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed - ) + try: + # Call the vectorized function to process all edges + unique_intersections, pt_lon_min, pt_lon_max = ( + _get_faces_constLat_intersection_info( + face_edges_cart, latitude_cart, is_GCA_list, is_latlonface, is_directed + ) + ) - # Convert intersection points to longitude-latitude - longitudes = np.array( - [_xyz_to_lonlat_rad(*pt.tolist())[0] for pt in unique_intersections] - ) + # Handle the special case where the unique_intersections is 1, which means the face is just being touched + if len(unique_intersections) == 1: + # If the face is just being touched, then just return the empty DataFrame + return pd.DataFrame({"start": [0.0], "end": [0.0]}, index=[0]) - # Handle special wrap-around cases by checking the face bounds - if face_lon_bound_left >= face_lon_bound_right: - if not ( - (pt_lon_max >= np.pi and pt_lon_min >= np.pi) - or (0 <= pt_lon_max <= np.pi and 0 <= pt_lon_min <= np.pi) + # Convert intersection points to longitude-latitude + longitudes = np.array( + [_xyz_to_lonlat_rad(*pt.tolist())[0] for pt in unique_intersections] + ) + + # Handle special wrap-around cases by checking the face bounds + if face_lon_bound_left >= face_lon_bound_right or ( + face_lon_bound_left == 0 and face_lon_bound_right == 2 * np.pi + ): + if not ( + (pt_lon_max >= np.pi and pt_lon_min >= np.pi) + or (0 <= pt_lon_max <= np.pi and 0 <= pt_lon_min <= np.pi) + ): + # If the anti-meridian is crossed, instead of just being 
touched,add the wrap-around points + if pt_lon_max != 2 * np.pi and pt_lon_min != 0: + # They're at different sides of the 0-lon, adding wrap-around points + longitudes = np.append(longitudes, [0.0, 2 * np.pi]) + elif pt_lon_max >= np.pi and pt_lon_min == 0: + # That means the face is actually from pt_lon_max to 2*pi. + # Replace the 0 in longnitude with 2*pi + longitudes[longitudes == 0] = 2 * np.pi + + # Ensure longitudes are sorted + longitudes = np.unique(longitudes) + longitudes.sort() + + # Split the sorted longitudes into start and end points of intervals + starts = longitudes[::2] # Start points + ends = longitudes[1::2] # End points + + # Create the intervals DataFrame + Intervals_df = pd.DataFrame({"start": starts, "end": ends}) + # For consistency, sort the intervals by start + interval_df_sorted = Intervals_df.sort_values(by="start").reset_index(drop=True) + return interval_df_sorted + + except ValueError as e: + default_print_options = np.get_printoptions() + if ( + str(e) + == "No intersections are found for the face, please make sure the build_latlon_box generates the correct results" ): - # They're at different sides of the 0-lon, adding wrap-around points - longitudes = np.append(longitudes, [0.0, 2 * np.pi]) + # Set print options for full precision + np.set_printoptions(precision=16, suppress=False) - # Ensure longitudes are sorted - longitudes.sort() + print( + "ValueError: No intersections are found for the face, please make sure the build_latlon_box generates the correct results" + ) + print(f"Face edges information: {face_edges_cart}") + print(f"Constant z_0: {latitude_cart}") + print(f"Face latlon bound information: {face_latlon_bound}") - # Split the sorted longitudes into start and end points of intervals - starts = longitudes[::2] # Start points - ends = longitudes[1::2] # End points + # Reset print options to default + np.set_printoptions(**default_print_options) + + raise + else: + # Set print options for full precision + 
np.set_printoptions(precision=17, suppress=False) - # Create the intervals DataFrame - Intervals_df = pd.DataFrame({"start": starts, "end": ends}) - # For consistency, sort the intervals by start - interval_df_sorted = Intervals_df.sort_values(by="start").reset_index(drop=True) - return interval_df_sorted + print(f"Face edges information: {face_edges_cart}") + print(f"Constant z_0: {latitude_cart}") + print(f"Face latlon bound information: {face_latlon_bound}") + + # Reset print options to default + np.set_printoptions(**default_print_options) + + raise # Re-raise the exception if it's not the expected ValueError def _process_overlapped_intervals(intervals_df): @@ -327,7 +460,7 @@ def _process_overlapped_intervals(intervals_df): events.append((row["start"], "start", row["face_index"])) events.append((row["end"], "end", row["face_index"])) - events.sort() # Sort by position and then by start/end + events.sort(key=lambda x: (x[0], x[1])) active_faces = set() last_position = None @@ -335,6 +468,8 @@ def _process_overlapped_intervals(intervals_df): overlap_contributions = {} for position, event_type, face_idx in events: + if face_idx == 51: + pass if last_position is not None and active_faces: segment_length = position - last_position segment_weight = segment_length / len(active_faces) if active_faces else 0 @@ -345,9 +480,28 @@ def _process_overlapped_intervals(intervals_df): total_length += segment_length if event_type == "start": - active_faces.add(face_idx) + # use try catch to handle the case where the face_idx is not be able to be added + try: + active_faces.add(face_idx) + except Exception as e: + print(f"An error occurred: {e}") + print(f"Face index: {face_idx}") + print(f"Position: {position}") + print(f"Event type: {event_type}") + print(f"Active faces: {active_faces}") + print(f"Last position: {last_position}") + print(f"Total length: {total_length}") + print(f"Overlap contributions: {overlap_contributions}") + print(f"Intervals data: {intervals_df}") + 
raise + elif event_type == "end": - active_faces.remove(face_idx) + if face_idx in active_faces: + active_faces.remove(face_idx) + else: + raise ValueError( + f"Error: Trying to remove face_idx {face_idx} which is not in active_faces" + ) last_position = position diff --git a/uxarray/grid/intersections.py b/uxarray/grid/intersections.py index d57969db5..81829a68a 100644 --- a/uxarray/grid/intersections.py +++ b/uxarray/grid/intersections.py @@ -1,15 +1,13 @@ import numpy as np -from uxarray.constants import ERROR_TOLERANCE +from uxarray.constants import MACHINE_EPSILON, ERROR_TOLERANCE from uxarray.grid.utils import _newton_raphson_solver_for_gca_constLat -from uxarray.grid.arcs import point_within_gca +from uxarray.grid.arcs import point_within_gca, extreme_gca_latitude, in_between import platform import warnings -from uxarray.utils.computing import cross_fma, allclose, cross, dot +from uxarray.utils.computing import cross_fma, allclose, dot, cross, norm -import uxarray.constants - -def gca_gca_intersection(gca1_cart, gca2_cart): +def gca_gca_intersection(gca1_cart, gca2_cart, fma_disabled=True): """Calculate the intersection point(s) of two Great Circle Arcs (GCAs) in a Cartesian coordinate system. @@ -29,21 +27,13 @@ def gca_gca_intersection(gca1_cart, gca2_cart): Returns ------- np.ndarray - Cartesian coordinates of the intersection point(s). + Cartesian coordinates of the intersection point(s). Returns an empty array if no intersections, + a 2D array with one row if one intersection, and a 2D array with two rows if two intersections. Raises ------ ValueError If the input GCAs are not in the cartesian [x, y, z] format. - - - - Warning - ------- - If the current input data cannot be computed accurately using floating-point arithmetic. Use with care - - If running on the Windows system with fma_disabled=False since the C/C++ implementation of FMA in MS Windows - is fundamentally broken. 
(bug report: https://bugs.python.org/msg312480) """ # Support lists as an input @@ -56,13 +46,12 @@ def gca_gca_intersection(gca1_cart, gca2_cart): w0, w1 = gca1_cart v0, v1 = gca2_cart - if not uxarray.constants.ENABLE_FMA: - # Compute normals and orthogonal bases without FMA + # Compute normals and orthogonal bases using FMA + if fma_disabled: w0w1_norm = cross(w0, w1) v0v1_norm = cross(v0, v1) cross_norms = cross(w0w1_norm, v0v1_norm) else: - # Compute normals and orthogonal bases using FMA w0w1_norm = cross_fma(w0, w1) v0v1_norm = cross_fma(v0, v1) cross_norms = cross_fma(w0w1_norm, v0v1_norm) @@ -76,46 +65,54 @@ def gca_gca_intersection(gca1_cart, gca2_cart): ) # Check perpendicularity conditions and floating-point arithmetic limitations - if not allclose(dot(w0w1_norm, w0), 0, atol=ERROR_TOLERANCE) or not allclose( - dot(w0w1_norm, w1), 0, atol=ERROR_TOLERANCE + if not allclose(dot(w0w1_norm, w0), 0.0, atol=MACHINE_EPSILON) or not allclose( + dot(w0w1_norm, w1), 0.0, atol=MACHINE_EPSILON ): warnings.warn( "The current input data cannot be computed accurately using floating-point arithmetic. Use with caution." ) - if not allclose(dot(v0v1_norm, v0), 0, atol=ERROR_TOLERANCE) or not allclose( - dot(v0v1_norm, v1), 0, atol=ERROR_TOLERANCE + if not allclose(dot(v0v1_norm, v0), 0.0, 0.0, atol=MACHINE_EPSILON) or not allclose( + dot(v0v1_norm, v1), 0.0, atol=MACHINE_EPSILON ): warnings.warn( "The current input data cannot be computed accurately using floating-point arithmetic. Use with caution. " ) if not allclose( - dot(cross_norms, v0v1_norm), 0, atol=ERROR_TOLERANCE - ) or not allclose(dot(cross_norms, w0w1_norm), 0, atol=ERROR_TOLERANCE): + dot(cross_norms, v0v1_norm), 0.0, atol=MACHINE_EPSILON + ) or not allclose(dot(cross_norms, w0w1_norm), 0.0, atol=MACHINE_EPSILON): warnings.warn( "The current input data cannot be computed accurately using floating-point arithmetic. Use with caution. 
" ) # If the cross_norms is zero, the two GCAs are parallel - if allclose(cross_norms, 0, atol=ERROR_TOLERANCE): - return np.array([]) + if allclose(cross_norms, 0.0, atol=MACHINE_EPSILON): + res = [] + # Check if the two GCAs are overlapping + if point_within_gca(v0, [w0, w1]): + # The two GCAs are overlapping, return both end points + res.append(v0) + + if point_within_gca(v1, [w0, w1]): + res.append(v1) + return np.array(res) # Normalize the cross_norms - cross_norms = cross_norms / np.linalg.norm(cross_norms) + cross_norms = cross_norms / norm(cross_norms) x1 = cross_norms x2 = -x1 - res = np.array([]) + res = [] # Determine which intersection point is within the GCAs range if point_within_gca(x1, [w0, w1]) and point_within_gca(x1, [v0, v1]): - res = np.append(res, x1) + res.append(x1) - elif point_within_gca(x2, [w0, w1]) and point_within_gca(x2, [v0, v1]): - res = np.append(res, x2) + if point_within_gca(x2, [w0, w1]) and point_within_gca(x2, [v0, v1]): + res.append(x2) - return res + return np.array(res) def gca_constLat_intersection( @@ -155,9 +152,37 @@ def gca_constLat_intersection( ------- If running on the Windows system with fma_disabled=False since the C/C++ implementation of FMA in MS Windows is fundamentally broken. (bug report: https://bugs.python.org/msg312480) + + If the intersection point cannot be converged using the Newton-Raphson method, the initial guess intersection + point is used instead, proceed with caution. """ x1, x2 = gca_cart + # Check if the constant latitude has the same latitude as the GCA endpoints + # We are using the relative tolerance and ERROR_TOLERANCE since the constZ is calculated from np.sin, which + # may have some floating-point error. 
+    res = None
+    if np.isclose(x1[2], constZ, rtol=ERROR_TOLERANCE, atol=ERROR_TOLERANCE):
+        res = np.array([x1]) if res is None else np.vstack((res, x1))
+
+    if np.isclose(x2[2], constZ, rtol=ERROR_TOLERANCE, atol=ERROR_TOLERANCE):
+        res = np.array([x2]) if res is None else np.vstack((res, x2))
+
+    if res is not None:
+        return res
+
+    # If the constant latitude is not the same as the GCA endpoints, calculate the intersection point
+    lat_min = extreme_gca_latitude(gca_cart, extreme_type="min")
+    lat_max = extreme_gca_latitude(gca_cart, extreme_type="max")
+
+    constLat_rad = np.arcsin(constZ)
+
+    # Check if the constant latitude is within the GCA range
+    # Because the constant latitude is calculated from np.sin, which may have some floating-point error,
+    # the comparison against the arc's latitude extremes is approximate.
+    if not in_between(lat_min, constLat_rad, lat_max):
+        return np.array([])
+
     if fma_disabled:
         n = cross(x1, x2)
@@ -173,7 +198,7 @@

     nx, ny, nz = n

-    s_tilde = np.sqrt(nx**2 + ny**2 - np.linalg.norm(n) ** 2 * constZ**2)
+    s_tilde = np.sqrt(nx**2 + ny**2 - (nx**2 + ny**2 + nz**2) * constZ**2)
     p1_x = -(1.0 / (nx**2 + ny**2)) * (constZ * nx * nz + s_tilde * ny)
     p2_x = -(1.0 / (nx**2 + ny**2)) * (constZ * nx * nz - s_tilde * ny)
     p1_y = -(1.0 / (nx**2 + ny**2)) * (constZ * ny * nz - s_tilde * nx)
@@ -186,19 +211,46 @@

     # Now test which intersection point is within the GCA range
     if point_within_gca(p1, gca_cart, is_directed=is_directed):
-        converged_pt = _newton_raphson_solver_for_gca_constLat(
-            p1, gca_cart, verbose=verbose
-        )
-        res = (
-            np.array([converged_pt]) if res is None else np.vstack((res, converged_pt))
-        )
+        try:
+            converged_pt = _newton_raphson_solver_for_gca_constLat(
+                p1, gca_cart, verbose=verbose
+            )
+
+            if converged_pt is None:
+                # The point cannot be converged using the Newton-Raphson method; warn and fall back to the initial guess p1
+                warnings.warn(
+                    "The intersection point cannot be converged using the Newton-Raphson method. 
" + "The initial guess intersection point is used instead, procced with caution." + ) + res = np.array([p1]) if res is None else np.vstack((res, p1)) + else: + res = ( + np.array([converged_pt]) + if res is None + else np.vstack((res, converged_pt)) + ) + except RuntimeError: + raise RuntimeError(f"Error encountered with initial guess: {p1}") if point_within_gca(p2, gca_cart, is_directed=is_directed): - converged_pt = _newton_raphson_solver_for_gca_constLat( - p2, gca_cart, verbose=verbose - ) - res = ( - np.array([converged_pt]) if res is None else np.vstack((res, converged_pt)) - ) + try: + converged_pt = _newton_raphson_solver_for_gca_constLat( + p2, gca_cart, verbose=verbose + ) + if converged_pt is None: + # The point is not be able to be converged using the jacobi method, raise a warning and continue with p2 + warnings.warn( + "The intersection point cannot be converged using the Newton-Raphson method. " + "The initial guess intersection point is used instead, procced with caution." 
+ ) + res = np.array([p2]) if res is None else np.vstack((res, p2)) + else: + res = ( + np.array([converged_pt]) + if res is None + else np.vstack((res, converged_pt)) + ) + except RuntimeError: + raise RuntimeError(f"Error encountered with initial guess: {p2}") return res if res is not None else np.array([]) diff --git a/uxarray/grid/utils.py b/uxarray/grid/utils.py index 020aae1d9..33f9f75da 100644 --- a/uxarray/grid/utils.py +++ b/uxarray/grid/utils.py @@ -1,5 +1,5 @@ import numpy as np -from uxarray.constants import ERROR_TOLERANCE, INT_FILL_VALUE +from uxarray.constants import INT_FILL_VALUE, MACHINE_EPSILON import warnings import uxarray.utils.computing as ac_utils @@ -116,11 +116,22 @@ def _inv_jacobian(x0, x1, y0, y1, z0, z1, x_i_old, y_i_old): [ac_utils._fmms(y0, z1, z0, y1), ac_utils._fmms(x0, z1, z0, x1)], [2 * x_i_old, 2 * y_i_old], ] + + # First check if the Jacobian matrix is singular + if np.linalg.matrix_rank(jacobian) < 2: + warnings.warn("The Jacobian matrix is singular.") + return None + try: inverse_jacobian = np.linalg.inv(jacobian) - except np.linalg.LinAlgError: - raise warnings("Warning: Singular Jacobian matrix encountered.") - return None + except np.linalg.LinAlgError as e: + # Print out the error message + + cond_number = np.linalg.cond(jacobian) + print(f"Condition number: {cond_number}") + print(f"Jacobian matrix:\n{jacobian}") + print(f"An error occurred: {e}") + raise return inverse_jacobian @@ -141,7 +152,7 @@ def _newton_raphson_solver_for_gca_constLat( Returns: np.ndarray or None: The intersection point or None if the solver fails to converge. 
""" - tolerance = ERROR_TOLERANCE + tolerance = MACHINE_EPSILON * 100 w0_cart, w1_cart = gca_cart error = float("inf") constZ = init_cart[2] @@ -164,19 +175,23 @@ def _newton_raphson_solver_for_gca_constLat( ] ) - j_inv = _inv_jacobian( - w0_cart[0], - w1_cart[0], - w0_cart[1], - w1_cart[1], - w0_cart[2], - w1_cart[2], - y_guess[0], - y_guess[1], - ) + try: + j_inv = _inv_jacobian( + w0_cart[0], + w1_cart[0], + w0_cart[1], + w1_cart[1], + w0_cart[2], + w1_cart[2], + y_guess[0], + y_guess[1], + ) - if j_inv is None: - return None + if j_inv is None: + return None + except RuntimeError as e: + print(f"Encountered an error: {e}") + raise y_new = y_guess - np.matmul(j_inv, f_vector) error = np.max(np.abs(y_guess - y_new)) diff --git a/uxarray/grid/validation.py b/uxarray/grid/validation.py index 1a4852711..4f74249a5 100644 --- a/uxarray/grid/validation.py +++ b/uxarray/grid/validation.py @@ -1,6 +1,7 @@ import numpy as np from warnings import warn + from uxarray.constants import ERROR_TOLERANCE diff --git a/uxarray/io/_mpas.py b/uxarray/io/_mpas.py index af155967e..d9f83142b 100644 --- a/uxarray/io/_mpas.py +++ b/uxarray/io/_mpas.py @@ -59,6 +59,9 @@ def _primal_to_ugrid(in_ds, out_ds): if "cellsOnCell" in in_ds: _parse_face_faces(in_ds, out_ds) + if "areaCell" in in_ds: + _parse_face_areas(in_ds, out_ds, mesh_type="primal") + # set global attributes _parse_global_attrs(in_ds, out_ds) @@ -121,6 +124,9 @@ def _dual_to_ugrid(in_ds, out_ds): if "dcEdge" in in_ds: _parse_edge_face_distances(in_ds, out_ds) + if "areaTriangle" in in_ds: + _parse_face_areas(in_ds, out_ds, mesh_type="dual") + # set global attributes _parse_global_attrs(in_ds, out_ds) @@ -489,6 +495,21 @@ def _parse_face_faces(in_ds, out_ds): ) +def _parse_face_areas(in_ds, out_ds, mesh_type): + """Parses the face area for either a primal or dual grid.""" + + if mesh_type == "primal": + face_area = in_ds["areaCell"].data + else: + face_area = in_ds["areaTriangle"].data + + out_ds["face_areas"] = xr.DataArray( + 
data=face_area, + dims=descriptors.FACE_AREAS_DIMS, + attrs=descriptors.FACE_AREAS_ATTRS, + ) + + def _replace_padding(verticesOnCell, nEdgesOnCell): """Replaces the padded values in verticesOnCell defined by nEdgesOnCell with a fill-value. diff --git a/uxarray/remap/dataarray_accessor.py b/uxarray/remap/dataarray_accessor.py index 4a5f21dfe..3ea5f78db 100644 --- a/uxarray/remap/dataarray_accessor.py +++ b/uxarray/remap/dataarray_accessor.py @@ -1,6 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional -from warnings import warn +from typing import TYPE_CHECKING from uxarray.remap.nearest_neighbor import _nearest_neighbor_uxda from uxarray.remap.inverse_distance_weighted import ( @@ -8,7 +7,6 @@ ) if TYPE_CHECKING: - from uxarray.core.dataset import UxDataset from uxarray.core.dataarray import UxDataArray from uxarray.grid import Grid @@ -31,8 +29,7 @@ def __repr__(self): def nearest_neighbor( self, - destination_grid: Optional[Grid] = None, - destination_obj: Optional[Grid, UxDataArray, UxDataset] = None, + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", ): @@ -43,38 +40,17 @@ def nearest_neighbor( --------- destination_grid : Grid Destination Grid for remapping - destination_obj : Grid, UxDataArray, UxDataset - Optional destination for remapping, deprecating remap_to : str, default="nodes" Location of where to map data, either "nodes" or "face centers" coord_type : str, default="spherical" Indicates whether to remap using on spherical or cartesian coordinates """ - if destination_grid is not None and destination_obj is not None: - raise ValueError( - "Only one destination allowed, " - "please remove either `destination_grid` or `destination_obj`." 
- ) - elif destination_grid is None and destination_obj is None: - raise ValueError("Destination needed for remap.") - if destination_grid is not None: - return _nearest_neighbor_uxda( - self.uxda, destination_grid, remap_to, coord_type - ) - elif destination_obj is not None: - warn( - "destination_obj will be deprecated in a future release. Please use destination_grid instead.", - DeprecationWarning, - ) - return _nearest_neighbor_uxda( - self.uxda, destination_obj, remap_to, coord_type - ) + return _nearest_neighbor_uxda(self.uxda, destination_grid, remap_to, coord_type) def inverse_distance_weighted( self, - destination_grid: Optional[Grid] = None, - destination_obj: Optional[Grid, UxDataArray, UxDataset] = None, + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", power=2, @@ -87,8 +63,6 @@ def inverse_distance_weighted( --------- destination_grid : Grid Destination Grid for remapping - destination_obj : Grid, UxDataArray, UxDataset - Optional destination for remapping, deprecating remap_to : str, default="nodes" Location of where to map data, either "nodes" or "face centers" coord_type : str, default="spherical" @@ -99,23 +73,7 @@ def inverse_distance_weighted( k : int, default=8 Number of nearest neighbors to consider in the weighted calculation. """ - if destination_grid is not None and destination_obj is not None: - raise ValueError( - "Only one destination allowed, " - "please remove either `destination_grid` or `destination_obj`." - ) - elif destination_grid is None and destination_obj is None: - raise ValueError("Destination needed for remap.") - if destination_grid is not None: - return _inverse_distance_weighted_remap_uxda( - self.uxda, destination_grid, remap_to, coord_type, power, k - ) - elif destination_obj is not None: - warn( - "destination_obj will be deprecated in a future release. 
Please use destination_grid instead.", - DeprecationWarning, - ) - return _inverse_distance_weighted_remap_uxda( - self.uxda, destination_obj, remap_to, coord_type, power, k - ) + return _inverse_distance_weighted_remap_uxda( + self.uxda, destination_grid, remap_to, coord_type, power, k + ) diff --git a/uxarray/remap/dataset_accessor.py b/uxarray/remap/dataset_accessor.py index d5a9edf3f..dbf58e4ad 100644 --- a/uxarray/remap/dataset_accessor.py +++ b/uxarray/remap/dataset_accessor.py @@ -1,6 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional -from warnings import warn +from typing import TYPE_CHECKING from uxarray.remap.nearest_neighbor import _nearest_neighbor_uxds from uxarray.remap.inverse_distance_weighted import ( @@ -9,7 +8,6 @@ if TYPE_CHECKING: from uxarray.core.dataset import UxDataset - from uxarray.core.dataarray import UxDataArray from uxarray.grid import Grid @@ -31,8 +29,7 @@ def __repr__(self): def nearest_neighbor( self, - destination_grid: Optional[Grid] = None, - destination_obj: Optional[Grid, UxDataArray, UxDataset] = None, + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", ): @@ -43,39 +40,17 @@ def nearest_neighbor( --------- destination_grid : Grid Destination Grid for remapping - destination_obj : Grid, UxDataArray, UxDataset - Optional destination for remapping, deprecating remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" coord_type : str, default="spherical" Indicates whether to remap using on spherical or cartesian coordinates """ - if destination_grid is not None and destination_obj is not None: - raise ValueError( - "Only one destination allowed, " - "please remove either `destination_grid` or `destination_obj`." 
- ) - elif destination_grid is None and destination_obj is None: - raise ValueError("Destination needed for remap.") - - if destination_grid is not None: - return _nearest_neighbor_uxds( - self.uxds, destination_grid, remap_to, coord_type - ) - elif destination_obj is not None: - warn( - "destination_obj will be deprecated in a future release. Please use destination_grid instead.", - DeprecationWarning, - ) - return _nearest_neighbor_uxds( - self.uxds, destination_obj, remap_to, coord_type - ) + return _nearest_neighbor_uxds(self.uxds, destination_grid, remap_to, coord_type) def inverse_distance_weighted( self, - destination_grid: Optional[Grid] = None, - destination_obj: Optional[Grid, UxDataArray, UxDataset] = None, + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", power=2, @@ -88,8 +63,6 @@ def inverse_distance_weighted( --------- destination_grid : Grid Destination Grid for remapping - destination_obj : Grid, UxDataArray, UxDataset - Optional destination for remapping, deprecating remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" coord_type : str, default="spherical" @@ -101,23 +74,6 @@ def inverse_distance_weighted( Number of nearest neighbors to consider in the weighted calculation. """ - if destination_grid is not None and destination_obj is not None: - raise ValueError( - "Only one destination allowed, " - "please remove either `destination_grid` or `destination_obj`." - ) - elif destination_grid is None and destination_obj is None: - raise ValueError("Destination needed for remap.") - - if destination_grid is not None: - return _inverse_distance_weighted_remap_uxds( - self.uxds, destination_grid, remap_to, coord_type, power, k - ) - elif destination_obj is not None: - warn( - "destination_obj will be deprecated in a future release. 
Please use destination_grid instead.", - DeprecationWarning, - ) - return _inverse_distance_weighted_remap_uxds( - self.uxds, destination_obj, remap_to, coord_type, power, k - ) + return _inverse_distance_weighted_remap_uxds( + self.uxds, destination_grid, remap_to, coord_type, power, k + ) diff --git a/uxarray/remap/inverse_distance_weighted.py b/uxarray/remap/inverse_distance_weighted.py index 219ed0fb2..92cf1602f 100644 --- a/uxarray/remap/inverse_distance_weighted.py +++ b/uxarray/remap/inverse_distance_weighted.py @@ -1,5 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING if TYPE_CHECKING: from uxarray.core.dataset import UxDataset @@ -159,7 +159,7 @@ def _inverse_distance_weighted_remap( def _inverse_distance_weighted_remap_uxda( source_uxda: UxDataArray, - destination_obj: Union[Grid, UxDataArray, UxDataset], + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", power=2, @@ -171,8 +171,8 @@ def _inverse_distance_weighted_remap_uxda( --------- source_uxda : UxDataArray Source UxDataArray for remapping - destination_obj : Grid, UxDataArray, UxDataset - Destination for remapping + destination_grid : Grid + Destination grid for remapping remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" coord_type : str, default="spherical" @@ -206,16 +206,6 @@ def _inverse_distance_weighted_remap_uxda( destination_dims = list(source_uxda.dims) destination_dims[-1] = destination_dim - if isinstance(destination_obj, Grid): - destination_grid = destination_obj - elif isinstance( - destination_obj, - (uxarray.core.dataarray.UxDataArray, uxarray.core.dataset.UxDataset), - ): - destination_grid = destination_obj.uxgrid - else: - raise ValueError("TODO: Invalid Input") - # perform remapping destination_data = _inverse_distance_weighted_remap( source_uxda.uxgrid, @@ -234,26 +224,14 @@ def 
_inverse_distance_weighted_remap_uxda( dims=destination_dims, uxgrid=destination_grid, ) - # add remapped variable to existing UxDataset - if isinstance(destination_obj, uxarray.core.dataset.UxDataset): - uxds = destination_obj.copy() - uxds[source_uxda.name] = uxda_remap - return uxds - - # construct a UxDataset from remapped variable and existing variable - elif isinstance(destination_obj, uxarray.core.dataset.UxDataArray): - uxds = destination_obj.copy().to_dataset() - uxds[source_uxda.name] = uxda_remap - return uxds # return UxDataArray with remapped variable - else: - return uxda_remap + return uxda_remap def _inverse_distance_weighted_remap_uxds( source_uxds: UxDataset, - destination_obj: Union[Grid, UxDataArray, UxDataset], + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", power=2, @@ -265,7 +243,7 @@ def _inverse_distance_weighted_remap_uxds( --------- source_uxds : UxDataset Source UxDataset for remapping - destination_obj : Grid, UxDataArray, UxDataset + destination_grid : Grid Destination for remapping remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" @@ -277,20 +255,11 @@ def _inverse_distance_weighted_remap_uxds( k : int, default=8 Number of nearest neighbors to consider in the weighted calculation. 
""" - - if isinstance(destination_obj, Grid): - destination_uxds = uxarray.core.dataset.UxDataset(uxgrid=destination_obj) - elif isinstance(destination_obj, uxarray.core.dataset.UxDataArray): - destination_uxds = destination_obj.to_dataset() - elif isinstance(destination_obj, uxarray.core.dataset.UxDataset): - destination_uxds = destination_obj - else: - raise ValueError - + destination_uxds = uxarray.UxDataset(uxgrid=destination_grid) for var_name in source_uxds.data_vars: - destination_uxds = _inverse_distance_weighted_remap_uxda( + destination_uxds[var_name] = _inverse_distance_weighted_remap_uxda( source_uxds[var_name], - destination_uxds, + destination_grid, remap_to, coord_type, power, diff --git a/uxarray/remap/nearest_neighbor.py b/uxarray/remap/nearest_neighbor.py index 744f974a3..1923bd1b7 100644 --- a/uxarray/remap/nearest_neighbor.py +++ b/uxarray/remap/nearest_neighbor.py @@ -1,5 +1,5 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING if TYPE_CHECKING: from uxarray.core.dataset import UxDataset @@ -150,7 +150,7 @@ def _nearest_neighbor( def _nearest_neighbor_uxda( source_uxda: UxDataArray, - destination_obj: Union[Grid, UxDataArray, UxDataset], + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", ): @@ -160,7 +160,7 @@ def _nearest_neighbor_uxda( --------- source_uxda : UxDataArray Source UxDataArray for remapping - destination_obj : Grid, UxDataArray, UxDataset + destination_grid : Grid Destination for remapping remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" @@ -180,16 +180,6 @@ def _nearest_neighbor_uxda( destination_dims = list(source_uxda.dims) destination_dims[-1] = destination_dim - if isinstance(destination_obj, Grid): - destination_grid = destination_obj - elif isinstance( - destination_obj, - (uxarray.core.dataarray.UxDataArray, uxarray.core.dataset.UxDataset), - ): - 
destination_grid = destination_obj.uxgrid - else: - raise ValueError("TODO: Invalid Input") - # perform remapping destination_data = _nearest_neighbor( source_uxda.uxgrid, destination_grid, source_uxda.data, remap_to, coord_type @@ -202,26 +192,13 @@ def _nearest_neighbor_uxda( dims=destination_dims, uxgrid=destination_grid, ) - # add remapped variable to existing UxDataset - if isinstance(destination_obj, uxarray.core.dataset.UxDataset): - uxds = destination_obj.copy() - uxds[source_uxda.name] = uxda_remap - return uxds - - # construct a UxDataset from remapped variable and existing variable - elif isinstance(destination_obj, uxarray.core.dataset.UxDataArray): - uxds = destination_obj.copy().to_dataset() - uxds[source_uxda.name] = uxda_remap - return uxds - # return UxDataArray with remapped variable - else: - return uxda_remap + return uxda_remap def _nearest_neighbor_uxds( source_uxds: UxDataset, - destination_obj: Union[Grid, UxDataArray, UxDataset], + destination_grid: Grid, remap_to: str = "face centers", coord_type: str = "spherical", ): @@ -231,26 +208,17 @@ def _nearest_neighbor_uxds( --------- source_uxds : UxDataset Source UxDataset for remapping - destination_obj : Grid, UxDataArray, UxDataset + destination_grid : Grid Destination for remapping remap_to : str, default="nodes" Location of where to map data, either "nodes", "edge centers", or "face centers" coord_type : str, default="spherical" Indicates whether to remap using on Spherical or Cartesian coordinates """ - - if isinstance(destination_obj, Grid): - destination_uxds = uxarray.core.dataset.UxDataset(uxgrid=destination_obj) - elif isinstance(destination_obj, uxarray.core.dataset.UxDataArray): - destination_uxds = destination_obj.to_dataset() - elif isinstance(destination_obj, uxarray.core.dataset.UxDataset): - destination_uxds = destination_obj - else: - raise ValueError - + destination_uxds = uxarray.UxDataset(uxgrid=destination_grid) for var_name in source_uxds.data_vars: - destination_uxds = 
_nearest_neighbor_uxda( - source_uxds[var_name], destination_uxds, remap_to, coord_type + destination_uxds[var_name] = _nearest_neighbor_uxda( + source_uxds[var_name], destination_grid, remap_to, coord_type ) return destination_uxds diff --git a/uxarray/utils/computing.py b/uxarray/utils/computing.py index f9929deb4..6baa25313 100644 --- a/uxarray/utils/computing.py +++ b/uxarray/utils/computing.py @@ -61,6 +61,17 @@ def dot(a, b): return np.dot(a, b) +@njit +def norm(x): + """Numba decorated implementation of ``np.linalg.norm()`` + + See Also + -------- + numpy.linalg.norm + """ + return np.linalg.norm(x) + + def _fmms(a, b, c, d): """ Calculate the difference of products using the FMA (fused multiply-add) operation: (a * b) - (c * d).