From de95a87fa9e5b2e6b230bccb95c6bfe2468bb12d Mon Sep 17 00:00:00 2001
From: Sotiris
Date: Wed, 17 Jan 2024 11:51:12 +0100
Subject: [PATCH 01/14] Add wave reader

---
 .coverage                  | Bin 53248 -> 53248 bytes
 cov.xml                    | 199 ++++++++++++++++++++---------------
 tests/test_io_interface.py |   2 +-
 tests/test_readers_nwp.py  |  61 ++++++++++-
 unimodel/io/interface.py   |   5 +-
 unimodel/io/readers_nwp.py | 208 ++++++++++++++++++++++++++++---------
 6 files changed, 338 insertions(+), 137 deletions(-)

diff --git a/.coverage b/.coverage
index f7556aa6246987a9a4047f919be05f767ace8a8f..68fdb631a5b63242147a25358071c6508bf36ad5 100644
[GIT binary patch omitted: base85 delta for the binary .coverage database is not human-readable]

diff --git a/cov.xml b/cov.xml
[coverage-report XML diff omitted: the line-hit markup was lost during extraction and cannot be reconstructed]

diff --git a/tests/test_io_interface.py b/tests/test_io_interface.py
index bad5604..8ff8bbc 100644
--- a/tests/test_io_interface.py
+++ b/tests/test_io_interface.py
@@ -51,5 +51,5 @@ def test_io_interface(self):
             "'bolam', 'icon', 'moloch_gfs', 'moloch_ecm',"
             " 'wrf_ecm', 'wrf_exp', 'wrf_gfs_3', "
             "'wrf_gfs_9', 'ecmwf', 'ecmwf_hres', 'ecmwf_ens', "
-            "'unified_model', 'wrf_tl_ens', 'gfs', 'gefs']",
+            "'unified_model', 'wrf_tl_ens', 'gfs', 'gefs', 'swan', 'ww3']",
         )
diff --git a/tests/test_readers_nwp.py b/tests/test_readers_nwp.py
index 5c43ba0..80a2ce7 100644
--- a/tests/test_readers_nwp.py
+++ b/tests/test_readers_nwp.py
@@ -2,6 +2,7 @@
 """
 import unittest
 import os
+import numpy as np
 
 from unimodel.io.readers_nwp import (
     read_arome_grib,
@@ -14,6 +15,8 @@
     read_wrf_prs,
     read_wrf_tl_ens_grib,
     read_ncep_grib,
+    read_swan_grib,
+    read_ww3_grib,
 )
@@ -296,7 +299,7 @@ def test_read_unified_model_grib(self):
     def test_read_wrf_tl_ens_grib(self):
         """Tests WRF-TL-ENS member grib to xarray"""
 
-        file = "tests/data/nwp_src/wrf_tl_ens/tl_ens-03-002.2023101900_01.grib"
+        file = "tests/data/nwp_src/wrf_tl_ens/ens-002.2023032009_01.grib"
         file_idx = file + ".02ccc.idx"
         variable = "tp"
         data_var = read_wrf_tl_ens_grib(file, variable, "wrf_tl_ens")
@@ -308,7 +311,7 @@
         self.assertEqual(data_var.x.shape[0], 231)
         self.assertEqual(data_var.y.shape[0], 161)
 
-        self.assertAlmostEqual(data_var.values[76, 160], 0.0, 2)
+        self.assertAlmostEqual(data_var.values[76, 160], 0.109, 2)
         self.assertAlmostEqual(data_var.values[160, 72], 0.0)
 
         self.assertAlmostEqual(data_var.rio.transform().a, 0.025)
@@ -371,3 +374,57 @@ def test_read_ncep_grib(self):
         self.assertAlmostEqual(data_var.rio.transform().f, 38.75)
 
         self.assertFalse(os.path.isfile(file_idx))
+
+    def test_read_swan_grib(self):
+        """Tests SWAN grib to xarray"""
+
+        # GFS file test
+        file_gfs = "tests/data/nwp_src/swan/swan-03.2024011000_00.grb"
+        file_idx = file_gfs + ".02ccc.idx"
+        variable = "swh"
+        data_var = read_swan_grib(file_gfs, variable, "swan")
+
+        self.assertEqual(data_var.rio.crs.data["proj"], "longlat")
+        self.assertEqual(data_var.rio.crs.data["datum"], "WGS84")
+
+        self.assertEqual(data_var.x.shape[0], 218)
+        self.assertEqual(data_var.y.shape[0], 185)
+
+        self.assertAlmostEqual(data_var.values[120, 190], 0.659, 2)
+        self.assertTrue(np.isnan(data_var.values[0, 0]))
+
+        self.assertAlmostEqual(data_var.rio.transform().a, 0.03)
+        self.assertAlmostEqual(data_var.rio.transform().b, 0.0)
+        self.assertAlmostEqual(data_var.rio.transform().c, -0.845)
+        self.assertAlmostEqual(data_var.rio.transform().d, 0.0)
+        self.assertAlmostEqual(data_var.rio.transform().e, 0.03)
+        self.assertAlmostEqual(data_var.rio.transform().f, 38.415)
+
+        self.assertFalse(os.path.isfile(file_idx))
+
+    def test_read_ww3_grib(self):
+        """Tests WW3 grib to xarray"""
+
+        # GFS file test
+        file_gfs = "tests/data/nwp_src/ww3/ww3-03.2024011000_00.grb"
+        file_idx = file_gfs + ".02ccc.idx"
+        variable = "swh"
+        data_var = read_ww3_grib(file_gfs, variable, "ww3")
+
+        self.assertEqual(data_var.rio.crs.data["proj"], "longlat")
+        self.assertEqual(data_var.rio.crs.data["datum"], "WGS84")
+
+        self.assertEqual(data_var.x.shape[0], 218)
+        self.assertEqual(data_var.y.shape[0], 185)
+
+        self.assertAlmostEqual(data_var.values[120, 190], 0.563, 2)
+        self.assertTrue(np.isnan(data_var.values[0, 0]))
+
+        self.assertAlmostEqual(data_var.rio.transform().a, 0.03)
+        self.assertAlmostEqual(data_var.rio.transform().b, 0.0)
+        self.assertAlmostEqual(data_var.rio.transform().c, -0.845)
+        self.assertAlmostEqual(data_var.rio.transform().d, 0.0)
+        self.assertAlmostEqual(data_var.rio.transform().e, 0.03)
+        self.assertAlmostEqual(data_var.rio.transform().f, 38.415)
+
+        self.assertFalse(os.path.isfile(file_idx))
diff --git a/unimodel/io/interface.py b/unimodel/io/interface.py
index cb46a88..456df31 100644
--- a/unimodel/io/interface.py
+++ b/unimodel/io/interface.py
@@ -4,7 +4,8 @@
                                        read_bolam_grib, read_ecmwf_grib,
                                        read_icon_grib, read_moloch_grib,
                                        read_unified_model_grib, read_wrf_prs,
-                                       read_wrf_tl_ens_grib, read_ncep_grib)
+                                       read_wrf_tl_ens_grib, read_ncep_grib,
+                                       read_swan_grib, read_ww3_grib)
 
 _readers = dict()
 _readers['arome'] = read_arome_grib
@@ -24,6 +25,8 @@
 _readers['wrf_tl_ens'] = read_wrf_tl_ens_grib
 _readers['gfs'] = read_ncep_grib
 _readers['gefs'] = read_ncep_grib
+_readers['swan'] = read_swan_grib
+_readers['ww3'] = read_ww3_grib
 
 
 def get_reader(name):
diff --git a/unimodel/io/readers_nwp.py b/unimodel/io/readers_nwp.py
index fd48284..7d14118 100644
--- a/unimodel/io/readers_nwp.py
+++ b/unimodel/io/readers_nwp.py
@@ -11,7 +11,7 @@
 
 
 def read_wrf_prs(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads a WRF grib file and transforms it into an xarray.DataArray.
 
@@ -27,8 +27,7 @@ def read_wrf_prs(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -36,8 +35,8 @@ def read_wrf_prs(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     geographics = _get_wrf_prs_metadata(grib_data, model)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -127,7 +126,7 @@ def _get_wrf_prs_metadata(xarray_var: xarray.DataArray, model: str) -> dict:
 
 
 def read_icon_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an ICON grib file and transforms it into an xarray.DataArray.
 
@@ -143,8 +142,7 @@ def read_icon_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -152,8 +150,8 @@ def read_icon_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     geographics = _get_icon_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -183,7 +181,7 @@ def _get_icon_metadata(xarray_var: xarray.DataArray) -> dict:
 
 
 def read_moloch_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads a Moloch grib file and transforms it into an xarray.DataArray.
 
@@ -199,8 +197,7 @@ def read_moloch_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -208,8 +205,8 @@ def read_moloch_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     grib_md = _get_moloch_metadata(grib_data)
@@ -265,7 +262,7 @@ def _get_moloch_metadata(moloch_data: xarray.DataArray) -> dict:
 
 
 def read_bolam_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads a Bolam grib file and transforms it into an xarray.DataArray.
 
@@ -281,8 +278,7 @@ def read_bolam_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -290,8 +286,8 @@ def read_bolam_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     grib_md = _get_bolam_metadata(grib_data)
@@ -347,7 +343,7 @@ def _get_bolam_metadata(bolam_data: xarray.DataArray) -> dict:
 
 
 def read_arome_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an AROME grib file and transforms it into an xarray.DataArray.
 
@@ -363,8 +359,7 @@ def read_arome_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -372,8 +367,8 @@ def read_arome_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     grib_md = _get_arome_metadata(grib_data)
@@ -411,7 +406,7 @@ def _get_arome_metadata(arome_data: xarray.DataArray) -> dict:
 
 
 def read_arpege_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an ARPEGE grib file and transforms it into an xarray.DataArray.
 
@@ -427,8 +422,7 @@ def read_arpege_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -436,8 +430,8 @@ def read_arpege_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
        )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     grib_md = _get_arpege_metadata(grib_data)
@@ -474,7 +468,7 @@ def _get_arpege_metadata(arpege_data: xarray.DataArray) -> dict:
 
 
 def read_ecmwf_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an ECMWF grib file and transforms it into an xarray.DataArray.
 
@@ -490,8 +484,7 @@ def read_ecmwf_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -499,8 +492,8 @@ def read_ecmwf_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     if variable == "tp":
         grib_data.data = grib_data.data * 1000
@@ -535,7 +528,7 @@ def _get_ecmwf_hres_metadata(xarray_var):
 
 
 def read_unified_model_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an Unified Model grib file and transforms it into an
     xarray.DataArray.
 
@@ -552,8 +545,7 @@ def read_unified_model_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -561,8 +553,8 @@ def read_unified_model_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     geographics = _get_unified_model_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -592,7 +584,7 @@ def _get_unified_model_metadata(xarray_var):
 
 
 def read_wrf_tl_ens_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads a WRF-TLS-ENS member grib file and transforms it into an
     xarray.DataArray.
 
@@ -609,8 +601,7 @@ def read_wrf_tl_ens_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -618,8 +609,8 @@ def read_wrf_tl_ens_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     grib_md = _get_wrf_tl_ens_metadata(grib_data)
@@ -630,7 +621,7 @@ def read_wrf_tl_ens_grib(
 
     # Add ensemble member coordinate
     grib_data = grib_data.assign_coords(
-        realization=int(re.search(r"tl_ens-\d+-(\d+)\.", grib_file).group(1))
+        realization=int(re.search(r"(?<=-).+?(?=\.)", grib_file).group())
     )
 
     # Add model name to attributes
@@ -655,7 +646,7 @@ def _get_wrf_tl_ens_metadata(xarray_var: xarray.DataArray) -> dict:
 
 
 def read_ncep_grib(
-    grib_file: str, variable: str, model: str, extra_filters: dict = None
+    grib_file: str, variable: str, model: str, extra_filters={}
 ) -> xarray.DataArray:
     """Reads an NCEP (GEFS or GFS) grib file and transforms it into an
     xarray.DataArray.
 
@@ -671,8 +662,7 @@ def read_ncep_grib(
     """
     filter_keys = {"shortName": variable}
 
-    if extra_filters is not None:
-        filter_keys.update(extra_filters)
+    filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -680,8 +670,8 @@ def read_ncep_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as err:
-        raise_reader_missing_filters(grib_file, variable, model, err)
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
 
     geographics = _get_ncep_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -708,3 +698,119 @@ def _get_ncep_metadata(xarray_var):
     crs_model = pyproj.crs.CRS.from_dict(projparams)
 
     return {"crs": crs_model}
+
+
+def read_swan_grib(
+    grib_file: str, variable: str, model: str, extra_filters={}
+) -> xarray.DataArray:
+    """Reads a SWAN grib file and transforms it into an xarray.DataArray.
+
+    Args:
+        grib_file (string): Path to a SWAN grib file.
+        variable (string): Variable to extract.
+        model (str): Model to be read.
+        extra_filters (dict, optional): Other filters
+            needed to read the variable
+
+    Returns:
+        xarray.DataArray: SWAN grib file data.
+    """
+
+    filter_keys = {"shortName": variable}
+    filter_keys.update(extra_filters)
+
+    try:
+        grib_data = xarray.open_dataarray(
+            grib_file,
+            engine="cfgrib",
+            backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
+        )
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
+
+    geographics = _get_swan_metadata(grib_data)
+    grib_data = grib_data.rio.write_crs(geographics["crs"])
+
+    # Change time for steps (timedelta)
+    # grib_data["time"] = (grib_data.time - grib_data.time[0])
+
+    # grib_data = grib_data.drop_vars("step")
+
+    # Rename coordinates for further reprojection
+    grib_data = grib_data.rename({"longitude": "x", "latitude": "y"})
+    # , "time": "step"})
+
+    # Add model name to attributes
+    grib_data.attrs["model"] = model
+
+    return grib_data
+
+
+def _get_swan_metadata(xarray_var):
+    """Gets projection of a SWAN xarray.
+
+    Args:
+        xarray_var (xarray): SWAN grib data.
+
+    Returns:
+        dict: Coordinate reference system.
+    """
+    projparams = proj4_from_grib(xarray_var)
+    crs_model = pyproj.crs.CRS.from_dict(projparams)
+
+    return {"crs": crs_model}
+
+
+def read_ww3_grib(
+    grib_file: str, variable: str, model: str, extra_filters={}
+) -> xarray.DataArray:
+    """Reads a WW3 grib file and transforms it into an xarray.DataArray.
+
+    Args:
+        grib_file (string): Path to a WW3 grib file.
+        variable (string): Variable to extract.
+        model (str): Model to be read.
+        extra_filters (dict, optional): Other filters
+            needed to read the variable
+
+    Returns:
+        xarray.DataArray: WW3 grib file data.
+    """
+
+    filter_keys = {"shortName": variable}
+    filter_keys.update(extra_filters)
+
+    try:
+        grib_data = xarray.open_dataarray(
+            grib_file,
+            engine="cfgrib",
+            backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
+        )
+    except DatasetBuildError as e:
+        raise_reader_missing_filters(grib_file, variable, model, e)
+
+    geographics = _get_ww3_metadata(grib_data)
+    grib_data = grib_data.rio.write_crs(geographics["crs"])
+
+    # Rename coordinates for further reprojection
+    grib_data = grib_data.rename({"longitude": "x", "latitude": "y"})
+
+    # Add model name to attributes
+    grib_data.attrs["model"] = model
+
+    return grib_data
+
+
+def _get_ww3_metadata(xarray_var):
+    """Gets projection of a WW3 xarray.
+
+    Args:
+        xarray_var (xarray): WW3 grib data.
+
+    Returns:
+        dict: Coordinate reference system.
+    """
+    projparams = proj4_from_grib(xarray_var)
+    crs_model = pyproj.crs.CRS.from_dict(projparams)
+
+    return {"crs": crs_model}
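A quick orientation note between these two patches: below is a minimal usage sketch (not part of the patch) of how the wave readers introduced in PATCH 01/14 are meant to be called. The grib path is borrowed from the new unit test and is an assumption about local data.

# Minimal usage sketch for the SWAN reader added in PATCH 01/14; the path
# is the one used by tests/test_readers_nwp.py and must exist locally.
from unimodel.io.readers_nwp import read_swan_grib

data_var = read_swan_grib(
    "tests/data/nwp_src/swan/swan-03.2024011000_00.grb", "swh", "swan"
)

# The reader writes a lon/lat CRS via rioxarray, renames longitude/latitude
# to x/y for later reprojection, and records the model name in the attrs.
print(data_var.rio.crs)
print(data_var.attrs["model"])  # "swan"
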
From 5129dfc64cc7124b94d536e2ffc054a6055b45df Mon Sep 17 00:00:00 2001
From: Sotiris
Date: Wed, 17 Jan 2024 12:34:42 +0100
Subject: [PATCH 02/14] Add wave reader

---
 tests/test_readers_nwp.py  |   4 +-
 unimodel/io/readers_nwp.py | 110 ++++++++++++++++++++-----------------
 2 files changed, 63 insertions(+), 51 deletions(-)

diff --git a/tests/test_readers_nwp.py b/tests/test_readers_nwp.py
index 80a2ce7..606427c 100644
--- a/tests/test_readers_nwp.py
+++ b/tests/test_readers_nwp.py
@@ -299,7 +299,7 @@ def test_read_unified_model_grib(self):
     def test_read_wrf_tl_ens_grib(self):
         """Tests WRF-TL-ENS member grib to xarray"""
 
-        file = "tests/data/nwp_src/wrf_tl_ens/ens-002.2023032009_01.grib"
+        file = "tests/data/nwp_src/wrf_tl_ens/tl_ens-03-002.2023101900_01.grib"
         file_idx = file + ".02ccc.idx"
         variable = "tp"
         data_var = read_wrf_tl_ens_grib(file, variable, "wrf_tl_ens")
@@ -311,7 +311,7 @@
         self.assertEqual(data_var.x.shape[0], 231)
         self.assertEqual(data_var.y.shape[0], 161)
 
-        self.assertAlmostEqual(data_var.values[76, 160], 0.109, 2)
+        self.assertAlmostEqual(data_var.values[76, 160], 0.0, 2)
         self.assertAlmostEqual(data_var.values[160, 72], 0.0)
 
         self.assertAlmostEqual(data_var.rio.transform().a, 0.025)
diff --git a/unimodel/io/readers_nwp.py b/unimodel/io/readers_nwp.py
index 7d14118..423079e 100644
--- a/unimodel/io/readers_nwp.py
+++ b/unimodel/io/readers_nwp.py
@@ -11,7 +11,7 @@
 
 
 def read_wrf_prs(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a WRF grib file and transforms it into an xarray.DataArray.
 
@@ -27,7 +27,8 @@ def read_wrf_prs(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -35,8 +36,8 @@ def read_wrf_prs(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_wrf_prs_metadata(grib_data, model)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -126,7 +127,7 @@ def _get_wrf_prs_metadata(xarray_var: xarray.DataArray, model: str) -> dict:
 
 
 def read_icon_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an ICON grib file and transforms it into an xarray.DataArray.
 
@@ -142,7 +143,8 @@ def read_icon_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -150,8 +152,8 @@ def read_icon_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_icon_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -181,7 +183,7 @@ def _get_icon_metadata(xarray_var: xarray.DataArray) -> dict:
 
 
 def read_moloch_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a Moloch grib file and transforms it into an xarray.DataArray.
 
@@ -197,7 +199,8 @@ def read_moloch_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -205,8 +208,8 @@ def read_moloch_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     grib_md = _get_moloch_metadata(grib_data)
@@ -262,7 +265,7 @@ def _get_moloch_metadata(moloch_data: xarray.DataArray) -> dict:
 
 
 def read_bolam_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a Bolam grib file and transforms it into an xarray.DataArray.
 
@@ -278,7 +281,8 @@ def read_bolam_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -286,8 +290,8 @@ def read_bolam_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     grib_md = _get_bolam_metadata(grib_data)
@@ -343,7 +347,7 @@ def _get_bolam_metadata(bolam_data: xarray.DataArray) -> dict:
 
 
 def read_arome_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an AROME grib file and transforms it into an xarray.DataArray.
 
@@ -359,7 +363,8 @@ def read_arome_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -367,8 +372,8 @@ def read_arome_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     grib_md = _get_arome_metadata(grib_data)
@@ -406,7 +411,7 @@ def _get_arome_metadata(arome_data: xarray.DataArray) -> dict:
 
 
 def read_arpege_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an ARPEGE grib file and transforms it into an xarray.DataArray.
 
@@ -422,7 +427,8 @@ def read_arpege_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -430,8 +436,8 @@ def read_arpege_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     grib_md = _get_arpege_metadata(grib_data)
@@ -468,7 +474,7 @@ def _get_arpege_metadata(arpege_data: xarray.DataArray) -> dict:
 
 
 def read_ecmwf_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an ECMWF grib file and transforms it into an xarray.DataArray.
 
@@ -484,7 +490,8 @@ def read_ecmwf_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -492,8 +499,8 @@ def read_ecmwf_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     if variable == "tp":
         grib_data.data = grib_data.data * 1000
@@ -528,7 +535,7 @@ def _get_ecmwf_hres_metadata(xarray_var):
 
 
 def read_unified_model_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an Unified Model grib file and transforms it into an
     xarray.DataArray.
 
@@ -545,7 +552,8 @@ def read_unified_model_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -553,8 +561,8 @@ def read_unified_model_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_unified_model_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -584,7 +592,7 @@ def _get_unified_model_metadata(xarray_var):
 
 
 def read_wrf_tl_ens_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a WRF-TLS-ENS member grib file and transforms it into an
     xarray.DataArray.
 
@@ -601,7 +609,8 @@ def read_wrf_tl_ens_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -609,8 +618,8 @@ def read_wrf_tl_ens_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
        )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     grib_md = _get_wrf_tl_ens_metadata(grib_data)
@@ -621,7 +630,7 @@ def read_wrf_tl_ens_grib(
 
     # Add ensemble member coordinate
     grib_data = grib_data.assign_coords(
-        realization=int(re.search(r"(?<=-).+?(?=\.)", grib_file).group())
+        realization=int(re.search(r"tl_ens-\d+-(\d+)\.", grib_file).group(1))
     )
 
     # Add model name to attributes
@@ -646,7 +655,7 @@ def _get_wrf_tl_ens_metadata(xarray_var: xarray.DataArray) -> dict:
 
 
 def read_ncep_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads an NCEP (GEFS or GFS) grib file and transforms it into an
     xarray.DataArray.
 
@@ -662,7 +671,8 @@ def read_ncep_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -670,8 +680,8 @@ def read_ncep_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
         )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_ncep_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -701,7 +711,7 @@ def _get_ncep_metadata(xarray_var):
 
 
 def read_swan_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a SWAN grib file and transforms it into an xarray.DataArray.
 
@@ -717,7 +727,8 @@ def read_swan_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -725,8 +736,8 @@ def read_swan_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
        )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_swan_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])
@@ -762,7 +773,7 @@ def _get_swan_metadata(xarray_var):
 
 
 def read_ww3_grib(
-    grib_file: str, variable: str, model: str, extra_filters={}
+    grib_file: str, variable: str, model: str, extra_filters: dict = None
 ) -> xarray.DataArray:
     """Reads a WW3 grib file and transforms it into an xarray.DataArray.
 
@@ -778,7 +789,8 @@ def read_ww3_grib(
     """
     filter_keys = {"shortName": variable}
 
-    filter_keys.update(extra_filters)
+    if extra_filters is not None:
+        filter_keys.update(extra_filters)
 
     try:
         grib_data = xarray.open_dataarray(
@@ -786,8 +798,8 @@ def read_ww3_grib(
             engine="cfgrib",
             backend_kwargs={"filter_by_keys": filter_keys, "indexpath": ""},
        )
-    except DatasetBuildError as e:
-        raise_reader_missing_filters(grib_file, variable, model, e)
+    except DatasetBuildError as err:
+        raise_reader_missing_filters(grib_file, variable, model, err)
 
     geographics = _get_ww3_metadata(grib_data)
     grib_data = grib_data.rio.write_crs(geographics["crs"])

From fe5e2e37c6607a451adda2be23fcefaf6c23e787 Mon Sep 17 00:00:00 2001
From: Sotiris
Date: Wed, 17 Jan 2024 12:36:39 +0100
Subject: [PATCH 03/14] update coverage

---
 .coverage | Bin 53248 -> 53248 bytes
 cov.xml   | 364 ++++++++++++++++++++++++++++--------------------------
 2 files changed, 188 insertions(+), 176 deletions(-)

diff --git a/.coverage b/.coverage
index 68fdb631a5b63242147a25358071c6508bf36ad5..af9b7ad85c3954cab73cf42065bc30324fa711ec 100644
[GIT binary patch omitted: base85 delta for the binary .coverage database is not human-readable]

diff --git a/cov.xml b/cov.xml
index f00618d..072971e 100644
--- a/cov.xml
+++ b/cov.xml
[coverage-report XML diff omitted: the line-hit markup was lost during extraction and cannot be reconstructed]
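One design note before the environment patches: PATCH 01/14 briefly switched every reader to a mutable default argument (extra_filters={}), and PATCH 02/14 restores the None sentinel. Below is a small stand-in sketch (not code from the module) of why the sentinel form is the safer idiom.

# Stand-in example of the default-argument pattern PATCH 02/14 restores.
def build_filters(variable, extra_filters=None):
    filter_keys = {"shortName": variable}
    if extra_filters is not None:
        filter_keys.update(extra_filters)
    return filter_keys

# A literal {} default is created once, at function definition time, so any
# call that mutated it would leak state into every later call; the None
# sentinel gives each call a fresh, predictable starting point.
print(build_filters("tp"))                               # {'shortName': 'tp'}
print(build_filters("swh", {"typeOfLevel": "surface"}))
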
From 0f5f5ef776bd01dfd8c6b59cde25f064fb779f93 Mon Sep 17 00:00:00 2001
From: Sotiris
Date: Wed, 17 Jan 2024 12:52:20 +0100
Subject: [PATCH 04/14] change env for test

---
 environment_dev.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/environment_dev.yml b/environment_dev.yml
index f9a2906..40ecea0 100644
--- a/environment_dev.yml
+++ b/environment_dev.yml
@@ -97,7 +97,7 @@ dependencies:
   - libbrotlienc=1.0.9=h166bdaf_9
   - libcblas=3.9.0=17_linux64_openblas
   - libcrc32c=1.1.2=h9c3ff4c_0
-  - libcurl=8.2.1=hca28451_0
+  - libcurl
   - libdeflate=1.18=h0b41bf4_0
   - libedit=3.1.20191231=he28a2e2_2
   - libev=4.33=h516909a_1
@@ -154,7 +154,7 @@ dependencies:
   - numba=0.57.1=py311h96b013e_0
   - numpy=1.24.4=py311h64a7726_0
   - openjpeg=2.5.0=hfec8fc6_2
-  - openssl=3.1.2=hd590300_0
+  - openssl
   - orc=1.9.0=h385abfd_1
   - packaging=23.1=pyhd8ed1ab_0
   - pandas=1.5.3=py311h2872171_1
@@ -181,7 +181,7 @@ dependencies:
   - pysocks=1.7.1=pyha2e5f31_6
   - pytest=7.4.0=pyhd8ed1ab_0
   - pytest-cov=4.1.0=pyhd8ed1ab_0
-  - python=3.11.4=hab00c5b_0_cpython
+  - python=3.11
   - python-dateutil=2.8.2=pyhd8ed1ab_0
   - python-eccodes=1.6.0=py311h1f0f07a_0
   - python-tzdata=2023.3=pyhd8ed1ab_0

From b6830cdd14edc6ea2acc892903062493e0b117cf Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 13:09:45 +0100
Subject: [PATCH 05/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index decbadd..f1887c9 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -20,7 +20,10 @@ jobs:
         echo $CONDA/bin >> $GITHUB_PATH
     - name: Install dependencies
       run: |
-        conda env update --file environment_dev.yml --name base
+        conda env create --file environment_dev.yml
+    - name: Activate environment
+      run: |
+        conda activate unimodel-dev
     - name: Lint with flake8
       run: |
         conda install flake8

From 6a1d49ecf4effa5ac9afb1cac248d54f9eb4ac9c Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 13:15:30 +0100
Subject: [PATCH 06/14] Update python-package-conda.yml - py 3.11.4

---
 .github/workflows/python-package-conda.yml | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index f1887c9..356b5c3 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -10,20 +10,17 @@ jobs:
 
     steps:
     - uses: actions/checkout@v3
-    - name: Set up Python 3.11
+    - name: Set up Python 3.11.4
      uses: actions/setup-python@v4
       with:
-        python-version: '3.11'
+        python-version: '3.11.4'
     - name: Add conda to system path
       run: |
         # $CONDA is an environment variable pointing to the root of the miniconda directory
         echo $CONDA/bin >> $GITHUB_PATH
     - name: Install dependencies
       run: |
-        conda env create --file environment_dev.yml
-    - name: Activate environment
-      run: |
-        conda activate unimodel-dev
+        conda env update --file environment_dev.yml --name base
     - name: Lint with flake8
       run: |
         conda install flake8

From d982b730d13139a68a938e312ce53a307bafd353 Mon Sep 17 00:00:00 2001
From: Sotiris
Date: Wed, 17 Jan 2024 13:20:51 +0100
Subject: [PATCH 07/14] update env_dev.yml

---
 environment_dev.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/environment_dev.yml b/environment_dev.yml
index 40ecea0..f9a2906 100644
--- a/environment_dev.yml
+++ b/environment_dev.yml
@@ -97,7 +97,7 @@ dependencies:
   - libbrotlienc=1.0.9=h166bdaf_9
   - libcblas=3.9.0=17_linux64_openblas
   - libcrc32c=1.1.2=h9c3ff4c_0
-  - libcurl
+  - libcurl=8.2.1=hca28451_0
   - libdeflate=1.18=h0b41bf4_0
   - libedit=3.1.20191231=he28a2e2_2
   - libev=4.33=h516909a_1
@@ -154,7 +154,7 @@ dependencies:
   - numba=0.57.1=py311h96b013e_0
   - numpy=1.24.4=py311h64a7726_0
   - openjpeg=2.5.0=hfec8fc6_2
-  - openssl
+  - openssl=3.1.2=hd590300_0
   - orc=1.9.0=h385abfd_1
   - packaging=23.1=pyhd8ed1ab_0
   - pandas=1.5.3=py311h2872171_1
@@ -181,7 +181,7 @@ dependencies:
   - pysocks=1.7.1=pyha2e5f31_6
   - pytest=7.4.0=pyhd8ed1ab_0
   - pytest-cov=4.1.0=pyhd8ed1ab_0
-  - python=3.11
+  - python=3.11.4=hab00c5b_0_cpython
   - python-dateutil=2.8.2=pyhd8ed1ab_0
   - python-eccodes=1.6.0=py311h1f0f07a_0
   - python-tzdata=2023.3=pyhd8ed1ab_0

From a1ec31255039ceda2ddee66e0237c661d8beaef7 Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 13:51:38 +0100
Subject: [PATCH 08/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index 356b5c3..16f8faa 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -20,7 +20,11 @@ jobs:
         echo $CONDA/bin >> $GITHUB_PATH
     - name: Install dependencies
       run: |
-        conda env update --file environment_dev.yml --name base
+        conda env create --file environment_dev.yml
+    - name: Init and activate environment
+      run: |
+        conda init
+        conda activate unimodel-dev
     - name: Lint with flake8
       run: |
         conda install flake8

From 4c06532d4253dc72f3746c5689da714a600b29eb Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 13:54:50 +0100
Subject: [PATCH 09/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index 16f8faa..b700efd 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -21,9 +21,11 @@ jobs:
     - name: Install dependencies
       run: |
         conda env create --file environment_dev.yml
-    - name: Init and activate environment
+    - name: Init conda
       run: |
         conda init
+    - name: Activate environment
+      run: |
         conda activate unimodel-dev
     - name: Lint with flake8
       run: |

From bd706c7c23161952d004e39e275c4c8de7ab8f80 Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:02:43 +0100
Subject: [PATCH 10/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index b700efd..9709cda 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -5,6 +5,10 @@ on: [pull_request]
 jobs:
   build-linux:
     runs-on: ubuntu-latest
+    # To activate environment: https://stackoverflow.com/a/72735253/11135165
+    defaults:
+      run:
+        shell: bash -el {0}
 
     strategy:
       max-parallel: 5
@@ -21,9 +25,6 @@ jobs:
     - name: Install dependencies
       run: |
         conda env create --file environment_dev.yml
-    - name: Init conda
-      run: |
-        conda init
     - name: Activate environment
       run: |
         conda activate unimodel-dev
From cabe3950ad0d7d2cc6530410e8fc73855946b674 Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:05:43 +0100
Subject: [PATCH 11/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index 9709cda..56765e6 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -25,6 +25,9 @@ jobs:
     - name: Install dependencies
       run: |
         conda env create --file environment_dev.yml
+    - name: Init conda
+      run: |
+        conda init
     - name: Activate environment
       run: |
         conda activate unimodel-dev

From 5d425bb1529e047ae3e4a2b5707c7bcfdebb4b58 Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:13:38 +0100
Subject: [PATCH 12/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index 56765e6..d1cd22b 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -14,10 +14,17 @@ jobs:
 
     steps:
     - uses: actions/checkout@v3
-    - name: Set up Python 3.11.4
-      uses: actions/setup-python@v4
+    - uses: conda-incubator/setup-miniconda@v3
       with:
-        python-version: '3.11.4'
+        activate-environment: anaconda-client-env
+        environment-file: etc/example-environment.yml
+        python-version: 3.11.4
+        condarc-file: etc/example-condarc.yml
+        auto-activate-base: false
+    #- name: Set up Python 3.11.4
+    #  uses: actions/setup-python@v4
+    #  with:
+    #    python-version: '3.11.4'
     - name: Add conda to system path
       run: |
         # $CONDA is an environment variable pointing to the root of the miniconda directory

From 85743765c51dcd65c77870f037b7c9305812c46c Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:14:49 +0100
Subject: [PATCH 13/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index d1cd22b..7d5159c 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -13,7 +13,7 @@ jobs:
       max-parallel: 5
 
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - uses: conda-incubator/setup-miniconda@v3
       with:
         activate-environment: anaconda-client-env

From 5a5ebb16e49a1b8550c0644b9a1511c64cf992f9 Mon Sep 17 00:00:00 2001
From: sotis-am <35930547+sotis-am@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:18:11 +0100
Subject: [PATCH 14/14] Update python-package-conda.yml

---
 .github/workflows/python-package-conda.yml | 23 +++++++++++------------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml
index 7d5159c..013b69d 100644
--- a/.github/workflows/python-package-conda.yml
+++ b/.github/workflows/python-package-conda.yml
@@ -16,10 +16,9 @@ jobs:
     - uses: actions/checkout@v4
     - uses: conda-incubator/setup-miniconda@v3
       with:
-        activate-environment: anaconda-client-env
-        environment-file: etc/example-environment.yml
+        activate-environment: unimodel-dev
+        environment-file: environment_dev.yml
         python-version: 3.11.4
-        condarc-file: etc/example-condarc.yml
         auto-activate-base: false
     #- name: Set up Python 3.11.4
     #  uses: actions/setup-python@v4
@@ -28,15 +27,15 @@ jobs:
     - name: Add conda to system path
       run: |
         # $CONDA is an environment variable pointing to the root of the miniconda directory
         echo $CONDA/bin >> $GITHUB_PATH
-    - name: Install dependencies
-      run: |
-        conda env create --file environment_dev.yml
-    - name: Init conda
-      run: |
-        conda init
-    - name: Activate environment
-      run: |
-        conda activate unimodel-dev
+    # - name: Install dependencies
+    #   run: |
+    #     conda env create --file environment_dev.yml
+    # - name: Init conda
+    #   run: |
+    #     conda init
+    # - name: Activate environment
+    #   run: |
+    #     conda activate unimodel-dev
     - name: Lint with flake8
       run: |
         conda install flake8
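
To close, a short sketch of the reader registry that PATCH 01/14 extends with the 'swan' and 'ww3' entries; it assumes the package is importable and mirrors the registry contents that tests/test_io_interface.py exercises.

# Registry lookup sketch; get_reader and the model names come from
# unimodel/io/interface.py as modified in PATCH 01/14.
from unimodel.io.interface import get_reader

for model in ("swan", "ww3"):
    reader = get_reader(model)
    print(model, "->", reader.__name__)  # read_swan_grib / read_ww3_grib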