From 2cafc87330cbfb4d8c8f801da2035ff59df061fa Mon Sep 17 00:00:00 2001
From: George Breyiannis
Date: Wed, 24 Jul 2024 16:38:23 +0200
Subject: [PATCH] tests: fix-updates

Work towards tests with schism-5.12rc1
---
 pyposeidon/dem_tools.py         |  10 +-
 pyposeidon/mgmsh.py             |   7 +-
 pyposeidon/misc/param.nml       |   5 +
 pyposeidon/schism.py            |   6 ++
 tests/test_mesh_global.py       |  13 +--
 tests/test_schism_cast.py       |   7 +-
 tests/test_schism_reforecast.py | 170 --------------------------------
 7 files changed, 32 insertions(+), 186 deletions(-)
 delete mode 100644 tests/test_schism_reforecast.py

diff --git a/pyposeidon/dem_tools.py b/pyposeidon/dem_tools.py
index 01de4ea..9c7134d 100644
--- a/pyposeidon/dem_tools.py
+++ b/pyposeidon/dem_tools.py
@@ -810,11 +810,11 @@ def scale_dem(b, res_min, res_max, **kwargs):
     return nodes
 
 
-def make_bgmesh(df, fpos, dem=None, scale=True, **kwargs):
-    lon_min = df.bounds.minx.min()
-    lon_max = df.bounds.maxx.max()
-    lat_min = df.bounds.miny.min()
-    lat_max = df.bounds.maxy.max()
+def make_bgmesh(dc, fpos, dem=None, scale=True, **kwargs):
+    lon_min = dc.bounds.minx.min()
+    lon_max = dc.bounds.maxx.max()
+    lat_min = dc.bounds.miny.min()
+    lat_max = dc.bounds.maxy.max()
 
     kwargs_ = kwargs.copy()
     kwargs_.pop("lon_min", None)
diff --git a/pyposeidon/mgmsh.py b/pyposeidon/mgmsh.py
index f74514c..84a5448 100644
--- a/pyposeidon/mgmsh.py
+++ b/pyposeidon/mgmsh.py
@@ -591,10 +591,13 @@ def to_geo(df, **kwargs):
         f.write("Physical Surface(1) = {2};\n")
 
         f.write("Field[1] = Distance;\n")
+        curve_list = np.arange(ltag + 1)
         if not gglobal:
-            curve_list = [x for x in np.arange(1, ltag + 1) if x not in loops]
+            idx = [0] + loops
+            curve_list = np.delete(curve_list, idx)
         else:
-            curve_list = [x for x in np.arange(2, ltag + 1) if x not in loops]
+            idx = [0, 1] + loops
+            curve_list = np.delete(curve_list, idx)
 
         if not bspline:
             f.write("Field[1].CurvesList = {{{}}};\n".format(",".join(map(str, curve_list))))
diff --git a/pyposeidon/misc/param.nml b/pyposeidon/misc/param.nml
index 69ffe1a..83878b8 100644
--- a/pyposeidon/misc/param.nml
+++ b/pyposeidon/misc/param.nml
@@ -502,6 +502,10 @@
   drampwind = 0. !needed if nrampwind/=0; ramp-up period in days
 ! iwindoff = 0 !needed only if nws/=0; '1': needs windfactor.gr3
   iwind_form = -1 !needed if nws/=0
+  !If impose_net_flux/=0 and nws=2, read in net _surface_ heat flux as var 'dlwrf'
+  !(Downward Long Wave) in sflux_rad (solar radiation is still used separately),
+  !and if PREC_EVAP is on, also read in net P-E as 'prate' (Surface Precipitation Rate) in sflux_prc.
+  ! impose_net_flux = 0
 
 !-----------------------------------------------------------------------
 ! Heat and salt exchange. isconsv=1 needs ihconsv=1; ihconsv=1 needs nws=2.
@@ -650,6 +654,7 @@
 ! If USE_MARSH is on and isav=1, all .gr3 must have constant depths!
 !----------------------------------------------------------------------
   iveg = 0 !on/off flag
+! isav = 0 !on/off flag
 
 !----------------------------------------------------------------------
 ! Coupling step with ICE module.
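Note on the mgmsh.py hunk above: the two list comprehensions are collapsed into a single np.delete over a 0..ltag range. Since the values of np.arange(ltag + 1) coincide with their own indices, deleting index 0 (plus index 1 in the global case) together with the loop tags yields exactly the curve list the comprehensions produced. A minimal sketch of the equivalence, with hypothetical stand-ins for the function's ltag and loops locals:

    import numpy as np

    ltag, loops = 6, [3, 5]  # hypothetical tag count and interior loop tags

    # old (non-global) form: keep 1..ltag, skipping the loop tags
    old = [x for x in np.arange(1, ltag + 1) if x not in loops]

    # new form: build 0..ltag, then drop index 0 plus the loop tags;
    # values equal their indices here, so deleting by tag is equivalent
    new = np.delete(np.arange(ltag + 1), [0] + loops)

    assert list(new) == old  # both give [1, 2, 4, 6]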
diff --git a/pyposeidon/schism.py b/pyposeidon/schism.py
index ea76804..0c4f358 100644
--- a/pyposeidon/schism.py
+++ b/pyposeidon/schism.py
@@ -1335,6 +1335,12 @@ def results(self, **kwargs):
         # fix fortran/python index
         x2d["SCHISM_hgrid_face_nodes"][:, :3] = x2d["SCHISM_hgrid_face_nodes"].values[:, :3] - 1
 
         # set time to Datetime
+        if x2d.time.dtype == "float64":
+            times = pd.to_datetime(x2d.time.values, unit="s", origin=sdate.tz_convert(None))
+        else:
+            times = pd.to_datetime(x2d.time)
+
+        x2d = x2d.assign_coords({"time": ("time", times, x2d.time.attrs)})
 
         logger.info("get combined 3D netcdf files \n")
diff --git a/tests/test_mesh_global.py b/tests/test_mesh_global.py
index f7164b8..d79dd64 100644
--- a/tests/test_mesh_global.py
+++ b/tests/test_mesh_global.py
@@ -15,17 +15,13 @@
 @pytest.mark.parametrize("ggor", ["jigsaw", "gmsh"])
 @pytest.mark.parametrize("bgmesh", [None, DEM_FILE])
 @pytest.mark.parametrize("bindings", [True, False])
-def test_io(pytestconfig, tmpdir, ggor, bgmesh, bindings):
+@pytest.mark.parametrize("cbuffer", [None, 0.001])
+def test_io(pytestconfig, tmpdir, ggor, bgmesh, bindings, cbuffer):
     # Skip the test unless --runslow has been passed
     if bgmesh is not None:
         if not pytestconfig.getoption("--runslow"):
             pytest.skip("slow test")
 
-    if ggor == "jigsaw":
-        cb = 0.001
-    else:
-        cb = None
-
     mesh = pmesh.set(
         type="tri2d",
         geometry="global",
@@ -34,7 +30,7 @@ def test_io(pytestconfig, tmpdir, ggor, bgmesh, bindings):
         mesh_generator=ggor,
         dem_source=bgmesh,
         use_bindings=bindings,
-        cbuffer=cb,
+        cbuffer=cbuffer,
     )
 
     # save to file
@@ -62,9 +58,10 @@
 
 
 @pytest.mark.schism
-@pytest.mark.parametrize("ggor,cbuffer", [("jigsaw", 0.001), ("gmsh", None)])
+@pytest.mark.parametrize("ggor", ["jigsaw", "gmsh"])
 @pytest.mark.parametrize("bgmesh", [None, DEM_FILE])
 @pytest.mark.parametrize("bindings", [True, False])
+@pytest.mark.parametrize("cbuffer", [None, 0.001])
 def test_validate(pytestconfig, tmpdir, ggor, cbuffer, bgmesh, bindings):
     if bgmesh is not None:
         if not pytestconfig.getoption("--runslow"):
diff --git a/tests/test_schism_cast.py b/tests/test_schism_cast.py
index 12a16af..ccc00d6 100644
--- a/tests/test_schism_cast.py
+++ b/tests/test_schism_cast.py
@@ -140,7 +140,11 @@ def test_schism_cast(tmpdir, copy):
 
 
 @pytest.mark.schism
-def test_schism_cast_workflow(tmpdir):
+@pytest.mark.parametrize(
+    "hotstart",
+    [1, 2],
+)
+def test_schism_cast_workflow(tmpdir, hotstart):
     # initialize a model
     rpath = str(tmpdir) + "/schism/"
     test_case.update({"rpath": rpath + "20181001.00/"})  # use tmpdir for running the model
@@ -177,6 +181,7 @@ def test_schism_cast_workflow(tmpdir):
             cpath=rpaths[l + 1],
             meteo=meteo[l + 1],
             sdate=date_list[l + 1],
+            ihot=hotstart,
         )
         h.run(execute=True)  # execute
 
diff --git a/tests/test_schism_reforecast.py b/tests/test_schism_reforecast.py
deleted file mode 100644
index 3418191..0000000
--- a/tests/test_schism_reforecast.py
+++ /dev/null
@@ -1,170 +0,0 @@
-import pytest
-import pyposeidon
-from pyposeidon.utils import cast, data
-import pyposeidon.meteo as pm
-import json
-import pandas as pd
-import datetime
-import os
-import numpy as np
-import xarray as xr
-
-from . import DATA_DIR
-
-
-MESH_FILE = (DATA_DIR / "hgrid.gr3").as_posix()
-DEM_FILE = (DATA_DIR / "dem.nc").as_posix()
-METEO_FILES_1 = [(DATA_DIR / "uvp_2018100100.grib").as_posix()]
-METEO_FILES_2 = [
-    (DATA_DIR / name).as_posix()
-    for name in (
-        "uvp_2018100100.grib",
-        "uvp_2018100112.grib",
-        "uvp_2018100200.grib",
-        "uvp_2018100212.grib",
-    )
-]
-
-
-# define in a dictionary the properties of the model..
-case = {
-    "solver_name": "schism",
-    "mesh_file": MESH_FILE,
-    "manning": 0.12,
-    "windrot": 0.00001,
-    "tag": "schism",
-    "start_date": "2018-10-1 0:0:0",
-    "time_frame": "12h",
-    "dem_source": DEM_FILE,
-    "meteo_source": METEO_FILES_1,
-    "meteo_merge": "first",  # combine meteo
-    "meteo_combine_by": "nested",
-    "meteo_xr_kwargs": {"concat_dim": "step"},
-    "update": ["all"],  # update only meteo, keep dem
-    "parameters": {
-        "dt": 400,
-        "rnday": 0.5,
-        "nhot": 1,
-        "ihot": 0,
-        "nspool": 9,
-        "ihfskip": 36,
-        "nhot_write": 108,
-    },
-    "scribes": 1,
-}
-
-
-# define in a dictionary the properties of the model..
-check = {
-    "solver_name": "schism",
-    "mesh_file": MESH_FILE,
-    "manning": 0.12,
-    "windrot": 0.00001,
-    "tag": "schism",
-    "start_date": "2018-10-1 0:0:0",
-    "time_frame": "36h",
-    "dem_source": DEM_FILE,
-    "update": ["all"],  # update only meteo, keep dem
-    "parameters": {
-        "dt": 400,
-        "rnday": 1.5,
-        "nhot": 0,
-        "ihot": 0,
-        "nspool": 9,
-        "ihfskip": 36,
-        "nhot_write": 108,
-    },
-    "scribes": 1,
-}
-
-
-@pytest.mark.schism
-def test_schism_reforecast_workflow(tmpdir):
-    # initialize a model
-    rpath = str(tmpdir) + "/schism/"
-    case.update({"rpath": rpath + "20181001.00/"})  # use tmpdir for running the model
-
-    b = pyposeidon.model.set(**case)
-
-    b.execute()
-
-    # creating a time sequence of the runs
-    start_date = pd.to_datetime("2018-10-1 0:0:0")
-    end_date = pd.to_datetime("2018-10-2 0:0:0")
-    date_list = pd.date_range(start_date, end_date, freq="12h")
-
-    # creating a sequence of folder to store the runs. In this case we name them after the date attribute.
-    # NOTE that the first folder is the fisrt run already perfomed!!
-    rpaths = [rpath + datetime.datetime.strftime(x, "%Y%m%d.%H") + "/" for x in date_list]
-
-    # creating a sequence of folder from which we read the meteo.
-    meteo = []
-    for date in date_list:
-        prev_date = pd.to_datetime(date) - pd.to_timedelta("12h")
-        prev_date = prev_date.strftime(format="%Y-%m-%d %H:%M:%S")
-        dr = pd.date_range(prev_date, date, freq="12h")
-        names = ["uvp_" + datetime.datetime.strftime(x, "%Y%m%d%H") + ".grib" for x in dr]
-        dur = [(DATA_DIR / name).as_posix() for name in names]
-        meteo.append(dur)
-
-    # set cast
-    for l in range(len(rpaths) - 1):
-        h = cast.set(
-            solver_name="schism",
-            model=b,
-            ppath=rpaths[l],
-            cpath=rpaths[l + 1],
-            meteo=meteo[l + 1],
-            sdate=date_list[l + 1],
-        )
-        h.run(execute=True)  # execute
-
-    # Run check case - Total duration
-    check.update({"rpath": rpath + "check/"})  # use tmpdir for running the model
-
-    # Combine meteo appropriately
-
-    m1 = pm.Meteo(meteo_source=METEO_FILES_2[0])
-    m2 = pm.Meteo(meteo_source=METEO_FILES_2[1])
-    m3 = pm.Meteo(meteo_source=METEO_FILES_2[2])
-    m4 = pm.Meteo(meteo_source=METEO_FILES_2[3])
-
-    # extract correct chunk
-
-    w1 = m1.Dataset.isel(time=slice(0, 13))
-    w2 = m2.Dataset.isel(time=slice(1, 13))  # note that we keep the 12 hour from the previous file
-    w3 = m3.Dataset.isel(time=slice(1, 13))
-    w4 = m4.Dataset.isel(time=slice(1, 13))
-
-    # combine
-    meteo = xr.combine_by_coords([w1, w2, w3, w4], combine_attrs="override")
-    # saving
-    check.update({"meteo_source": meteo})
-
-    c = pyposeidon.model.set(**check)
-
-    c.execute()
-
-    # COMPARE
-    output = data.get_output(folders=rpaths, solver_name="schism")
-
-    total = data.get_output(folders=[rpath + "check/"], solver_name="schism")
-
-    r = output.Dataset.isel(time=slice(0, 36))
-
-    rb = []
-    for var in total.Dataset.data_vars:
-        if not total.Dataset[var].equals(r[var]):
-            rb.append(var)
-
-    # print(rb)
-
-    # flag = True TODO
-    # for var in rb:
-    #     flag = False
-    #     mdif = np.abs(total.results.Dataset[var].values - output.results.Dataset[var].values).max()
-    #     if mdif < 1.e-14 :
-    #         flag = True
-    #     print(mdif)
-    # expected = ["wetdry_side", "wetdry_elem", "wetdry_node", "zcor", "elev", "hvel"]
-    assert (rb == ["zcor"]) or (rb == [])
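Note on the schism.py hunk above: SCHISM 5.12 scribed output may carry the time coordinate as float64 seconds since the run start rather than as datetimes, so results() now branches on the dtype before building the datetime coordinate. A self-contained sketch of the same conversion on a toy dataset (the dataset here is fabricated for illustration; the variable names mirror the patch):

    import numpy as np
    import pandas as pd
    import xarray as xr

    # toy stand-in for combined SCHISM output: time stored as float seconds
    sdate = pd.Timestamp("2018-10-01 00:00:00", tz="UTC")
    x2d = xr.Dataset(coords={"time": ("time", np.array([0.0, 400.0, 800.0]))})

    # float seconds are anchored at the tz-stripped start date;
    # anything else is parsed directly; attrs are carried over unchanged
    if x2d.time.dtype == "float64":
        times = pd.to_datetime(x2d.time.values, unit="s", origin=sdate.tz_convert(None))
    else:
        times = pd.to_datetime(x2d.time)

    x2d = x2d.assign_coords({"time": ("time", times, x2d.time.attrs)})
    print(x2d.time.values)  # 00:00:00, 00:06:40, 00:13:20 on 2018-10-01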