diff --git a/pyposeidon/telemac.py b/pyposeidon/telemac.py
index 9cb68b2..8733574 100644
--- a/pyposeidon/telemac.py
+++ b/pyposeidon/telemac.py
@@ -366,22 +366,23 @@ def write_meteo_selafin(outpath, input_atm_zarr):
     remove(input_atm_zarr)
 
 
-def get_boundary_settings(boundary_type, glo_node, bnd_node):
-    settings = {
-        "lihbor": 5 if boundary_type == "open" else 2,
-        "liubor": 6 if boundary_type == "open" else 2,
-        "livbor": 6 if boundary_type == "open" else 2,
-        "hbor": 0.0,
-        "ubor": 0.0,
-        "vbor": 0.0,
-        "aubor": 0.0,
-        "litbor": 5 if boundary_type == "open" else 2,
-        "tbor": 0.0,
-        "atbor": 0.0,
-        "btbor": 0.0,
-        "nbor": glo_node + 1,
-        "k": bnd_node + 1,
-    }
+def get_boundary_settings(boundary_type, glo_node, bnd_node, module):
+    if module in ["telemac2d", "telemac3d", "tomawac"]:
+        settings = {
+            "lihbor": 5 if boundary_type == "open" else 2,
+            "liubor": 6 if boundary_type == "open" else 2,
+            "livbor": 6 if boundary_type == "open" else 2,
+            "hbor": 0.0,
+            "ubor": 0.0,
+            "vbor": 0.0,
+            "aubor": 0.0,
+            "litbor": 5 if boundary_type == "open" else 2,
+            "tbor": 0.0,
+            "atbor": 0.0,
+            "btbor": 0.0,
+            "nbor": glo_node + 1,
+            "k": bnd_node + 1,
+        }
     return settings
 
 
@@ -456,7 +457,6 @@ def export_cli(ds: xr.Dataset, tel_path: str, outCli: str, tel_module: str = "te
     lines = []
     bnd_node = 0
     contours, contours_idx = extract_contour(ds, tel_path)
-    print(contours_idx)
     for bnd in contours_idx:
         poly_bnd = []
         coord_bnd = []
@@ -475,7 +475,7 @@ def export_cli(ds: xr.Dataset, tel_path: str, outCli: str, tel_module: str = "te
             # If both adjacent nodes are not 'open', then bnd is closed
             if prev_boundary_type != "open" and next_boundary_type != "open":
                 boundary_type = "Unknown"
-            boundary_settings = get_boundary_settings(boundary_type, glo_node, bnd_node)
+            boundary_settings = get_boundary_settings(boundary_type, glo_node, bnd_node, tel_module)
 
             keys_order = [
                 "lihbor",
@@ -492,9 +492,8 @@ def export_cli(ds: xr.Dataset, tel_path: str, outCli: str, tel_module: str = "te
                 "nbor",
                 "k",
             ]
-            if tel_module == "telemac2d":
-                line = " ".join(str(boundary_settings[key]) for key in keys_order)
-                lines.append(line)
+            line = " ".join(str(boundary_settings[key]) for key in keys_order)
+            lines.append(line)
             bnd_node += 1
         poly_bnd.append((coord_bnd, bnd))
         domains_bnd.append(poly_bnd)
@@ -1441,6 +1440,8 @@ def set_obs(self, **kwargs):
         df.index += 1
         df["ind"] = df.index
         df["unique_id"] = tgn[id_str].values
+        if id_str != "unique_id":
+            df[id_str] = tgn[id_str].values
         df["depth"] = self.mesh.Dataset.depth[df.mesh_index.values]
 
         # convert to MERCATOR coordinates
@@ -1462,6 +1463,7 @@ def set_obs(self, **kwargs):
             timestep=self.params["tstep"],
             offset=offset,
         )
+        self.stations = os.path.join(path, "stations.csv")
 
     def get_output_data(self, **kwargs):
         dic = self.__dict__
@@ -1471,7 +1473,7 @@ def get_output_data(self, **kwargs):
         self.data = data.get_output(**dic)
 
     def get_station_obs_data(self, **kwargs):
-        self.station_obs_data = get_obs_data(self.obs, self.start_date, self.end_date)
+        self.station_obs_data = pd.read_csv(self.stations)
 
     def open_thalassa(self, **kwargs):
         # open a Thalassa instance to visualize the output
diff --git a/pyposeidon/utils/data.py b/pyposeidon/utils/data.py
index e24a777..3463c0c 100644
--- a/pyposeidon/utils/data.py
+++ b/pyposeidon/utils/data.py
@@ -220,7 +220,7 @@ def __init__(self, **kwargs):
         res_type = kwargs.get("result_type", "2D")
         convert = kwargs.get("convert_results", True)
         extract_TS = kwargs.get("extract_TS", False)
-        station_id_str = kwargs.get("id_str", "ioc_code")
+        id_str = kwargs.get("id_str", "ioc_code")
         max_dist = kwargs.get("max_dist", 1000)
 
         if res_type not in ["1D", "2D"]:
@@ -307,28 +307,23 @@ def __init__(self, **kwargs):
 
         # export parquet time series
         if extract_TS:
-            if "stations_mesh_id" in p.__dict__:
-                if isinstance(p.stations_mesh_id, dict):
-                    stations = pd.DataFrame(p.stations_mesh_id)
-                elif isinstance(p.stations_mesh_id, pd.DataFrame):
-                    stations = p.stations_mesh_id
-                else:
-                    raise ValueError("stations_mesh_id format not supported")
+            if "stations" in p.__dict__:
+                file = p.stations
             elif "stations.csv" in os.listdir(rpath):
-                stations = pd.read_csv(os.path.join(rpath, "stations.csv"))
+                file = os.path.join(rpath, "stations.csv")
             elif "obs" in self.__dict__:
                 p.set_obs()
-                stations = p.stations_mesh_id
+                file = p.stations
             else:
                 raise ValueError("no stations file info found")
-
+            stations = pd.read_csv(file)
             logger.info("extracting parquet files from TELEMAC Selafin output \n")
-            for i_s, id_ in enumerate(stations[station_id_str]):
-                s = stations[stations[station_id_str] == id_]
+            for i_s, id_ in enumerate(stations[id_str]):
+                s = stations[stations[id_str] == id_]
                 mod, mlon, mlat = extract_t_elev_2D(
                     self.Dataset,
-                    s.longitude.values[0],
-                    s.latitude.values[0],
+                    s.lon.values[0],
+                    s.lat.values[0],
                     var,
                     model_xstr,
                     model_ystr,