Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

60 observed flow and stage data #61

Merged
merged 25 commits into from
Nov 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
42a3e20
added functions for retrieving input observed data from the hdf
zherbz Oct 23, 2024
e4d9469
updated doc strings for new function
zherbz Oct 23, 2024
43b7523
added parameters within doc string
zherbz Oct 23, 2024
9b1f0ae
updated activation instructions for windows users
zherbz Nov 5, 2024
b581e9c
revised function observed_timeseries_input
zherbz Nov 5, 2024
c49baac
added tests for observed_timeseries_input
zherbz Nov 5, 2024
4be57a1
new test hdf and csv data for testing the observed_timeseries_input f…
zherbz Nov 5, 2024
9f2537a
updated for devcontainer users
zherbz Nov 7, 2024
6a7c443
added fiona==1.9.6 as a dependency in order for all plan tests to suc…
zherbz Nov 7, 2024
fb23af9
added a devcontainer folder for those using Windows OS systems needin…
zherbz Nov 7, 2024
a3dda6f
replaced black-formatted and pylint with charliermash.ruff
zherbz Nov 20, 2024
b19acc5
updated for documentation on venv activation across platforms
zherbz Nov 20, 2024
164911a
moved the pinned version of Fiona into the list of optional dependencies
zherbz Nov 20, 2024
ffc1f1b
removed redundant methods and updated bytes conversion to use utils …
zherbz Nov 20, 2024
728b661
updated test functions related to observed hdf data reading
zherbz Nov 20, 2024
6443edb
updated test csv datasets related to observed hdf function testing
zherbz Nov 20, 2024
f448399
updated mac/linux env install docs
zherbz Nov 25, 2024
93fdadd
removed parse_ras_datetime import, and instead defaulting to stage un…
zherbz Nov 25, 2024
1ef65f2
added tests for value error and rasplanhdf error related to observed …
zherbz Nov 25, 2024
fe0421a
updated observed_timeseries_input to instead return an xarray dataset…
zherbz Nov 25, 2024
6cd4371
updated tests with new function features
zherbz Nov 25, 2024
9d6ea25
updated test datasets now with added dims for columns
zherbz Nov 25, 2024
5c9be9c
renamed the col Date to time for consistency with other methods in li…
zherbz Nov 26, 2024
59b1a8f
renamed the col Date to time for consistency with other methods in li…
zherbz Nov 26, 2024
5086776
renamed the col Date to time for consistency with other methods in li…
zherbz Nov 26, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
FROM mcr.microsoft.com/devcontainers/base:jammy
27 changes: 27 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"name": "rashdf-devcontainer",
"build": {
"dockerfile": "Dockerfile",
"context": ".."
},
"features": {
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/python:1":{"version":"3.12"}
},
"mounts": [],
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"charliermarsh.ruff",
"GitHub.copilot"
],
"settings": {
"python.defaultInterpreterPath": "/opt/conda/envs/rashdf/bin/python"
}
}
},
// avoid dubious ownership of the workspace folder https://www.kenmuse.com/blog/avoiding-dubious-ownership-in-dev-containers/
"postStartCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}",
"postCreateCommand": "sudo chown -R vscode:vscode ${containerWorkspaceFolder}"
}
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,12 @@ $ python -m venv venv-rashdf

Activate the virtual environment:
```
$ source ./venv/bin/activate
# For macOS/Linux
$ source ./venv-rashdf/bin/activate
(venv-rashdf) $

# For Windows
> ./venv-rashdf/Scripts/activate
```

Install dev dependencies:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ version = "0.6.0"
dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray"]

[project.optional-dependencies]
dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "fiona", "kerchunk", "zarr", "dask", "fsspec", "s3fs"]
dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "kerchunk", "zarr", "dask", "fsspec", "s3fs", "fiona==1.9.6"]
docs = ["sphinx", "numpydoc", "sphinx_rtd_theme"]

[project.urls]
Expand Down
72 changes: 72 additions & 0 deletions src/rashdf/plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
ras_timesteps_to_datetimes,
parse_ras_datetime_ms,
deprecated,
convert_ras_hdf_value,
)

from geopandas import GeoDataFrame
Expand Down Expand Up @@ -156,6 +157,7 @@ class RasPlanHdf(RasGeomHdf):
PLAN_INFO_PATH = "Plan Data/Plan Information"
PLAN_PARAMS_PATH = "Plan Data/Plan Parameters"
PRECIP_PATH = "Event Conditions/Meteorology/Precipitation"
OBS_DATA_PATH = "Event Conditions/Observed Data"
RESULTS_UNSTEADY_PATH = "Results/Unsteady"
RESULTS_UNSTEADY_SUMMARY_PATH = f"{RESULTS_UNSTEADY_PATH}/Summary"
VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_PATH}/Volume Accounting"
Expand All @@ -166,6 +168,8 @@ class RasPlanHdf(RasGeomHdf):
UNSTEADY_TIME_SERIES_PATH = f"{BASE_OUTPUT_PATH}/Unsteady Time Series"
REFERENCE_LINES_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Lines"
REFERENCE_POINTS_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Points"
OBS_FLOW_OUTPUT_PATH = f"{OBS_DATA_PATH}/Flow"
OBS_STAGE_OUTPUT_PATH = f"{OBS_DATA_PATH}/Stage"

RESULTS_STEADY_PATH = "Results/Steady"
BASE_STEADY_PATH = f"{RESULTS_STEADY_PATH}/Output/Output Blocks/Base Output"
Expand Down Expand Up @@ -1117,6 +1121,74 @@ def reference_lines_timeseries_output(self) -> xr.Dataset:
"""
return self.reference_timeseries_output(reftype="lines")

def observed_timeseries_input(self, vartype: str = "Flow") -> xr.DataArray:
    """Return observed timeseries input data for reference lines and points from a HEC-RAS HDF plan file.

    Parameters
    ----------
    vartype : str, optional
        The type of observed data to retrieve. Must be either "Flow" or
        "Stage". (default: "Flow")

    Returns
    -------
    xr.DataArray
        Observed timeseries input data for both reference lines and
        reference points, concatenated along the "time" dimension. Each
        site carries ``refln_*`` or ``refpt_*`` id/name coordinates and
        records its source HDF path in the ``hdf_path`` attribute.

    Raises
    ------
    ValueError
        If `vartype` is not "Flow" or "Stage".
    RasPlanHdfError
        If the observed data group is missing from the plan HDF file.
    """
    if vartype == "Flow":
        output_path = self.OBS_FLOW_OUTPUT_PATH
    elif vartype == "Stage":
        output_path = self.OBS_STAGE_OUTPUT_PATH
    else:
        raise ValueError('vartype must be either "Flow" or "Stage".')

    observed_group = self.get(output_path)
    if observed_group is None:
        raise RasPlanHdfError(
            f"Could not find HDF group at path '{output_path}'."
            f" Does the Plan HDF file contain reference {vartype} output data?"
        )

    das = {}
    # Number sites 0..n-1 with an explicit counter so ids do not depend on
    # whether an "Attributes" dataset is present in the group. (The previous
    # enumerate()-based `idx - 1` indexing produced an id of -1 for the
    # first site whenever "Attributes" was absent.)
    site_id = 0
    for site in observed_group.keys():
        if site == "Attributes":
            # Metadata table, not a timeseries dataset — skip it.
            continue
        # Site Ex: 'Ref Point: Grapevine_Lake_RP'
        site_path = observed_group[site]
        site_name = site.split(":")[1][1:]  # Grapevine_Lake_RP
        ref_type = site.split(":")[0]  # Ref Point
        if ref_type == "Ref Line":
            ref_type = "refln"
        else:
            ref_type = "refpt"
        df = pd.DataFrame(site_path[:]).map(convert_ras_hdf_value)
        # Rename Date to time for consistency with other methods in the library.
        df = df.rename(columns={"Date": "time"})
        # Ensure the time index is unique.
        df = df.drop_duplicates(subset="time")
        # Package into a 1D xarray DataArray.
        da = xr.DataArray(
            df["Value"].values,
            name=vartype,
            dims=["time"],
            coords={
                "time": df["time"].values,
            },
            attrs={
                "hdf_path": f"{output_path}/{site}",
            },
        )
        # Expand dimensions to add id/name coordinates for the site.
        da = da.expand_dims({f"{ref_type}_id": [site_id]})
        da = da.expand_dims({f"{ref_type}_name": [site_name]})
        das[site_name] = da
        site_id += 1
    return xr.concat(list(das.values()), dim="time")

def reference_points_timeseries_output(self) -> xr.Dataset:
"""Return timeseries output data for reference points from a HEC-RAS HDF plan file.

Expand Down
Loading