
Commit

248 coarse DEMs ignored if no LiDAR (#249)
* Ensure an empty initialised DEM with no LiDAR is all NaNs

* Added a check to ignore a dataset if it contains no LiDAR files within the ROI (a short sketch follows the changed-file summary below).

* Fixup: Format Python code with Black

* Update version

---------

Co-authored-by: github-actions <[email protected]>
rosepearson and github-actions authored Feb 29, 2024
1 parent 2e07142 commit ad0901b
Showing 3 changed files with 17 additions and 8 deletions.
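
The core change in src/geofabrics/dem.py replaces a hard assertion in _check_valid_inputs with a warn-and-skip path: any LiDAR dataset whose file_paths list is empty is logged and removed from lidar_datasets_info before the chunking checks run. A minimal, self-contained sketch of that behaviour follows, using hypothetical dataset names and a module-level logger standing in for the class's self.logger:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("geofabrics.dem")

# Hypothetical dataset info; only the "file_paths" entries matter for this check
lidar_datasets_info = {
    "dataset_with_tiles": {"file_paths": ["tile_1.laz", "tile_2.laz"]},
    "dataset_outside_roi": {"file_paths": []},  # no LiDAR files within the ROI
}

empty_datasets = []
for dataset_name, info in lidar_datasets_info.items():
    if len(info["file_paths"]) == 0:
        logger.warning(
            f"Ignoring LiDAR dataset {dataset_name} as there are no LiDAR files within the ROI."
        )
        empty_datasets.append(dataset_name)

# Drop the empty datasets so the later chunking checks only see usable datasets
for empty_dataset in empty_datasets:
    lidar_datasets_info.pop(empty_dataset)

print(list(lidar_datasets_info))  # ['dataset_with_tiles']

Previously an empty dataset would have tripped the assertion and aborted the run; now it is simply dropped and processing continues with the remaining datasets.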
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "geofabrics"
version = "1.1.12"
version = "1.1.13"
description = "A package for creating geofabrics for flood modelling."
readme = "README.md"
authors = [{ name = "Rose pearson", email = "[email protected]" }]
21 changes: 15 additions & 6 deletions src/geofabrics/dem.py
@@ -1161,7 +1161,7 @@ def _check_valid_inputs(self, lidar_datasets_info):
A dictionary of dictionaries of LiDAR dataset information. The CRS, list of
LAS files, and tile index file are included for each dataset.
"""

+empty_datasets = []
for dataset_name in lidar_datasets_info:
# Check the source_crs is valid
source_crs = lidar_datasets_info[dataset_name]["crs"]
@@ -1178,9 +1178,12 @@ def _check_valid_inputs(self, lidar_datasets_info):
)
# Check some LiDAR files are specified
lidar_files = lidar_datasets_info[dataset_name]["file_paths"]
-assert len(lidar_files) >= 1, (
-    "There are no LiDAR files specified in dataset: " f"{dataset_name}"
-)
+if len(lidar_files) == 0:
+    self.logger.warning(
+        f"Ignoring LiDAR dataset {dataset_name} as there are no LiDAR files within the ROI."
+    )
+    empty_datasets.append(dataset_name)
+    continue
# Check for valid combination of chunk_size, lidar_files and tile_index_file
if self.chunk_size is None:
assert len(lidar_files) == 1, (
@@ -1196,6 +1199,10 @@ def _check_valid_inputs(self, lidar_datasets_info):
"A tile index file must be provided if chunking is "
f"defined for {dataset_name}"
)
+# Remove a dataset if it has no LiDAR
+if len(empty_datasets) > 0:
+    for empty_dataset in empty_datasets:
+        lidar_datasets_info.pop(empty_dataset)
# There should only be one dataset if there is no chunking information
if self.chunk_size is None:
assert len(lidar_datasets_info) == 1, (
@@ -1446,8 +1453,10 @@ def add_lidar(
"no LiDAR": self.SOURCE_CLASSIFICATION["no data"]
}
elevations = {
"no LiDAR": dask.array.empty(
shape=(len(y), len(x)), dtype=raster_options["raster_type"]
"no LiDAR": dask.array.full(
fill_value=numpy.nan,
shape=(len(y), len(x)),
dtype=raster_options["raster_type"],
)
}
dem = self._create_data_set(
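
The second change, in add_lidar, swaps dask.array.empty for dask.array.full when building the placeholder "no LiDAR" elevation layer. empty allocates uninitialised memory, so the placeholder could hold arbitrary values; full with fill_value=numpy.nan guarantees every cell starts as NaN, which is what the first bullet of the commit message describes. A small sketch of the difference, with a placeholder shape and dtype chosen purely for illustration:

import dask.array
import numpy

shape = (4, 4)           # placeholder grid size for illustration
dtype = numpy.float32    # placeholder raster type for illustration

uninitialised = dask.array.empty(shape=shape, dtype=dtype)
all_nan = dask.array.full(fill_value=numpy.nan, shape=shape, dtype=dtype)

print(uninitialised.compute())                      # whatever happened to be in memory
print(bool(numpy.isnan(all_nan.compute()).all()))   # True: every cell is NaN

Starting from a known NaN fill means downstream code can treat NaN as "no data yet" rather than having to guard against whatever values empty happened to return.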
2 changes: 1 addition & 1 deletion src/geofabrics/version.py
@@ -3,4 +3,4 @@
Contains the package version information
"""

__version__ = "1.1.12"
__version__ = "1.1.13"
