Skip to content

Commit

Permalink
Merge branch 'main' into osi_saf
Browse files Browse the repository at this point in the history
  • Loading branch information
mraspaud committed Nov 28, 2023
2 parents 00df29f + ed220be commit 5d6bbd9
Show file tree
Hide file tree
Showing 52 changed files with 185 additions and 151 deletions.
17 changes: 13 additions & 4 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -64,27 +64,36 @@ jobs:
- name: Install unstable dependencies
if: matrix.experimental == true
shell: bash -l {0}
# Install pykdtree with --no-build-isolation so it builds with numpy 2.0
# We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels
# may break the conda-forge libraries trying to use newer glibc versions
run: |
python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig
python -m pip install \
--index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
--trusted-host pypi.anaconda.org \
--no-deps --pre --upgrade \
matplotlib \
numpy \
pandas \
scipy; \
python -m pip install \
--no-deps --upgrade \
scipy
mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py
python -m pip install --upgrade --no-deps --pre --no-build-isolation \
git+https://github.com/storpipfugl/pykdtree \
git+https://github.com/pytroll/pyresample \
git+https://github.com/pytroll/trollimage \
git+https://github.com/fhs/pyhdf \
git+https://github.com/takluyver/h5py@cython-3 \
git+https://github.com/Unidata/netcdf4-python \
git+https://github.com/dask/dask \
git+https://github.com/dask/distributed \
git+https://github.com/zarr-developers/zarr \
git+https://github.com/Unidata/cftime \
git+https://github.com/rasterio/rasterio \
git+https://github.com/pydata/bottleneck \
git+https://github.com/pydata/xarray \
git+https://github.com/astropy/astropy;
git+https://github.com/shapely/shapely \
git+https://github.com/astropy/astropy
LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so
echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ repos:
- id: bandit
args: [--ini, .bandit]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: 'v1.7.0' # Use the sha / tag you want to point at
rev: 'v1.7.1' # Use the sha / tag you want to point at
hooks:
- id: mypy
additional_dependencies:
Expand Down
2 changes: 1 addition & 1 deletion doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def __getattr__(cls, name):

# General information about the project.
project = u"Satpy"
copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y"))
copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
4 changes: 4 additions & 0 deletions doc/source/doi_role.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@


def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
"""Create a doi role."""
if options is None:
options = {}
if content is None:
Expand All @@ -34,6 +35,7 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
"""Create an arxive role."""
if options is None:
options = {}
if content is None:
Expand All @@ -48,12 +50,14 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


def setup_link_role(app):
    """Register the DOI and arXiv roles (both capitalizations) with Sphinx."""
    role_handlers = (
        (("doi", "DOI"), doi_role),
        (("arXiv", "arxiv"), arxiv_role),
    )
    for names, handler in role_handlers:
        for role_name in names:
            app.add_role(role_name, handler, override=True)


def setup(app):
    """Hook role registration into the Sphinx build and return extension metadata."""
    # Roles are added once the builder has been initialized.
    app.connect("builder-inited", setup_link_role)
    metadata = dict(version="0.1", parallel_read_safe=True)
    return metadata
2 changes: 1 addition & 1 deletion doc/source/reader_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def rst_table_row(columns=None):
return row


def rst_table_header(name=None, header=None, header_rows=1, widths="auto"):
def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): # noqa: D417
"""Create header for rst table.
Args:
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ line_length = 120

[tool.ruff]
# See https://docs.astral.sh/ruff/rules/
# In the future, add "A", "B", "S", "N", "D"
select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
# In the future, add "B", "S", "N"
select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
line-length = 120

[tool.ruff.per-file-ignores]
Expand Down
33 changes: 16 additions & 17 deletions satpy/composites/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar
self.attrs = kwargs

@property
def id(self):
def id(self): # noqa: A003
"""Return the DataID of the object."""
try:
return self.attrs["_satpy_id"]
Expand Down Expand Up @@ -343,7 +343,7 @@ class CategoricalDataCompositor(CompositeBase):
res = [[20, 40, 30], [50, 30, 10]]
"""

def __init__(self, name, lut=None, **kwargs):
def __init__(self, name, lut=None, **kwargs): # noqa: D417
"""Get look-up-table used to recategorize data.
Args:
Expand Down Expand Up @@ -381,7 +381,7 @@ class GenericCompositor(CompositeBase):

modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"}

def __init__(self, name, common_channel_mask=True, **kwargs):
def __init__(self, name, common_channel_mask=True, **kwargs): # noqa: D417
"""Collect custom configuration values.
Args:
Expand Down Expand Up @@ -679,7 +679,7 @@ class DayNightCompositor(GenericCompositor):
of the image (night or day). See the documentation below for more details.
"""

def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs):
def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): # noqa: D417
"""Collect custom configuration values.
Args:
Expand Down Expand Up @@ -713,9 +713,7 @@ def __call__(
datasets = self.match_data_arrays(datasets)
# At least one composite is requested.
foreground_data = datasets[0]

weights = self._get_coszen_blending_weights(datasets)

# Apply enhancements to the foreground data
foreground_data = enhance2dataset(foreground_data)

Expand Down Expand Up @@ -759,7 +757,6 @@ def _get_coszen_blending_weights(
# Calculate blending weights
coszen -= np.min((lim_high, lim_low))
coszen /= np.abs(lim_low - lim_high)

return coszen.clip(0, 1)

def _get_data_for_single_side_product(
Expand All @@ -786,8 +783,8 @@ def _mask_weights(self, weights):

def _get_day_night_data_for_single_side_product(self, foreground_data):
if "day" in self.day_night:
return foreground_data, 0
return 0, foreground_data
return foreground_data, foreground_data.dtype.type(0)
return foreground_data.dtype.type(0), foreground_data

def _get_data_for_combined_product(self, day_data, night_data):
# Apply enhancements also to night-side data
Expand Down Expand Up @@ -848,15 +845,16 @@ def _weight_data(
def _get_band_names(day_data, night_data):
try:
bands = day_data["bands"]
except TypeError:
except (IndexError, TypeError):
bands = night_data["bands"]
return bands


def _get_single_band_data(data, band):
if isinstance(data, int):
try:
return data.sel(bands=band)
except AttributeError:
return data
return data.sel(bands=band)


def _get_single_channel(data: xr.DataArray) -> xr.DataArray:
Expand All @@ -871,7 +869,7 @@ def _get_single_channel(data: xr.DataArray) -> xr.DataArray:


def _get_weight_mask_for_single_side_product(data_a, data_b):
if isinstance(data_a, int):
if data_b.shape:
return ~da.isnan(data_b)
return ~da.isnan(data_a)

Expand All @@ -894,7 +892,8 @@ def add_alpha_bands(data):
alpha = new_data[0].copy()
alpha.data = da.ones((data.sizes["y"],
data.sizes["x"]),
chunks=new_data[0].chunks)
chunks=new_data[0].chunks,
dtype=data.dtype)
# Rename band to indicate it's alpha
alpha["bands"] = "A"
new_data.append(alpha)
Expand Down Expand Up @@ -1014,7 +1013,7 @@ def __call__(self, projectables, *args, **kwargs):
class CloudCompositor(GenericCompositor):
"""Detect clouds based on thresholding and use it as a mask for compositing."""

def __init__(self, name, transition_min=258.15, transition_max=298.15,
def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: D417
transition_gamma=3.0, **kwargs):
"""Collect custom configuration values.
Expand Down Expand Up @@ -1357,7 +1356,7 @@ class StaticImageCompositor(GenericCompositor, DataDownloadMixin):
"""

def __init__(self, name, filename=None, url=None, known_hash=None, area=None,
def __init__(self, name, filename=None, url=None, known_hash=None, area=None, # noqa: D417
**kwargs):
"""Collect custom configuration values.
Expand Down Expand Up @@ -1735,7 +1734,7 @@ def _get_flag_value(mask, val):
class LongitudeMaskingCompositor(SingleBandCompositor):
"""Masks areas outside defined longitudes."""

def __init__(self, name, lon_min=None, lon_max=None, **kwargs):
def __init__(self, name, lon_min=None, lon_max=None, **kwargs): # noqa: D417
"""Collect custom configuration values.
Args:
Expand Down
2 changes: 1 addition & 1 deletion satpy/composites/abi.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class SimulatedGreen(GenericCompositor):
"""

def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs):
def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): # noqa: D417
"""Initialize fractions for input channels.
Args:
Expand Down
2 changes: 1 addition & 1 deletion satpy/composites/agri.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class SimulatedRed(GenericCompositor):
"""

def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs):
def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): # noqa: D417
"""Initialize fractions for input channels.
Args:
Expand Down
2 changes: 1 addition & 1 deletion satpy/composites/glm.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ class HighlightCompositor(GenericCompositor):
"""

def __init__(self, name, min_highlight=0.0, max_highlight=10.0,
def __init__(self, name, min_highlight=0.0, max_highlight=10.0, # noqa: D417
max_factor=(0.8, 0.8, -0.8, 0), **kwargs):
"""Initialize composite with highlight factor options.
Expand Down
4 changes: 2 additions & 2 deletions satpy/dataset/data_dict.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def get_best_dataset_key(key, choices):
return [choice for choice, distance in zip(sorted_choices, distances) if distance == distances[0]]


def get_key(key, key_container, num_results=1, best=True, query=None,
def get_key(key, key_container, num_results=1, best=True, query=None, # noqa: D417
**kwargs):
"""Get the fully-specified key best matching the provided key.
Expand Down Expand Up @@ -139,7 +139,7 @@ def keys(self, names=False, wavelengths=False):
else:
return keys

def get_key(self, match_key, num_results=1, best=True, **dfilter):
def get_key(self, match_key, num_results=1, best=True, **dfilter): # noqa: D417
"""Get multiple fully-specified keys that match the provided query.
Args:
Expand Down
8 changes: 4 additions & 4 deletions satpy/dependency_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ def _create_subtree_from_reader(self, dataset_key, query):
LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name))
return node

def _find_reader_node(self, dataset_key, query):
def _find_reader_node(self, dataset_key, query): # noqa: D417
"""Attempt to find a `DataID` in the available readers.
Args:
Expand Down Expand Up @@ -517,7 +517,7 @@ def get_modifier(self, comp_id):

raise KeyError("Could not find modifier '{}'".format(modifier))

def _create_required_subtrees(self, parent, prereqs, query=None):
def _create_required_subtrees(self, parent, prereqs, query=None): # noqa: D417
"""Determine required prerequisite Nodes for a composite.
Args:
Expand All @@ -531,7 +531,7 @@ def _create_required_subtrees(self, parent, prereqs, query=None):
raise MissingDependencies(unknown_datasets)
return prereq_nodes

def _create_optional_subtrees(self, parent, prereqs, query=None):
def _create_optional_subtrees(self, parent, prereqs, query=None): # noqa: D417
"""Determine optional prerequisite Nodes for a composite.
Args:
Expand All @@ -549,7 +549,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None):

return prereq_nodes

def _create_prerequisite_subtrees(self, parent, prereqs, query=None):
def _create_prerequisite_subtrees(self, parent, prereqs, query=None): # noqa: D417
"""Determine prerequisite Nodes for a composite.
Args:
Expand Down
10 changes: 5 additions & 5 deletions satpy/enhancements/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def wrapper(data, **kwargs):
return on_dask_array(wrapper)


def piecewise_linear_stretch(
def piecewise_linear_stretch( # noqa: D417
img: XRImage,
xp: ArrayLike,
fp: ArrayLike,
Expand Down Expand Up @@ -229,7 +229,7 @@ def _cira_stretch(band_data):
return band_data


def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):
def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): # noqa: D417
"""Stretch method based on the Reinhard algorithm, using luminance.
Args:
Expand Down Expand Up @@ -293,7 +293,7 @@ def _lookup_table(band_data, luts=None, index=-1):
return lut[band_data]


def colorize(img, **kwargs):
def colorize(img, **kwargs): # noqa: D417
"""Colorize the given image.
Args:
Expand Down Expand Up @@ -365,7 +365,7 @@ def _merge_colormaps(kwargs, img=None):
return full_cmap


def create_colormap(palette, img=None):
def create_colormap(palette, img=None): # noqa: D417
"""Create colormap of the given numpy file, color vector, or colormap.
Args:
Expand Down Expand Up @@ -525,7 +525,7 @@ def _three_d_effect_delayed(band_data, kernel, mode):
return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))


def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):
def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs): # noqa: D417
"""Scale data linearly in two separate regions.
This enhancement scales the input data linearly by splitting the data
Expand Down
3 changes: 2 additions & 1 deletion satpy/etc/readers/nwcsaf-geo.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ file_types:

nc_nwcsaf_crr-ph:
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']
file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc',
'S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']

nc_nwcsaf_ishai:
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
Expand Down
2 changes: 1 addition & 1 deletion satpy/modifiers/_crefl.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class ReflectanceCorrector(ModifierBase, DataDownloadMixin):
Uses a python rewrite of the C CREFL code written for VIIRS and MODIS.
"""

def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation",
def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", # noqa: D417
url=None, known_hash=None, **kwargs):
"""Initialize the compositor with values from the user or from the configuration file.
Expand Down
3 changes: 3 additions & 0 deletions satpy/modifiers/angles.py
Original file line number Diff line number Diff line change
Expand Up @@ -403,6 +403,9 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray:
"""
chunks = _geo_chunks_from_data_arr(data_arr)
lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks)
if lons.dtype != data_arr.dtype and np.issubdtype(data_arr.dtype, np.floating):
lons = lons.astype(data_arr.dtype)
lats = lats.astype(data_arr.dtype)
cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats)
return _geo_dask_to_data_array(cos_sza)

Expand Down
Loading

0 comments on commit 5d6bbd9

Please sign in to comment.