diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 6e1fdfc781..14e5e3ffcd 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -64,9 +64,11 @@ jobs:
       - name: Install unstable dependencies
        if: matrix.experimental == true
        shell: bash -l {0}
+        # Install pykdtree with --no-build-isolation so it builds with numpy 2.0
        # We must get LD_PRELOAD for libstdc++ or else the manylinux wheels
        # may break the conda-forge libraries trying to use newer glibc versions
        run: |
+          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig
          python -m pip install \
            --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
            --trusted-host pypi.anaconda.org \
@@ -74,9 +76,15 @@ jobs:
            matplotlib \
            numpy \
            pandas \
-            scipy; \
-          python -m pip install \
-            --no-deps --upgrade \
+            scipy
+          mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py
+          python -m pip install --upgrade --no-deps --pre --no-build-isolation \
+            git+https://github.com/storpipfugl/pykdtree \
+            git+https://github.com/pytroll/pyresample \
+            git+https://github.com/pytroll/trollimage \
+            git+https://github.com/fhs/pyhdf \
+            git+https://github.com/takluyver/h5py@cython-3 \
+            git+https://github.com/Unidata/netcdf4-python \
            git+https://github.com/dask/dask \
            git+https://github.com/dask/distributed \
            git+https://github.com/zarr-developers/zarr \
@@ -84,7 +92,8 @@ jobs:
            git+https://github.com/rasterio/rasterio \
            git+https://github.com/pydata/bottleneck \
            git+https://github.com/pydata/xarray \
-            git+https://github.com/astropy/astropy;
+            git+https://github.com/shapely/shapely \
+            git+https://github.com/astropy/astropy
          LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so
          echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d84659c6f0..99e77cb56a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,7 +19,7 @@ repos:
       - id: bandit
        args: [--ini, .bandit]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v1.7.0'  # Use the sha / tag you want to point at
+    rev: 'v1.7.1'  # Use the sha / tag you want to point at
    hooks:
      - id: mypy
        additional_dependencies:
diff --git a/doc/source/conf.py b/doc/source/conf.py
index df006727c0..3aa810420e 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -117,7 +117,7 @@ def __getattr__(cls, name):

 # General information about the project.
 project = u"Satpy"
-copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y"))
+copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y"))  # noqa: A001

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
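The `# noqa: A001` above, like the `A002`/`A003` suppressions further down, comes from ruff's flake8-builtins rules (the `"A"` selector enabled in `pyproject.toml` below). A minimal, self-contained sketch of the failure mode those rules guard against; the variable here is invented for illustration:

```python
# Rebinding a builtin's name hides the builtin for the rest of the scope.
ascii = False  # ruff A001: module-level name shadows the builtin ascii()

try:
    ascii("ü")  # this now calls the bool, not the builtin
except TypeError as err:
    print(err)  # 'bool' object is not callable
```

Sphinx requires the module-level name `copyright` in `conf.py`, so that line keeps the shadowing and is suppressed; elsewhere in this changeset (e.g. `xmlformat.py`) the offending name is renamed instead.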
diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py
index 115e7895c6..0bb48d9880 100644
--- a/doc/source/doi_role.py
+++ b/doc/source/doi_role.py
@@ -20,6 +20,7 @@


 def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
+    """Create a doi role."""
     if options is None:
         options = {}
     if content is None:
@@ -34,6 +35,7 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


 def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
+    """Create an arXiv role."""
     if options is None:
         options = {}
     if content is None:
@@ -48,6 +50,7 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


 def setup_link_role(app):
+    """Set up the link roles."""
     app.add_role("doi", doi_role, override=True)
     app.add_role("DOI", doi_role, override=True)
     app.add_role("arXiv", arxiv_role, override=True)
@@ -55,5 +58,6 @@ def setup_link_role(app):


 def setup(app):
+    """Set up the app."""
     app.connect("builder-inited", setup_link_role)
     return {"version": "0.1", "parallel_read_safe": True}
diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py
index 3ddec3444b..618cb2b96b 100644
--- a/doc/source/reader_table.py
+++ b/doc/source/reader_table.py
@@ -39,7 +39,7 @@ def rst_table_row(columns=None):
     return row


-def rst_table_header(name=None, header=None, header_rows=1, widths="auto"):
+def rst_table_header(name=None, header=None, header_rows=1, widths="auto"):  # noqa: D417
     """Create header for rst table.

     Args:
diff --git a/pyproject.toml b/pyproject.toml
index 1282120a59..4de1e302f4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,8 +15,8 @@ line_length = 120

 [tool.ruff]
 # See https://docs.astral.sh/ruff/rules/
-# In the future, add "A", "B", "S", "N", "D"
-select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
+# In the future, add "B", "S", "N"
+select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
 line-length = 120

 [tool.ruff.per-file-ignores]
diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index d3a1e510cb..9295f94dc7 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -112,7 +112,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar
         self.attrs = kwargs

     @property
-    def id(self):
+    def id(self):  # noqa: A003
         """Return the DataID of the object."""
         try:
             return self.attrs["_satpy_id"]
@@ -343,7 +343,7 @@ class CategoricalDataCompositor(CompositeBase):
        res = [[20, 40, 30], [50, 30, 10]]
     """

-    def __init__(self, name, lut=None, **kwargs):
+    def __init__(self, name, lut=None, **kwargs):  # noqa: D417
         """Get look-up-table used to recategorize data.

         Args:
@@ -381,7 +381,7 @@ class GenericCompositor(CompositeBase):

     modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"}

-    def __init__(self, name, common_channel_mask=True, **kwargs):
+    def __init__(self, name, common_channel_mask=True, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
@@ -679,7 +679,7 @@ class DayNightCompositor(GenericCompositor):
     of the image (night or day). See the documentation below for more details.
     """

-    def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs):
+    def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
@@ -713,9 +713,7 @@ def __call__(
         datasets = self.match_data_arrays(datasets)
         # At least one composite is requested.
         foreground_data = datasets[0]
-        weights = self._get_coszen_blending_weights(datasets)
-
         # Apply enhancements to the foreground data
         foreground_data = enhance2dataset(foreground_data)

@@ -759,7 +757,6 @@ def _get_coszen_blending_weights(
         # Calculate blending weights
         coszen -= np.min((lim_high, lim_low))
         coszen /= np.abs(lim_low - lim_high)
-
         return coszen.clip(0, 1)

     def _get_data_for_single_side_product(
@@ -786,8 +783,8 @@ def _mask_weights(self, weights):

     def _get_day_night_data_for_single_side_product(self, foreground_data):
         if "day" in self.day_night:
-            return foreground_data, 0
-        return 0, foreground_data
+            return foreground_data, foreground_data.dtype.type(0)
+        return foreground_data.dtype.type(0), foreground_data

     def _get_data_for_combined_product(self, day_data, night_data):
         # Apply enhancements also to night-side data
@@ -848,15 +845,16 @@ def _weight_data(
 def _get_band_names(day_data, night_data):
     try:
         bands = day_data["bands"]
-    except TypeError:
+    except (IndexError, TypeError):
         bands = night_data["bands"]
     return bands


 def _get_single_band_data(data, band):
-    if isinstance(data, int):
+    try:
+        return data.sel(bands=band)
+    except AttributeError:
         return data
-    return data.sel(bands=band)


 def _get_single_channel(data: xr.DataArray) -> xr.DataArray:
@@ -871,7 +869,7 @@ def _get_single_channel(data: xr.DataArray) -> xr.DataArray:


 def _get_weight_mask_for_single_side_product(data_a, data_b):
-    if isinstance(data_a, int):
+    if data_b.shape:
         return ~da.isnan(data_b)
     return ~da.isnan(data_a)

@@ -894,7 +892,8 @@ def add_alpha_bands(data):
         alpha = new_data[0].copy()
         alpha.data = da.ones((data.sizes["y"], data.sizes["x"]),
-                             chunks=new_data[0].chunks)
+                             chunks=new_data[0].chunks,
+                             dtype=data.dtype)
         # Rename band to indicate it's alpha
         alpha["bands"] = "A"
         new_data.append(alpha)
@@ -1014,7 +1013,7 @@ def __call__(self, projectables, *args, **kwargs):
 class CloudCompositor(GenericCompositor):
     """Detect clouds based on thresholding and use it as a mask for compositing."""

-    def __init__(self, name, transition_min=258.15, transition_max=298.15,
+    def __init__(self, name, transition_min=258.15, transition_max=298.15,  # noqa: D417
                  transition_gamma=3.0, **kwargs):
         """Collect custom configuration values.

@@ -1357,7 +1356,7 @@ class StaticImageCompositor(GenericCompositor, DataDownloadMixin):

     """

-    def __init__(self, name, filename=None, url=None, known_hash=None, area=None,
+    def __init__(self, name, filename=None, url=None, known_hash=None, area=None,  # noqa: D417
                  **kwargs):
         """Collect custom configuration values.

@@ -1735,7 +1734,7 @@ def _get_flag_value(mask, val):
 class LongitudeMaskingCompositor(SingleBandCompositor):
     """Masks areas outside defined longitudes."""

-    def __init__(self, name, lon_min=None, lon_max=None, **kwargs):
+    def __init__(self, name, lon_min=None, lon_max=None, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
diff --git a/satpy/composites/abi.py b/satpy/composites/abi.py
index 3ae5237906..88c0db1d8e 100644
--- a/satpy/composites/abi.py
+++ b/satpy/composites/abi.py
@@ -42,7 +42,7 @@ class SimulatedGreen(GenericCompositor):

     """

-    def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs):
+    def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs):  # noqa: D417
         """Initialize fractions for input channels.
         Args:
diff --git a/satpy/composites/agri.py b/satpy/composites/agri.py
index 839706457e..20024282d2 100644
--- a/satpy/composites/agri.py
+++ b/satpy/composites/agri.py
@@ -42,7 +42,7 @@ class SimulatedRed(GenericCompositor):

     """

-    def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs):
+    def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs):  # noqa: D417
         """Initialize fractions for input channels.

         Args:
diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py
index e9b6ef275e..e1c9b676c6 100644
--- a/satpy/composites/glm.py
+++ b/satpy/composites/glm.py
@@ -43,7 +43,7 @@ class HighlightCompositor(GenericCompositor):

     """

-    def __init__(self, name, min_highlight=0.0, max_highlight=10.0,
+    def __init__(self, name, min_highlight=0.0, max_highlight=10.0,  # noqa: D417
                  max_factor=(0.8, 0.8, -0.8, 0), **kwargs):
         """Initialize composite with highlight factor options.
diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py
index 790d688b24..783ddc4487 100644
--- a/satpy/dataset/data_dict.py
+++ b/satpy/dataset/data_dict.py
@@ -51,7 +51,7 @@ def get_best_dataset_key(key, choices):
     return [choice for choice, distance in zip(sorted_choices, distances)
            if distance == distances[0]]


-def get_key(key, key_container, num_results=1, best=True, query=None,
+def get_key(key, key_container, num_results=1, best=True, query=None,  # noqa: D417
            **kwargs):
     """Get the fully-specified key best matching the provided key.

@@ -139,7 +139,7 @@ def keys(self, names=False, wavelengths=False):
         else:
             return keys

-    def get_key(self, match_key, num_results=1, best=True, **dfilter):
+    def get_key(self, match_key, num_results=1, best=True, **dfilter):  # noqa: D417
         """Get multiple fully-specified keys that match the provided query.

         Args:
diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py
index d99fb536eb..7c2b65a6c5 100644
--- a/satpy/dependency_tree.py
+++ b/satpy/dependency_tree.py
@@ -327,7 +327,7 @@ def _create_subtree_from_reader(self, dataset_key, query):
             LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name))
         return node

-    def _find_reader_node(self, dataset_key, query):
+    def _find_reader_node(self, dataset_key, query):  # noqa: D417
         """Attempt to find a `DataID` in the available readers.

         Args:
@@ -517,7 +517,7 @@ def get_modifier(self, comp_id):

         raise KeyError("Could not find modifier '{}'".format(modifier))

-    def _create_required_subtrees(self, parent, prereqs, query=None):
+    def _create_required_subtrees(self, parent, prereqs, query=None):  # noqa: D417
         """Determine required prerequisite Nodes for a composite.

         Args:
@@ -531,7 +531,7 @@ def _create_required_subtrees(self, parent, prereqs, query=None):
             raise MissingDependencies(unknown_datasets)
         return prereq_nodes

-    def _create_optional_subtrees(self, parent, prereqs, query=None):
+    def _create_optional_subtrees(self, parent, prereqs, query=None):  # noqa: D417
         """Determine optional prerequisite Nodes for a composite.

         Args:
@@ -549,7 +549,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None):

         return prereq_nodes

-    def _create_prerequisite_subtrees(self, parent, prereqs, query=None):
+    def _create_prerequisite_subtrees(self, parent, prereqs, query=None):  # noqa: D417
         """Determine prerequisite Nodes for a composite.
         Args:
diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py
index e2dda9cf63..00a0f8dd4e 100644
--- a/satpy/enhancements/__init__.py
+++ b/satpy/enhancements/__init__.py
@@ -131,7 +131,7 @@ def wrapper(data, **kwargs):
     return on_dask_array(wrapper)


-def piecewise_linear_stretch(
+def piecewise_linear_stretch(  # noqa: D417
     img: XRImage,
     xp: ArrayLike,
     fp: ArrayLike,
@@ -229,7 +229,7 @@ def _cira_stretch(band_data):
     return band_data


-def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):
+def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):  # noqa: D417
     """Stretch method based on the Reinhard algorithm, using luminance.

     Args:
@@ -293,7 +293,7 @@ def _lookup_table(band_data, luts=None, index=-1):
     return lut[band_data]


-def colorize(img, **kwargs):
+def colorize(img, **kwargs):  # noqa: D417
     """Colorize the given image.

     Args:
@@ -365,7 +365,7 @@ def _merge_colormaps(kwargs, img=None):
     return full_cmap


-def create_colormap(palette, img=None):
+def create_colormap(palette, img=None):  # noqa: D417
     """Create colormap of the given numpy file, color vector, or colormap.

     Args:
@@ -525,7 +525,7 @@ def _three_d_effect_delayed(band_data, kernel, mode):
     return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))


-def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):
+def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):  # noqa: D417
     """Scale data linearly in two separate regions.

     This enhancement scales the input data linearly by splitting the data
diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml
index 29e3b5cc05..e22ae09fc1 100644
--- a/satpy/etc/readers/nwcsaf-geo.yaml
+++ b/satpy/etc/readers/nwcsaf-geo.yaml
@@ -41,7 +41,8 @@ file_types:

  nc_nwcsaf_crr-ph:
    file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
-    file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']
+    file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc',
+                    'S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']

  nc_nwcsaf_ishai:
    file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py
index bc42228f26..a68d9a460e 100644
--- a/satpy/modifiers/_crefl.py
+++ b/satpy/modifiers/_crefl.py
@@ -35,7 +35,7 @@ class ReflectanceCorrector(ModifierBase, DataDownloadMixin):
     Uses a python rewrite of the C CREFL code written for VIIRS and MODIS.
     """

-    def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation",
+    def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation",  # noqa: D417
                  url=None, known_hash=None, **kwargs):
         """Initialize the compositor with values from the user or from the configuration file.
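The extra `file_patterns` entry above makes the NWC SAF GEO reader accept CRR-Ph files whose product name is spelled without the hyphen. A quick way to confirm a pattern matches is trollsift, the filename-parsing library the PyTroll readers rely on; the filename below is invented for illustration:

```python
from trollsift import Parser

# The newly added hyphen-less pattern:
pattern = "S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc"
parser = Parser(pattern)

# Parsing an invented filename extracts the fields named in the pattern:
print(parser.parse("S_NWC_CRRPh_MSG4_MSG-N-VISIR_20231106T120000Z.nc"))
# {'platform_id': 'MSG4', 'region_id': 'MSG-N-VISIR',
#  'start_time': datetime.datetime(2023, 11, 6, 12, 0)}
```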
diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index 04791f3184..1471ba3669 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -403,6 +403,9 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray:
     """
     chunks = _geo_chunks_from_data_arr(data_arr)
     lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks)
+    if lons.dtype != data_arr.dtype and np.issubdtype(data_arr.dtype, np.floating):
+        lons = lons.astype(data_arr.dtype)
+        lats = lats.astype(data_arr.dtype)
     cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats)
     return _geo_dask_to_data_array(cos_sza)
diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py
index 151082e723..0bfc3592b7 100644
--- a/satpy/modifiers/filters.py
+++ b/satpy/modifiers/filters.py
@@ -11,7 +11,7 @@ class Median(ModifierBase):
     """Apply a median filter to the band."""

-    def __init__(self, median_filter_params, **kwargs):
+    def __init__(self, median_filter_params, **kwargs):  # noqa: D417
         """Create the instance.

         Args:
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py
index 1194eb036a..cc903ad5e9 100644
--- a/satpy/modifiers/geometry.py
+++ b/satpy/modifiers/geometry.py
@@ -33,7 +33,7 @@ class SunZenithCorrectorBase(ModifierBase):
     """Base class for sun zenith correction modifiers."""

-    def __init__(self, max_sza=95.0, **kwargs):
+    def __init__(self, max_sza=95.0, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
@@ -96,7 +96,7 @@ class SunZenithCorrector(SunZenithCorrectorBase):

     """

-    def __init__(self, correction_limit=88., **kwargs):
+    def __init__(self, correction_limit=88., **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
@@ -142,7 +142,7 @@ class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase):

     """

-    def __init__(self, correction_limit=88., **kwargs):
+    def __init__(self, correction_limit=88., **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
@@ -177,7 +177,7 @@ class SunZenithReducer(SunZenithCorrectorBase):

     """

-    def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs):
+    def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
diff --git a/satpy/modifiers/parallax.py b/satpy/modifiers/parallax.py
index 8c5c138e5d..9d70aa12c3 100644
--- a/satpy/modifiers/parallax.py
+++ b/satpy/modifiers/parallax.py
@@ -265,7 +265,7 @@ def __init__(self, base_area,
         self.debug_mode = debug_mode
         self.diagnostics = {}

-    def __call__(self, cth_dataset, **kwargs):
+    def __call__(self, cth_dataset, **kwargs):  # noqa: D417
         """Apply parallax correction to dataset.

         Args:
diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py
index e3ea3214b8..e37f6d3c9f 100644
--- a/satpy/modifiers/spectral.py
+++ b/satpy/modifiers/spectral.py
@@ -43,7 +43,7 @@ class NIRReflectance(ModifierBase):
     TERMINATOR_LIMIT = 85.0
     MASKING_LIMIT = 88.0

-    def __init__(self, sunz_threshold=TERMINATOR_LIMIT,
+    def __init__(self, sunz_threshold=TERMINATOR_LIMIT,  # noqa: D417
                  masking_limit=MASKING_LIMIT, **kwargs):
         """Collect custom configuration values.

@@ -67,23 +67,24 @@ def __call__(self, projectables, optional_datasets=None, **info):

         Not supposed to be used for wavelength outside [3, 4] µm.
""" projectables = self.match_data_arrays(projectables) - return self._get_reflectance_as_dataarray(projectables, optional_datasets) + inputs = self._get_nir_inputs(projectables, optional_datasets) + return self._get_reflectance_as_dataarray(*inputs) - def _get_reflectance_as_dataarray(self, projectables, optional_datasets): + def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the reflectance as a dataarray.""" - _nir, _tb11 = projectables - da_nir = _nir.data - da_tb11 = _tb11.data - da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - - logger.info("Getting reflective part of %s", _nir.attrs["name"]) - reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) - - proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) + logger.info("Getting reflective part of %s", nir.attrs["name"]) + reflectance = self._get_reflectance_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) + proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj + def _get_nir_inputs(self, projectables, optional_datasets): + nir, tb11 = projectables + da_tb11 = tb11.data + da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) + return (nir, da_tb11, da_tb13_4, da_sun_zenith) + @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None @@ -95,7 +96,7 @@ def _get_tb13_4_from_optionals(optional_datasets): return tb13_4 @staticmethod - def _get_sun_zenith_from_provided_data(projectables, optional_datasets): + def _get_sun_zenith_from_provided_data(nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None @@ -106,9 +107,8 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") - _nir = projectables[0] - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) - sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) + lons, lats = nir.attrs["area"].get_lonlats(chunks=nir.data.chunks, dtype=dtype) + sun_zenith = sun_zenith_angle(nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): @@ -139,7 +139,7 @@ def _init_reflectance_calculator(self, metadata): class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive part of NIR bands.""" - def __init__(self, sunz_threshold=None, **kwargs): + def __init__(self, sunz_threshold=None, **kwargs): # noqa: D417 """Collect custom configuration values. 
         Args:
@@ -159,20 +159,16 @@ def __call__(self, projectables, optional_datasets=None, **info):

         """
         projectables = self.match_data_arrays(projectables)
-        return self._get_emissivity_as_dataarray(projectables, optional_datasets)
+        inputs = self._get_nir_inputs(projectables, optional_datasets)
+        return self._get_emissivity_as_dataarray(*inputs)

-    def _get_emissivity_as_dataarray(self, projectables, optional_datasets):
+    def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith):
         """Get the emissivity as a dataarray."""
-        _nir, _tb11 = projectables
-        da_nir = _nir.data
-        da_tb11 = _tb11.data
-        da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets)
-        da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets)
-
-        logger.info("Getting emissive part of %s", _nir.attrs["name"])
-        emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs)
+        logger.info("Getting emissive part of %s", nir.attrs["name"])
+        emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs)

-        proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir)
+        proj = self._create_modified_dataarray(emissivity, base_dataarray=nir)
         proj.attrs["units"] = "K"
         return proj
diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py
index 976fbbbd2a..4440641d8f 100644
--- a/satpy/multiscene/_multiscene.py
+++ b/satpy/multiscene/_multiscene.py
@@ -192,7 +192,7 @@ def first_scene(self):
         return self._scene_gen.first

     @classmethod
-    def from_files(
+    def from_files(  # noqa: D417
         cls,
         files_to_sort: Collection[str],
         reader: str | Collection[str] | None = None,
diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py
index 81ebf2393b..c8fc0a8b69 100644
--- a/satpy/readers/__init__.py
+++ b/satpy/readers/__init__.py
@@ -111,7 +111,7 @@ def group_files(files_to_sort, reader=None, time_threshold=10,
     return list(_filter_groups(groups, missing=missing))


-def _assign_files_to_readers(files_to_sort, reader_names,
+def _assign_files_to_readers(files_to_sort, reader_names,  # noqa: D417
                              reader_kwargs):
     """Assign files to readers.

@@ -190,7 +190,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None):
     return file_keys


-def _get_sorted_file_groups(all_file_keys, time_threshold):
+def _get_sorted_file_groups(all_file_keys, time_threshold):  # noqa: D417
     """Get sorted file groups.

     Get a list of dictionaries, where each list item consists of a dictionary
@@ -673,7 +673,7 @@ class FSFile(os.PathLike):

     """

-    def __init__(self, file, fs=None):
+    def __init__(self, file, fs=None):  # noqa: D417
         """Initialise the FSFile instance.

         Args:
@@ -705,7 +705,7 @@ def __repr__(self):
         """Representation of the object."""
         return '<FSFile "{}">'.format(self._file)

-    def open(self, *args, **kwargs):
+    def open(self, *args, **kwargs):  # noqa: A003
         """Open the file.

         This is read-only.
diff --git a/satpy/readers/atms_sdr_hdf5.py b/satpy/readers/atms_sdr_hdf5.py
index 7f2d43bd71..dc78399aca 100644
--- a/satpy/readers/atms_sdr_hdf5.py
+++ b/satpy/readers/atms_sdr_hdf5.py
@@ -15,8 +15,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-"""
-Reader for the ATMS SDR format.
+"""Reader for the ATMS SDR format.

 A reader for Advanced Technology Microwave Sounder (ATMS) SDR data as it
 e.g. comes out of the CSPP package for processing Direct Readout data.
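The `dtype` argument threaded through `_get_sun_zenith_from_provided_data` in `satpy/modifiers/spectral.py` above is what keeps float32 inputs float32 through the sun-zenith computation: pyresample's `get_lonlats` would otherwise hand back float64 coordinates. A minimal sketch of that plumbing, with a made-up area just for illustration:

```python
import numpy as np
from pyresample.geometry import AreaDefinition

# A tiny invented lon/lat grid, only to show the dtype behavior:
area = AreaDefinition("test", "test area", "test",
                      {"proj": "longlat", "datum": "WGS84"},
                      4, 4, (-10.0, 40.0, 10.0, 60.0))

# Default precision is float64; requesting float32 matches the band data:
lons64, lats64 = area.get_lonlats()
lons32, lats32 = area.get_lonlats(dtype=np.float32)
print(lons64.dtype, lons32.dtype)  # float64 float32
```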
diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py
index c566175b8c..cfc3e1283e 100644
--- a/satpy/readers/avhrr_l1b_gaclac.py
+++ b/satpy/readers/avhrr_l1b_gaclac.py
@@ -60,7 +60,7 @@ class GACLACFile(BaseFileHandler):
     """Reader for GAC and LAC data."""

-    def __init__(self, filename, filename_info, filetype_info,
+    def __init__(self, filename, filename_info, filetype_info,  # noqa: D417
                  start_line=None, end_line=None, strip_invalid_coords=True,
                  interpolate_coords=True, **reader_kwargs):
         """Init the file handler.

@@ -196,7 +196,7 @@ def get_dataset(self, key, info):

         return res

-    def slice(self, data, times):
+    def slice(self, data, times):  # noqa: A003
         """Select user-defined scanlines and/or strip invalid coordinates.

         Furthermore, update scanline timestamps.
diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py
index 3fdeed1edc..66a028eb4c 100644
--- a/satpy/readers/file_handlers.py
+++ b/satpy/readers/file_handlers.py
@@ -25,7 +25,7 @@
 from satpy.readers import open_file_or_filename


-def open_dataset(filename, *args, **kwargs):
+def open_dataset(filename, *args, **kwargs):  # noqa: D417
     """Open a file with xarray.

     Args:
diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py
index 1ba160095f..f6c983e8d5 100644
--- a/satpy/readers/generic_image.py
+++ b/satpy/readers/generic_image.py
@@ -15,8 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy.  If not, see <http://www.gnu.org/licenses/>.
-"""
-Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...).
+"""Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...).

 Returns a dataset without calibration.  Includes coordinates if
 available in the file (eg. geotiff).
diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py
index 0bf918d68f..4f34c1fde8 100644
--- a/satpy/readers/gerb_l2_hr_h5.py
+++ b/satpy/readers/gerb_l2_hr_h5.py
@@ -34,8 +34,7 @@


 def gerb_get_dataset(ds, ds_info):
-    """
-    Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler.
+    """Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler.

     The routine takes into account the quantisation factor and fill values.
     """
diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py
index d6ebea0c56..b063c51c4f 100644
--- a/satpy/readers/ici_l1b_nc.py
+++ b/satpy/readers/ici_l1b_nc.py
@@ -176,8 +176,7 @@ def _interpolate_geo(
         latitude,
         n_samples,
     ):
-        """
-        Perform the interpolation of geographic coordinates from tie points to pixel points.
+        """Perform the interpolation of geographic coordinates from tie points to pixel points.

         Args:
             longitude: xarray DataArray containing the longitude dataset to
@@ -229,8 +228,7 @@ def _interpolate_viewing_angle(
         zenith,
         n_samples,
     ):
-        """
-        Perform the interpolation of angular coordinates from tie points to pixel points.
+        """Perform the interpolation of angular coordinates from tie points to pixel points.
         Args:
             azimuth: xarray DataArray containing the azimuth angle dataset to
diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py
index 29e0247fdc..2862301168 100644
--- a/satpy/readers/modis_l3.py
+++ b/satpy/readers/modis_l3.py
@@ -46,7 +46,6 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader):
     """File handler for MODIS HDF-EOS Level 3 CMG gridded files."""

     def available_datasets(self, configured_datasets=None):
         """Automatically determine datasets provided by this file."""
-
         # Initialise set of variable names to carry through code
         handled_var_names = set()

@@ -112,7 +111,6 @@ def get_dataset(self, dataset_id, dataset_info):

     def _get_area_extent(self):
         """Get the grid properties."""
-
         # Now compute the data extent
         upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"]
         lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"]
diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py
index 9a309a0bb8..fc5aea2c8e 100644
--- a/satpy/readers/mviri_l1b_fiduceo_nc.py
+++ b/satpy/readers/mviri_l1b_fiduceo_nc.py
@@ -540,7 +540,7 @@ class FiduceoMviriBase(BaseFileHandler):
         "IR": "count_ir"
     }

-    def __init__(self, filename, filename_info, filetype_info,
+    def __init__(self, filename, filename_info, filetype_info,  # noqa: D417
                  mask_bad_quality=False):
         """Initialize the file handler.
diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py
index 2c9e2ba39f..19db6f9976 100644
--- a/satpy/readers/nucaps.py
+++ b/satpy/readers/nucaps.py
@@ -237,7 +237,7 @@ def get_dataset(self, dataset_id, ds_info):
 class NUCAPSReader(FileYAMLReader):
     """Reader for NUCAPS NetCDF4 files."""

-    def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs):
+    def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs):  # noqa: D417
         """Configure reader behavior.

         Args:
diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py
index 9989bf3d86..886ce458b3 100644
--- a/satpy/readers/scatsat1_l2b.py
+++ b/satpy/readers/scatsat1_l2b.py
@@ -26,8 +26,10 @@


 class SCATSAT1L2BFileHandler(BaseFileHandler):
+    """File handler for SCATSAT-1 Level 2B files, as distributed by EUMETSAT in HDF5 format."""

     def __init__(self, filename, filename_info, filetype_info):
+        """Initialize the file handler."""
         super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info)
         self.h5f = h5py.File(self.filename, "r")
         h5data = self.h5f["science_data"]
@@ -44,6 +46,7 @@ def __init__(self, filename, filename_info, filetype_info):
         self.longitude_scale = float(h5data.attrs["Longitude Scale"])

     def get_dataset(self, key, info):
+        """Get the dataset."""
         h5data = self.h5f["science_data"]
         stdname = info.get("standard_name")
diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py
index bd8f3f6d69..9fa5b5d59a 100644
--- a/satpy/readers/viirs_edr_active_fires.py
+++ b/satpy/readers/viirs_edr_active_fires.py
@@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info,
                                                     auto_maskandscale=auto_maskandscale,
                                                     xarray_kwargs=xarray_kwargs)
         self.prefix = filetype_info.get("variable_prefix")

-    def get_dataset(self, dsid, dsinfo):
+    def get_dataset(self, dsid, dsinfo):  # noqa: D417
         """Get requested data as DataArray.
         Args:
diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py
index db9ba9ba10..eef02f7777 100644
--- a/satpy/readers/viirs_sdr.py
+++ b/satpy/readers/viirs_sdr.py
@@ -185,7 +185,7 @@ def split_desired_other(fhs, prime_geo, second_geo):
 class VIIRSSDRReader(FileYAMLReader):
     """Custom file reader for finding VIIRS SDR geolocation at runtime."""

-    def __init__(self, config_files, use_tc=None, **kwargs):
+    def __init__(self, config_files, use_tc=None, **kwargs):  # noqa: D417
         """Initialize file reader and adjust geolocation preferences.

         Args:
diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py
index 969c30113a..21200d4c8d 100644
--- a/satpy/readers/xmlformat.py
+++ b/satpy/readers/xmlformat.py
@@ -31,22 +31,22 @@
     "uinteger4": ">u4", }


-def process_delimiter(elt, ascii=False):
+def process_delimiter(elt, text=False):
     """Process a 'delimiter' tag."""
-    del elt, ascii
+    del elt, text


-def process_field(elt, ascii=False):
+def process_field(elt, text=False):
     """Process a 'field' tag."""
     # NOTE: if there is a variable defined in this field and it is different
     # from the default, we could change the value and restart.

     scale = np.uint8(1)
-    if elt.get("type") == "bitfield" and not ascii:
+    if elt.get("type") == "bitfield" and not text:
         current_type = ">u" + str(int(elt.get("length")) // 8)
         scale = np.dtype(current_type).type(1)
     elif (elt.get("length") is not None):
-        if ascii:
+        if text:
             add = 33
         else:
             add = 0
@@ -64,9 +64,9 @@ def process_field(elt, ascii=False):
     return ((elt.get("name"), current_type, scale))


-def process_array(elt, ascii=False):
+def process_array(elt, text=False):
     """Process an 'array' tag."""
-    del ascii
+    del text
     chld = list(elt)
     if len(chld) > 1:
         raise ValueError()
@@ -147,10 +147,10 @@ def parse_format(xml_file):
     types_scales = {}

     for prod in tree.find("product"):
-        ascii = (prod.tag in ["mphr", "sphr"])
+        text = (prod.tag in ["mphr", "sphr"])
         res = []
         for i in prod:
-            lres = CASES[i.tag](i, ascii)
+            lres = CASES[i.tag](i, text)
             if lres is not None:
                 res.append(lres)
         types_scales[(prod.tag, int(prod.get("subclass")))] = res
diff --git a/satpy/resample.py b/satpy/resample.py
index c8ed073ae5..ddab90be82 100644
--- a/satpy/resample.py
+++ b/satpy/resample.py
@@ -823,7 +823,7 @@ def compute(self, data, **kwargs):
         """Call the resampling."""
         raise NotImplementedError("Use the sub-classes")

-    def resample(self, data, **kwargs):
+    def resample(self, data, **kwargs):  # noqa: D417
         """Resample `data` by calling `precompute` and `compute` methods.

         Args:
@@ -899,7 +899,7 @@ class BucketAvg(BucketResamplerBase):

     """

-    def compute(self, data, fill_value=np.nan, skipna=True, **kwargs):
+    def compute(self, data, fill_value=np.nan, skipna=True, **kwargs):  # noqa: D417
         """Call the resampling.

         Args:
diff --git a/satpy/scene.py b/satpy/scene.py
index d96c81a0e4..9d9057c907 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -660,7 +660,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True):
             else:
                 replace_anc(new_ds, pres)

-    def slice(self, key):
+    def slice(self, key):  # noqa: A003
         """Slice Scene by dataset index.

         .. note::

@@ -1384,7 +1384,7 @@ def unload(self, keepables=None):
                 LOG.debug("Unloading dataset: %r", ds_id)
                 del self._datasets[ds_id]

-    def load(self, wishlist, calibration="*", resolution="*",
+    def load(self, wishlist, calibration="*", resolution="*",  # noqa: D417
              polarization="*", level="*", modifiers="*", generate=True, unload=True,
              **kwargs):
         """Read and generate requested datasets.
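Most of the `# noqa: D417` markers in this changeset, like the one on `Scene.load` above, exist because ruff's D417 rule (undocumented-param) requires every named parameter to be described in a docstring's `Args:` section, and many of these Google-style docstrings intentionally document only a subset. A hypothetical sketch of the difference; the function names are invented:

```python
def fully_documented(data, mask_surface=True):
    """Mask a dataset.

    Args:
        data: Dataset to mask.
        mask_surface (bool): Whether to mask surface-level values.
    """


def partially_documented(data, mask_surface=True):  # noqa: D417
    """Mask a dataset.

    Args:
        mask_surface (bool): Whether to mask surface-level values.
    """
```

Without the suppression, ruff would flag `partially_documented` for leaving `data` out of its `Args:` section.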
diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py
index e1b426dce2..b769e45608 100644
--- a/satpy/tests/modifier_tests/test_parallax.py
+++ b/satpy/tests/modifier_tests/test_parallax.py
@@ -49,7 +49,7 @@ def fake_tle():
         line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817")


-def _get_fake_areas(center, sizes, resolution, code=4326):
+def _get_fake_areas(center, sizes, resolution, code=4326):  # noqa: D417
     """Get multiple square areas with the same center.

     Returns multiple square areas centered at the same location
diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
index 1c7d2c78ef..969c497410 100644
--- a/satpy/tests/reader_tests/test_abi_l1b.py
+++ b/satpy/tests/reader_tests/test_abi_l1b.py
@@ -131,11 +131,13 @@ def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset:


 def generate_l1b_filename(chan_name: str) -> str:
+    """Generate an L1b filename."""
     return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc"


 @pytest.fixture()
 def c01_refl(tmp_path) -> xr.DataArray:
+    """Load C01 reflectances."""
     with _apply_dask_chunk_size():
         reader = _create_reader_for_data(tmp_path, "C01", None, 1000)
         return reader.load(["C01"])["C01"]
@@ -143,6 +145,7 @@ def c01_refl(tmp_path) -> xr.DataArray:

 @pytest.fixture()
 def c01_rad(tmp_path) -> xr.DataArray:
+    """Load C01 radiances."""
     with _apply_dask_chunk_size():
         reader = _create_reader_for_data(tmp_path, "C01", None, 1000)
         return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"]
@@ -150,6 +153,7 @@ def c01_rad(tmp_path) -> xr.DataArray:

 @pytest.fixture()
 def c01_rad_h5netcdf(tmp_path) -> xr.DataArray:
+    """Load C01 radiances through h5netcdf."""
     shape = RAD_SHAPE[1000]
     rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0
     rad_data = (rad_data + 1.0) / 0.5
@@ -172,6 +176,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray:

 @pytest.fixture()
 def c01_counts(tmp_path) -> xr.DataArray:
+    """Load C01 counts."""
     with _apply_dask_chunk_size():
         reader = _create_reader_for_data(tmp_path, "C01", None, 1000)
         return reader.load([DataQuery(name="C01", calibration="counts")])["C01"]
@@ -179,6 +184,7 @@ def c01_counts(tmp_path) -> xr.DataArray:

 @pytest.fixture()
 def c07_bt_creator(tmp_path) -> Callable:
+    """Create a loader for C07 brightness temperatures."""
     def _load_data_array(
             clip_negative_radiances: bool = False,
     ):
diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py
index e71534fbd2..0c22f5b3f1 100644
--- a/satpy/tests/reader_tests/test_satpy_cf_nc.py
+++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py
@@ -89,6 +89,7 @@ def _create_test_netcdf(filename, resolution=742):

 @pytest.fixture(scope="session")
 def cf_scene():
+    """Create a CF scene."""
     tstart = datetime(2019, 4, 1, 12, 0)
     tend = datetime(2019, 4, 1, 12, 15)
     data_visir = np.array([[1, 2], [3, 4]])
@@ -214,6 +215,7 @@ def cf_scene():

 @pytest.fixture()
 def nc_filename(tmp_path):
+    """Create an nc filename for VIIRS M-band data."""
     now = datetime.utcnow()
     filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc"
     return str(tmp_path / filename)
@@ -221,6 +223,7 @@ def nc_filename(tmp_path):

 @pytest.fixture()
 def nc_filename_i(tmp_path):
+    """Create an nc filename for VIIRS I-band data."""
     now = datetime.utcnow()
     filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc"
     return str(tmp_path / filename)
diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
index 49206962e5..7ec34fd9bf 100644
--- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
@@ -31,6 +31,7 @@

 @pytest.fixture()
 def nc_filename(tmp_path):
+    """Create an nc test data file and return its filename."""
     now = datetime.datetime.utcnow()
     filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc"
     filename_str = str(tmp_path / filename)
diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py
index e3fbd73272..f899cd537c 100644
--- a/satpy/tests/reader_tests/test_virr_l1b.py
+++ b/satpy/tests/reader_tests/test_virr_l1b.py
@@ -158,7 +158,7 @@ def _fy3_helper(self, platform_name, reader, Emissive_units):
                                 "solar_azimuth_angle", "sensor_azimuth_angle"]
                    assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"]
                    assert ("longitude", "latitude") == attributes["coordinates"]
-                    assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)
+                    np.testing.assert_allclose(band_values[dataset["name"]], ds[ds.shape[0] // 2][ds.shape[1] // 2], rtol=1e-6)
                    assert "valid_range" not in ds.attrs

     def test_fy3b_file(self):
diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py
index 6b5f74ee59..d59019e3f7 100644
--- a/satpy/tests/scene_tests/test_resampling.py
+++ b/satpy/tests/scene_tests/test_resampling.py
@@ -316,17 +316,20 @@ def test_resample_reduce_data_toggle(self, rs):
            ds_walker.return_value = test_order
            slice_data.side_effect = orig_slice_data
            scene.resample(target_area, reduce_data=False)
-            assert not slice_data.called
-            assert not get_area_slices.called
+            slice_data.assert_not_called()
+            get_area_slices.assert_not_called()
            scene.resample(target_area)
-            assert slice_data.called_once
-            assert get_area_slices.called_once
+            assert slice_data.call_count == 3
+            assert get_area_slices.call_count == 1
+            assert get_area_slices_big.call_count == 1
            scene.resample(target_area, reduce_data=True)
            # 2 times for each dataset
            # once for default (reduce_data=True)
            # once for kwarg forced to `True`
            assert slice_data.call_count == 2 * 3
-            assert get_area_slices.called_once
+            # get area slices is called again, once per area
+            assert get_area_slices.call_count == 2
+            assert get_area_slices_big.call_count == 2

     def test_resample_ancillary(self):
         """Test that the Scene reducing data does not affect final output."""
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 70bc2abf25..7fbe177bfb 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -401,7 +401,7 @@ def setUp(self):
         start_time = datetime(2018, 1, 1, 18, 0, 0)

         # RGB
-        a = np.zeros((3, 2, 2), dtype=np.float64)
+        a = np.zeros((3, 2, 2), dtype=np.float32)
         a[:, 0, 0] = 0.1
         a[:, 0, 1] = 0.2
         a[:, 1, 0] = 0.3
@@ -409,7 +409,7 @@ def setUp(self):
         a = da.from_array(a, a.shape)
         self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time},
                                    coords={"bands": bands}, dims=("bands", "y", "x"))
-        b = np.zeros((3, 2, 2), dtype=np.float64)
+        b = np.zeros((3, 2, 2), dtype=np.float32)
         b[:, 0, 0] = np.nan
         b[:, 0, 1] = 0.25
         b[:, 1, 0] = 0.50
@@ -418,7 +418,7 @@ def setUp(self):
         self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time},
                                    coords={"bands": bands}, dims=("bands", "y", "x"))

-        sza = np.array([[80., 86.], [94., 100.]])
+        sza = np.array([[80., 86.], [94., 100.]], dtype=np.float32)
         sza = da.from_array(sza, sza.shape)
         self.sza = xr.DataArray(sza, dims=("y", "x"))

@@ -442,8 +442,9 @@ def test_daynight_sza(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_night")
         res = comp((self.data_a, self.data_b, self.sza))
         res = res.compute()
-        expected = np.array([[0., 0.22122352], [0.5, 1.]])
-        np.testing.assert_allclose(res.values[0], expected)
+        expected = np.array([[0., 0.22122374], [0.5, 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
+        np.testing.assert_allclose(res.values[0], expected, rtol=1e-6)

     def test_daynight_area(self):
         """Test compositor both day and night portions when SZA data is not provided."""
@@ -453,7 +454,8 @@ def test_daynight_area(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_night")
         res = comp((self.data_a, self.data_b))
         res = res.compute()
-        expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]])
+        expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         for i in range(3):
             np.testing.assert_allclose(res.values[i], expected_channel)

@@ -465,8 +467,9 @@ def test_night_only_sza_with_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True)
         res = comp((self.data_b, self.sza))
         res = res.compute()
-        expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]])
-        expected_alpha = np.array([[0., 0.33296056], [1., 1.]])
+        expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32)
+        expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected_red_channel)
         np.testing.assert_allclose(res.values[-1], expected_alpha)

@@ -478,7 +481,8 @@ def test_night_only_sza_without_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False)
         res = comp((self.data_a, self.sza))
         res = res.compute()
-        expected = np.array([[0., 0.11042631], [0.66835017, 1.]])
+        expected = np.array([[0., 0.11042609], [0.6683502, 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected)
         assert "A" not in res.bands

@@ -490,8 +494,9 @@ def test_night_only_area_with_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True)
         res = comp((self.data_b,))
         res = res.compute()
-        expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]])
-        expected_alpha = np.array([[np.nan, 0.], [0., 0.]])
+        expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32)
+        expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected_l_channel)
         np.testing.assert_allclose(res.values[-1], expected_alpha)

@@ -503,7 +508,8 @@ def test_night_only_area_without_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False)
         res = comp((self.data_b,))
         res = res.compute()
-        expected = np.array([[np.nan, 0.], [0., 0.]])
+        expected = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected)
         assert "A" not in res.bands

@@ -515,8 +521,9 @@ def test_day_only_sza_with_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True)
         res = comp((self.data_a, self.sza))
         res = res.compute()
-        expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]])
-        expected_alpha = np.array([[1., 0.66703944], [0., 0.]])
+        expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32)
+        expected_alpha = np.array([[1., 0.6670401], [0., 0.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected_red_channel)
         np.testing.assert_allclose(res.values[-1], expected_alpha)

@@ -528,7 +535,8 @@ def test_day_only_sza_without_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False)
         res = comp((self.data_a, self.sza))
         res = res.compute()
-        expected_channel_data = np.array([[0., 0.22122352], [0., 0.]])
+        expected_channel_data = np.array([[0., 0.22122373], [0., 0.]], dtype=np.float32)
+        assert res.dtype == np.float32
         for i in range(3):
             np.testing.assert_allclose(res.values[i], expected_channel_data)
         assert "A" not in res.bands

@@ -541,8 +549,9 @@ def test_day_only_area_with_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True)
         res = comp((self.data_a,))
         res = res.compute()
-        expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]])
-        expected_alpha = np.array([[1., 1.], [1., 1.]])
+        expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32)
+        expected_alpha = np.array([[1., 1.], [1., 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected_l_channel)
         np.testing.assert_allclose(res.values[-1], expected_alpha)

@@ -554,8 +563,9 @@ def test_day_only_area_with_alpha_and_missing_data(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True)
         res = comp((self.data_b,))
         res = res.compute()
-        expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]])
-        expected_alpha = np.array([[np.nan, 1.], [1., 1.]])
+        expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32)
+        expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected_l_channel)
         np.testing.assert_allclose(res.values[-1], expected_alpha)

@@ -567,7 +577,8 @@ def test_day_only_area_without_alpha(self):
         comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False)
         res = comp((self.data_a,))
         res = res.compute()
-        expected = np.array([[0., 0.33164983], [0.66835017, 1.]])
+        expected = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32)
+        assert res.dtype == np.float32
         np.testing.assert_allclose(res.values[0], expected)
         assert "A" not in res.bands
diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py
index 4aece73487..81ce5f3ad8 100644
--- a/satpy/tests/test_modifiers.py
+++ b/satpy/tests/test_modifiers.py
@@ -119,13 +119,15 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit):
            sunz_ds1 = sunz_ds1.astype(np.float32)
         comp = SunZenithCorrector(name="sza_test", modifiers=tuple())
         res = comp((sunz_ds1,), test_attr="test")
-        np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
+        np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]),
+                                   rtol=1e-6)
         assert "y" in res.coords
         assert "x" in res.coords
         ds1 = sunz_ds1.copy().drop_vars(("y", "x"))
         res = comp((ds1,), test_attr="test")
         res_np = res.compute()
-        np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
+        np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]),
+                                   rtol=1e-6)
         assert res.dtype == res_np.dtype
         assert "y" not in res.coords
         assert "x" not in res.coords

@@ -273,7 +275,7 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza):

         # due to copying of DataArrays, self.get_lonlats is not the same as the one that was called
         # we must used the area from the final result DataArray
-        res.attrs["area"].get_lonlats.assert_called()
+        res.attrs["area"].get_lonlats.assert_called_with(chunks=((2,), (2,)), dtype=self.nir.dtype)
         sza.assert_called_with(self.start_time, self.lons, self.lats)
         self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None)
         assert np.allclose(res.data, self.refl * 100).compute()
diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py
index 7475b04d24..35fd8a27bb 100644
--- a/satpy/tests/test_node.py
+++ b/satpy/tests/test_node.py
@@ -26,7 +26,7 @@ class FakeCompositor:
     """A fake compositor."""

-    def __init__(self, id):
+    def __init__(self, id):  # noqa: A002
         """Set up the fake compositor."""
         self.id = id
diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index 378f3fdb5a..d91e2b6fed 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -637,7 +637,7 @@ def test_available_readers_base_loader(self, monkeypatch):
         from satpy import available_readers
         from satpy._config import glob_config

-        def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0):
+        def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0):  # noqa: A002
            if name in ("netcdf4", ):
                raise ImportError(f"Mocked import error {name}")
            return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level)
diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py
index a9c60bdf90..e05150a571 100644
--- a/satpy/tests/writer_tests/test_ninjogeotiff.py
+++ b/satpy/tests/writer_tests/test_ninjogeotiff.py
@@ -630,7 +630,7 @@ def test_write_and_read_file_units(
     np.testing.assert_allclose(float(tgs["ninjo_Gradient"]),
                                0.467717, rtol=1e-5)
     np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]),
-                               -79.86771)
+                               -79.86771, rtol=1e-5)
     fn2 = os.fspath(tmp_path / "test2.tif")
     with caplog.at_level(logging.WARNING):
         ngtw.save_dataset(
diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index 9bab65fe35..15680e8091 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -422,7 +422,7 @@ def __call__(self):
 class LetteredTileGenerator(NumberedTileGenerator):
     """Helper class to generate per-tile metadata for lettered tiles."""

-    def __init__(self, area_definition, extents, sector_crs,
+    def __init__(self, area_definition, extents, sector_crs,  # noqa: D417
                  cell_size=(2000000, 2000000),
                  num_subtiles=None, use_sector_reference=False):
         """Initialize tile information for later generation.
@@ -1501,7 +1501,7 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name):
         return ds_info

     # TODO: Add additional untiled variable support
-    def save_datasets(self, datasets, sector_id=None,
+    def save_datasets(self, datasets, sector_id=None,  # noqa: D417
                       source_name=None,
                       tile_count=(1, 1), tile_size=None,
                       lettered_grid=False, num_subtiles=None,
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 506a8bf561..301df399c8 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -1092,7 +1092,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs):
         """Save the *dataset* to a given *filename*."""
         return self.save_datasets([dataset], filename, **kwargs)

-    def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,
+    def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,  # noqa: D417
                       flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False,
                       include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs):
         """Save the given datasets in one netCDF file.
diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py
index 1a522ecd68..229cf777db 100644
--- a/satpy/writers/geotiff.py
+++ b/satpy/writers/geotiff.py
@@ -137,7 +137,7 @@ def separate_init_kwargs(cls, kwargs):

         return init_kwargs, kwargs

-    def save_image(
+    def save_image(  # noqa: D417
         self,
         img: XRImage,
         filename: Optional[str] = None,
diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py
index a8f603861e..5f88cc52ed 100644
--- a/satpy/writers/ninjogeotiff.py
+++ b/satpy/writers/ninjogeotiff.py
@@ -103,7 +103,7 @@ class NinJoGeoTIFFWriter(GeoTIFFWriter):

     scale_offset_tag_names = ("ninjo_Gradient", "ninjo_AxisIntercept")

-    def save_image(
+    def save_image(  # noqa: D417
         self, image, filename=None, fill_value=None,
         compute=True, keep_palette=False, cmap=None, overviews=None,
         overviews_minsize=256, overviews_resampling=None,
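The float32 expectations in the tests above, and the `foreground_data.dtype.type(0)` change in `satpy/composites/__init__.py`, both hinge on NumPy's scalar promotion rules, which NumPy 2.0 (NEP 50) makes stricter for NumPy scalars. A short sketch of the behavior this changeset is defending against:

```python
import numpy as np

a = np.zeros((2, 2), dtype=np.float32)

# A plain Python scalar is "weak" and adopts the array's dtype:
print((a * 0).dtype)                 # float32
# Under NEP 50, a NumPy float64 scalar is "strong" and promotes the result:
print((a * np.float64(0)).dtype)     # float64 on NumPy 2.0
# Casting constants to the array's own dtype keeps results stable
# regardless of which promotion rules are in effect:
print((a * a.dtype.type(0)).dtype)   # float32 everywhere
```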