diff --git a/.github/workflows/ci_action.yml b/.github/workflows/ci_action.yml index 49eb61c8..7410d313 100644 --- a/.github/workflows/ci_action.yml +++ b/.github/workflows/ci_action.yml @@ -85,7 +85,9 @@ jobs: cache: pip - name: Install packages - run: pip install -e .[DEV] --upgrade --upgrade-strategy eager + run: | # must install async-timeout until ray fixes issue + pip install -e .[DEV] --upgrade --upgrade-strategy eager + pip install async-timeout - name: Run full tests and code coverage if: ${{ matrix.full_test_suite }} @@ -110,3 +112,20 @@ jobs: files: coverage.xml fail_ci_if_error: true verbose: false + + mirror-and-integration-test-on-gitlab: + if: github.event_name == 'push' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Mirror + trigger CI + uses: SvanBoxel/gitlab-mirror-and-ci-action@master + with: + args: "https://git.sinergise.com/eo/code/eo-learn/" + env: + FOLLOW_TAGS: "true" + GITLAB_HOSTNAME: "git.sinergise.com" + GITLAB_USERNAME: "github-action" + GITLAB_PASSWORD: ${{ secrets.GITLAB_PASSWORD }} + GITLAB_PROJECT_ID: "164" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci_trigger.yml b/.github/workflows/ci_trigger.yml index 558c6f45..5bdd9419 100644 --- a/.github/workflows/ci_trigger.yml +++ b/.github/workflows/ci_trigger.yml @@ -1,30 +1,19 @@ -name: mirror_and_trigger +name: trigger on: - pull_request: - push: - branches: - - "master" - - "develop" - workflow_call: release: types: - published jobs: - mirror-and-integration-test-on-gitlab: - if: github.event_name == 'push' + trigger: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 - - name: Mirror + trigger CI - uses: SvanBoxel/gitlab-mirror-and-ci-action@master - with: - args: "https://git.sinergise.com/eo/code/eo-learn/" - env: - FOLLOW_TAGS: "true" - GITLAB_HOSTNAME: "git.sinergise.com" - GITLAB_USERNAME: "github-action" - GITLAB_PASSWORD: ${{ secrets.GITLAB_PASSWORD }} - GITLAB_PROJECT_ID: "164" - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Trigger API + run: > + curl -X POST --fail \ + -F token=${{ secrets.GITLAB_PIPELINE_TRIGGER_TOKEN }} \ + -F ref=main \ + -F variables[CUSTOM_RUN_TAG]=auto \ + -F variables[LAYER_NAME]=dotai-eo \ + https://git.sinergise.com/api/v4/projects/1031/trigger/pipeline diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4a40cbbd..9fe3e598 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,7 +2,6 @@ image: python:3.9 stages: - test - - build run_sh_integration_tests: stage: test @@ -14,17 +13,3 @@ run_sh_integration_tests: - pip install .[DEV] - sentinelhub.config --sh_client_id "$SH_CLIENT_ID" --sh_client_secret "$SH_CLIENT_SECRET" > /dev/null # Gitlab can't mask SH_CLIENT_SECRET in logs - pytest -m sh_integration - -build_docker_image: - stage: build - needs: [] - rules: - - if: $CI_COMMIT_TAG # run only on releases - when: always - variables: - CUSTOM_RUN_TAG: auto # this will create images with the latest tag and the version tag - LAYER_NAME: dotai-eo - - when: manual - trigger: - project: eo/infra/docker - allow_failure: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 14a6b970..1948c24e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,18 +13,18 @@ repos: - id: debug-statements - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.12.1 hooks: - id: black language_version: python3 - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.1.4" + rev: "v0.1.11" hooks: - id: ruff - repo: https://github.com/nbQA-dev/nbQA - rev: 1.7.0 + rev: 1.7.1 
hooks: - id: nbqa-black - id: nbqa-ruff diff --git a/.zenodo.json b/.zenodo.json index 1857c50b..f9362877 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -206,6 +206,9 @@ }, { "id": "101004112" + }, + { + "id": "101059548" } ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index e93f75a7..ba43ba9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +## [Version 1.5.3] - 2024-01-10 + +- Fix `numpy<2` in anticipation of numpy 2.0 release. + + ## [Version 1.5.2] - 2023-11-07 - `RayExecutor` can now forward remote kwargs to ray jobs. diff --git a/README.md b/README.md index 7e50cea6..c94f2a8e 100644 --- a/README.md +++ b/README.md @@ -202,4 +202,4 @@ See [LICENSE](https://github.com/sentinel-hub/eo-learn/blob/master/LICENSE). ## Acknowledgements -This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreements No. 776115 and No. 101004112. +This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreements No. 776115, No. 101004112 and No. 101059548. diff --git a/eolearn/__init__.py b/eolearn/__init__.py index 3e77f3bb..d32baa74 100644 --- a/eolearn/__init__.py +++ b/eolearn/__init__.py @@ -1,6 +1,6 @@ """Main module of the `eolearn` package.""" -__version__ = "1.5.2" +__version__ = "1.5.3" import importlib.util import warnings diff --git a/eolearn/core/constants.py b/eolearn/core/constants.py index d661732f..3205577e 100644 --- a/eolearn/core/constants.py +++ b/eolearn/core/constants.py @@ -13,8 +13,9 @@ from enum import Enum, EnumMeta from typing import Any, TypeVar +from typing_extensions import deprecated + from sentinelhub import BBox, MimeType -from sentinelhub.exceptions import deprecated_function from .exceptions import EODeprecationWarning @@ -157,14 +158,16 @@ def is_image(self) -> bool: """True if FeatureType stores a dictionary with arrays that represent images. False otherwise.""" return self.is_array() and self.is_spatial() - @deprecated_function( - EODeprecationWarning, "Use the equivalent `is_array` method, or consider if `is_image` fits better." + @deprecated( + "The method `is_raster` has been deprecated. Use the equivalent `is_array` method, or consider if `is_image`" + " fits better.", + category=EODeprecationWarning, ) def is_raster(self) -> bool: """True if FeatureType stores a dictionary with raster data. False otherwise.""" return self.is_array() - @deprecated_function(EODeprecationWarning) + @deprecated("The method `has_dict` has been deprecated.", category=EODeprecationWarning) def has_dict(self) -> bool: """True if FeatureType stores a dictionary. False otherwise.""" return self in [ @@ -181,7 +184,7 @@ def has_dict(self) -> bool: FeatureType.META_INFO, ] - @deprecated_function(EODeprecationWarning) + @deprecated("The method `contains_ndarrays` has been deprecated.", category=EODeprecationWarning) def contains_ndarrays(self) -> bool: """True if FeatureType stores a dictionary of numpy.ndarrays. 
False otherwise.""" return self.is_array() @@ -201,7 +204,7 @@ def ndim(self) -> int | None: }[self] return None - @deprecated_function(EODeprecationWarning) + @deprecated("The method `type` has been deprecated.", category=EODeprecationWarning) def type(self) -> type: """Returns type of the data for the given FeatureType.""" if self is FeatureType.TIMESTAMPS: @@ -210,7 +213,7 @@ def type(self) -> type: return BBox return dict - @deprecated_function(EODeprecationWarning) + @deprecated("The method `file_format` has been deprecated.", category=EODeprecationWarning) def file_format(self) -> MimeType: """Returns a mime type enum of a file format into which data of the feature type will be serialized""" if self.is_array(): diff --git a/eolearn/core/core_tasks.py b/eolearn/core/core_tasks.py index 8ae4823b..cf4fc0db 100644 --- a/eolearn/core/core_tasks.py +++ b/eolearn/core/core_tasks.py @@ -18,9 +18,9 @@ import fs import numpy as np from fs.base import FS +from typing_extensions import deprecated from sentinelhub import SHConfig -from sentinelhub.exceptions import deprecated_class from .constants import FeatureType, OverwritePermission from .eodata import EOPatch @@ -56,7 +56,9 @@ def execute(self, eopatch: EOPatch) -> EOPatch: return eopatch.copy(features=self.features, deep=self.deep, copy_timestamps=self.copy_timestamps) -@deprecated_class(EODeprecationWarning, "Use `CopyTask` with the configuration `deep=True`.") +@deprecated( + "Use `CopyTask` with the configuration `deep=True` instead of `DeepCopyTask`.", category=EODeprecationWarning +) class DeepCopyTask(CopyTask): """[DEPRECATED] Makes a deep copy of the given EOPatch.""" @@ -119,7 +121,7 @@ def __init__( all features will be saved. :param overwrite_permission: A level of permission for overwriting an existing EOPatch to 9 (highest compression). - :save_timestamps: Whether to save the timestamps of the EOPatch. With the `"auto"` setting timestamps are saved + :save_timestamps: Save the timestamps of the EOPatch. With the `"auto"` setting timestamps are saved if `features=...` or if other temporal features are being saved. :param use_zarr: Saves numpy-array based features into Zarr files. Requires ZARR extra dependencies. :param temporal_selection: Writes all of the data to the chosen temporal indices of preexisting arrays. Can be @@ -191,7 +193,7 @@ def __init__( default configuration will be taken. :param features: A collection of features to be loaded. By default, all features will be loaded. :param lazy_loading: If `True` features will be lazy loaded. - :load_timestamps: Whether to load the timestamps of the EOPatch. With the `"auto"` setting timestamps are loaded + :load_timestamps: Load the timestamps of the EOPatch. With the `"auto"` setting timestamps are loaded if `features=...` or if other temporal features are being loaded. :param temporal_selection: Only loads data corresponding to the chosen indices. Can also be a callable that, given a list of timestamps, returns a list of booleans declaring which temporal slices to load. 
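Note: the hunks above (and several that follow) replace `sentinelhub.exceptions.deprecated_function`/`deprecated_class` with `typing_extensions.deprecated` (PEP 702), which is also why `typing-extensions>=4.5.0` appears in pyproject.toml further down. A minimal sketch of the new decorator pattern, assuming eo-learn and typing-extensions>=4.5.0 are installed; the class and method names below are hypothetical, only the decorator usage mirrors this diff:

    from typing_extensions import deprecated

    from eolearn.core.exceptions import EODeprecationWarning


    class SomeContainer:
        """Hypothetical helper, only here to illustrate the decorator pattern."""

        @deprecated("The method `old_size` has been deprecated. Use `size` instead.", category=EODeprecationWarning)
        def old_size(self) -> int:
            # deprecated alias kept for backwards compatibility; delegates to the new method
            return self.size()

        def size(self) -> int:
            return 42


    SomeContainer().old_size()  # emits an EODeprecationWarning with the message above
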
diff --git a/eolearn/core/eodata.py b/eolearn/core/eodata.py index c275a997..b694d022 100644 --- a/eolearn/core/eodata.py +++ b/eolearn/core/eodata.py @@ -34,9 +34,9 @@ import geopandas as gpd import numpy as np from fs.base import FS +from typing_extensions import deprecated from sentinelhub import CRS, BBox, parse_time -from sentinelhub.exceptions import deprecated_function from .constants import FEATURETYPE_DEPRECATION_MSG, TIMESTAMP_COLUMN, FeatureType, OverwritePermission from .eodata_io import FeatureIO, load_eopatch_content, save_eopatch @@ -427,7 +427,10 @@ def __contains__(self, key: object) -> bool: "`(feature_type, feature_name)` pairs." ) - @deprecated_function(EODeprecationWarning, "Use the `merge` method instead.") + @deprecated( + "The `+` operator for EOPatches has been deprecated. Use the function `eolearn.core.merge_eopatches` instead.", + category=EODeprecationWarning, + ) def __add__(self, other: EOPatch) -> EOPatch: """Merges two EOPatches into a new EOPatch.""" return self.merge(other) @@ -499,7 +502,7 @@ def __copy__( """Returns a new EOPatch with shallow copies of given features. :param features: A collection of features or feature types that will be copied into new EOPatch. - :param copy_timestamps: Whether to copy timestamps to the new EOPatch. By default copies them over if all + :param copy_timestamps: Copy timestamps to the new EOPatch. By default copies them over if all features are copied or if any temporal features are getting copied. """ if not features: # For some reason deepcopy and copy pass {} by default @@ -526,7 +529,7 @@ def __deepcopy__( :param memo: built-in parameter for memoization :param features: A collection of features or feature types that will be copied into new EOPatch. - :param copy_timestamps: Whether to copy timestamps to the new EOPatch. By default copies them over if all + :param copy_timestamps: Copy timestamps to the new EOPatch. By default copies them over if all features are copied or if any temporal features are getting copied. """ if not features: # For some reason deepcopy and copy pass {} by default @@ -564,7 +567,7 @@ def copy( :param features: Features to be copied into a new `EOPatch`. By default, all features will be copied. :param deep: If `True` it will make a deep copy of all data inside the `EOPatch`. Otherwise, only a shallow copy of `EOPatch` will be made. Note that `BBOX` and `TIMESTAMPS` will be copied even with a shallow copy. - :param copy_timestamps: Whether to copy timestamps to the new EOPatch. By default copies them over if all + :param copy_timestamps: Copy timestamps to the new EOPatch. By default copies them over if all features are copied or if any temporal features are getting copied. :return: An EOPatch copy. 
""" @@ -591,14 +594,14 @@ def get_features(self) -> list[Feature]: :return: List of non-empty features """ - feature_list: list[Feature] = [] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=FEATURETYPE_DEPRECATION_MSG.format(".*?", ".*?")) removed_ftypes = {FeatureType.BBOX, FeatureType.TIMESTAMPS} # list comprehensions make ignoring hard - for feature_type in (ftype for ftype in FeatureType if ftype not in removed_ftypes): - for feature_name in self[feature_type]: - feature_list.append((feature_type, feature_name)) - return feature_list + return [ + (feature_type, feature_name) + for feature_type in (ftype for ftype in FeatureType if ftype not in removed_ftypes) + for feature_name in self[feature_type] + ] def save( self, @@ -620,7 +623,7 @@ def save( :param overwrite_permission: A level of permission for overwriting an existing EOPatch :param filesystem: An existing filesystem object. If not given it will be initialized according to the `path` parameter. - :save_timestamps: Whether to save the timestamps of the EOPatch. With the `"auto"` setting timestamps are saved + :save_timestamps: Save the timestamps of the EOPatch. With the `"auto"` setting timestamps are saved if `features=...` or if other temporal features are being saved. :param use_zarr: Saves numpy-array based features into Zarr files. Requires ZARR extra dependencies. :param temporal_selection: Writes all of the data to the chosen temporal indices of preexisting arrays. Can be @@ -666,7 +669,7 @@ def load( :param lazy_loading: If `True` features will be lazy loaded. :param filesystem: An existing filesystem object. If not given it will be initialized according to the `path` parameter. - :load_timestamps: Whether to load the timestamps of the EOPatch. With the `"auto"` setting timestamps are loaded + :load_timestamps: Load the timestamps of the EOPatch. With the `"auto"` setting timestamps are loaded if `features=...` or if other temporal features are being loaded. :param temporal_selection: Only loads data corresponding to the chosen indices. Can also be a callable that, given a list of timestamps, returns a list of booleans declaring which temporal slices to load. @@ -688,7 +691,10 @@ def load( _trigger_loading_for_eopatch_features(eopatch) return eopatch - @deprecated_function(EODeprecationWarning, "Use the function `eolearn.core.merge_eopatches` instead.") + @deprecated( + "The EOPatch method `merge` has been deprecated. Use the function `eolearn.core.merge_eopatches` instead.", + category=EODeprecationWarning, + ) def merge( self, *eopatches: EOPatch, @@ -727,7 +733,10 @@ def merge( self, *eopatches, features=features, time_dependent_op=time_dependent_op, timeless_op=timeless_op ) - @deprecated_function(EODeprecationWarning, "Please use the method `temporal_subset` instead.") + @deprecated( + "The method `consolidate_timestamps` has been deprecated. Use the method `temporal_subset` instead.", + category=EODeprecationWarning, + ) def consolidate_timestamps(self, timestamps: list[dt.datetime]) -> set[dt.datetime]: """Removes all frames from the EOPatch with a date not found in the provided timestamps list. 
diff --git a/eolearn/core/eoexecution.py b/eolearn/core/eoexecution.py index 59e35c16..5fc4643a 100644 --- a/eolearn/core/eoexecution.py +++ b/eolearn/core/eoexecution.py @@ -26,8 +26,7 @@ import fs from fs.base import FS - -from sentinelhub.exceptions import deprecated_function +from typing_extensions import deprecated from .eonode import EONode from .eoworkflow import EOWorkflow, WorkflowResults @@ -114,7 +113,7 @@ def __init__( object. The 2nd option is chosen only if `filesystem` parameter exists in the signature. - :param raise_on_temporal_mismatch: Whether to treat `TemporalDimensionWarning` as an exception. + :param raise_on_temporal_mismatch: Treat `TemporalDimensionWarning` as an exception. """ self.workflow = workflow self.execution_kwargs = self._parse_and_validate_execution_kwargs(execution_kwargs) @@ -326,7 +325,7 @@ def get_failed_executions(self) -> list[int]: def get_report_path(self, full_path: bool = True) -> str: """Returns the filename and file path of the report. - :param full_path: Whether to return full absolute paths or paths relative to the filesystem object. + :param full_path: Return full absolute paths or paths relative to the filesystem object. :return: Report filename """ if self.report_folder is None: @@ -354,7 +353,7 @@ def make_report(self, include_logs: bool = True) -> None: def get_log_paths(self, full_path: bool = True) -> list[str]: """Returns a list of file paths containing logs. - :param full_path: Whether to return full absolute paths or paths relative to the filesystem object. + :param full_path: Return full absolute paths or paths relative to the filesystem object. :return: A list of paths to log files. """ if self.report_folder is None: @@ -362,7 +361,7 @@ def get_log_paths(self, full_path: bool = True) -> list[str]: log_paths = [fs.path.combine(self.report_folder, f"eoexecution-{name}.log") for name in self.execution_names] return [get_full_path(self.filesystem, path) for path in log_paths] if full_path else log_paths - @deprecated_function(EODeprecationWarning) + @deprecated("The method `read_logs` has been deprecated.", category=EODeprecationWarning) def read_logs(self) -> list[str | None]: """Loads the content of log files if logs have been saved.""" if not self.save_logs: diff --git a/eolearn/core/eotask.py b/eolearn/core/eotask.py index 6910b261..50b94e47 100644 --- a/eolearn/core/eotask.py +++ b/eolearn/core/eotask.py @@ -20,7 +20,7 @@ from dataclasses import dataclass from typing import Any, Callable, Iterable, TypeVar -from sentinelhub.exceptions import deprecated_function +from typing_extensions import deprecated from .constants import FeatureType from .eodata import EOPatch @@ -33,7 +33,8 @@ Self = TypeVar("Self") PARSE_RENAMED_DEPRECATE_MSG = ( - "It will no longer be a method of the `EOTask`, but can be imported from `eolearn.core.utils.parsing`." + "The method will no longer be a method of `EOTask`, but can be imported as a function from" + " `eolearn.core.utils.parsing`." 
) @@ -41,10 +42,10 @@ class EOTask(metaclass=ABCMeta): """Base class for EOTask.""" parse_renamed_feature = staticmethod( - deprecated_function(EODeprecationWarning, PARSE_RENAMED_DEPRECATE_MSG)(parse_renamed_feature) + deprecated(PARSE_RENAMED_DEPRECATE_MSG, category=EODeprecationWarning)(parse_renamed_feature) ) parse_renamed_features = staticmethod( - deprecated_function(EODeprecationWarning, PARSE_RENAMED_DEPRECATE_MSG)(parse_renamed_features) + deprecated(PARSE_RENAMED_DEPRECATE_MSG, category=EODeprecationWarning)(parse_renamed_features) ) def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: diff --git a/eolearn/core/utils/parsing.py b/eolearn/core/utils/parsing.py index 632745e1..a530572e 100644 --- a/eolearn/core/utils/parsing.py +++ b/eolearn/core/utils/parsing.py @@ -159,7 +159,7 @@ def _parse_dict( def _parse_sequence( self, - features: SingleFeatureSpec | SequenceFeatureSpec, + features: SingleFeatureSpec | tuple[FeatureType, EllipsisType] | SequenceFeatureSpec, ) -> list[_ParserFeaturesSpec]: """Implements parsing and validation in case the input is a tuple describing a single feature or a sequence.""" diff --git a/eolearn/coregistration/coregistration.py b/eolearn/coregistration/coregistration.py index 3b0f886a..1a9603d1 100644 --- a/eolearn/coregistration/coregistration.py +++ b/eolearn/coregistration/coregistration.py @@ -104,7 +104,7 @@ def register( """Method that estimates the transformation between source and target image""" criteria = (cv2.TERM_CRITERIA_COUNT, self.max_iter, 0) warp_matrix_size = (3, 3) if warp_mode == cv2.MOTION_HOMOGRAPHY else (2, 3) - warp_matrix = np.eye(*warp_matrix_size, dtype=np.float32) + warp_matrix: np.ndarray = np.eye(*warp_matrix_size, dtype=np.float32) try: cv2.setNumThreads(self.num_threads) @@ -114,7 +114,7 @@ def register( warp_matrix, warp_mode, criteria, - valid_mask, + valid_mask, # type: ignore[arg-type] self.gauss_kernel_size, ) except cv2.error as cv2err: @@ -163,7 +163,7 @@ def execute(self, eopatch: EOPatch) -> EOPatch: def warp(self, img: np.ndarray, warp_matrix: np.ndarray, shape: tuple[int, int], flags: int) -> np.ndarray: """Transform the target image with the estimated transformation matrix""" if warp_matrix.shape == (3, 3): - return cv2.warpPerspective( + return cv2.warpPerspective( # type: ignore[call-overload] img.astype(np.float32), warp_matrix, shape, @@ -171,7 +171,7 @@ def warp(self, img: np.ndarray, warp_matrix: np.ndarray, shape: tuple[int, int], borderMode=self.border_mode, borderValue=self.border_value, ) - return cv2.warpAffine( + return cv2.warpAffine( # type: ignore[call-overload] img.astype(np.float32), warp_matrix, shape, @@ -213,8 +213,8 @@ def get_gradient(src: np.ndarray) -> np.ndarray: """ # Calculate the x and y gradients using Sobel operator src = src.astype(np.float32) - grad_x = cv2.Sobel(src, cv2.CV_32F, 1, 0, ksize=3) # type: ignore[attr-defined] - grad_y = cv2.Sobel(src, cv2.CV_32F, 0, 1, ksize=3) # type: ignore[attr-defined] + grad_x = cv2.Sobel(src, cv2.CV_32F, 1, 0, ksize=3) + grad_y = cv2.Sobel(src, cv2.CV_32F, 0, 1, ksize=3) # Combine and return the two gradients return cv2.addWeighted(np.absolute(grad_x), 0.5, np.absolute(grad_y), 0.5, 0) diff --git a/eolearn/features/extra/interpolation.py b/eolearn/features/extra/interpolation.py index 2e853a78..a6bbee5d 100644 --- a/eolearn/features/extra/interpolation.py +++ b/eolearn/features/extra/interpolation.py @@ -20,8 +20,7 @@ import numpy as np import scipy.interpolate from sklearn.gaussian_process import GaussianProcessRegressor - 
-from sentinelhub.exceptions import deprecated_class +from typing_extensions import deprecated from eolearn.core import EOPatch, EOTask, FeatureType from eolearn.core.exceptions import EODeprecationWarning, EOUserWarning @@ -274,12 +273,10 @@ def interpolate_data(self, data: np.ndarray, times: np.ndarray, resampled_times: # array defining index correspondence between reference times and resampled times min_time, max_time = np.min(resampled_times), np.max(resampled_times) - ori2res = np.array( - [ - np.abs(resampled_times - orig_time).argmin() if min_time <= orig_time <= max_time else None - for orig_time in times - ] - ) + ori2res = np.array([ + np.abs(resampled_times - orig_time).argmin() if min_time <= orig_time <= max_time else None + for orig_time in times + ]) # find NaNs that start or end a time-series row_nans, col_nans = np.where(self._get_start_end_nans(data)) @@ -442,9 +439,10 @@ def interpolate_data(self, data: np.ndarray, times: np.ndarray, resampled_times: return interpolation_function(data, times, resampled_times) -@deprecated_class( - EODeprecationWarning, - "Use `InterpolationTask` with `interpolation_object=scipy.interpolate.interp1d` and `kind='cubic'`", +@deprecated( + "The task `CubicInterpolationTask` has been deprecated. Use `InterpolationTask` with" + " `interpolation_object=scipy.interpolate.interp1d` and `kind='cubic'`", + category=EODeprecationWarning, ) class CubicInterpolationTask(InterpolationTask): """ @@ -455,8 +453,10 @@ def __init__(self, feature: SingleFeatureSpec, **kwargs: Any): super().__init__(feature, scipy.interpolate.interp1d, kind="cubic", **kwargs) -@deprecated_class( - EODeprecationWarning, "Use `InterpolationTask` with `interpolation_object=scipy.interpolate.UnivariateSpline`" +@deprecated( + "The task `SplineInterpolationTask` has been deprecated. Use `InterpolationTask` with" + " `interpolation_object=scipy.interpolate.UnivariateSpline`", + category=EODeprecationWarning, ) class SplineInterpolationTask(InterpolationTask): """[DEPRECATED] Implements `eolearn.features.InterpolationTask` by using `scipy.interpolate.UnivariateSpline`""" @@ -467,8 +467,10 @@ def __init__( super().__init__(feature, scipy.interpolate.UnivariateSpline, k=spline_degree, s=smoothing_factor, **kwargs) -@deprecated_class( - EODeprecationWarning, "Use `InterpolationTask` with `interpolation_object=scipy.interpolate.make_interp_spline`" +@deprecated( + "The task `BSplineInterpolationTask` has been deprecated. Use `InterpolationTask` with" + " `interpolation_object=scipy.interpolate.make_interp_spline`", + category=EODeprecationWarning, ) class BSplineInterpolationTask(InterpolationTask): """[DEPRECATED] Implements `eolearn.features.InterpolationTask` by using `scipy.interpolate.BSpline`""" @@ -477,8 +479,10 @@ def __init__(self, feature: SingleFeatureSpec, *, spline_degree: int = 3, **kwar super().__init__(feature, scipy.interpolate.make_interp_spline, k=spline_degree, **kwargs) -@deprecated_class( - EODeprecationWarning, "Use `InterpolationTask` with `interpolation_object=scipy.interpolate.Akima1DInterpolator`" +@deprecated( + "The task `AkimaInterpolationTask` has been deprecated. 
Use `InterpolationTask` with" + " `interpolation_object=scipy.interpolate.Akima1DInterpolator`", + category=EODeprecationWarning, ) class AkimaInterpolationTask(InterpolationTask): """[DEPRECATED] Implements `eolearn.features.InterpolationTask` by using `scipy.interpolate.Akima1DInterpolator`""" @@ -578,9 +582,10 @@ def get_interpolation_function(self, times: np.ndarray, series: np.ndarray) -> C return self.interpolation_object(times, series, axis=0, **self.interpolation_parameters) -@deprecated_class( - EODeprecationWarning, - "Use `ResamplingTask` with `interpolation_object=scipy.interpolate.interp1d` and `kind='nearest'`.", +@deprecated( + "The task `NearestResamplingTask` has been deprecated. Use `ResamplingTask` with" + " `interpolation_object=scipy.interpolate.interp1d` and `kind='nearest'`.", + category=EODeprecationWarning, ) class NearestResamplingTask(ResamplingTask): """ @@ -591,9 +596,10 @@ def __init__(self, feature: SingleFeatureSpec, resample_range: ResampleRangeType super().__init__(feature, scipy.interpolate.interp1d, resample_range, kind="nearest", **kwargs) -@deprecated_class( - EODeprecationWarning, - "Use `ResamplingTask` with `interpolation_object=scipy.interpolate.interp1d` and `kind='linear'`.", +@deprecated( + "The task `LinearResamplingTask` has been deprecated. Use `ResamplingTask` with" + " `interpolation_object=scipy.interpolate.interp1d` and `kind='linear'`.", + category=EODeprecationWarning, ) class LinearResamplingTask(ResamplingTask): """[DEPRECATED] Implements `eolearn.features.ResamplingTask` by using `scipy.interpolate.interp1d(kind='linear')`""" @@ -602,9 +608,10 @@ def __init__(self, feature: SingleFeatureSpec, resample_range: ResampleRangeType super().__init__(feature, scipy.interpolate.interp1d, resample_range, kind="linear", **kwargs) -@deprecated_class( - EODeprecationWarning, - "Use `ResamplingTask` with `interpolation_object=scipy.interpolate.interp1d` and `kind='cubic'`.", +@deprecated( + "The task `CubicResamplingTask` has been deprecated. Use `ResamplingTask` with" + " `interpolation_object=scipy.interpolate.interp1d` and `kind='cubic'`.", + category=EODeprecationWarning, ) class CubicResamplingTask(ResamplingTask): """[DEPRECATED] Implements `eolearn.features.ResamplingTask` by using `scipy.interpolate.interp1d(kind='cubic')`""" diff --git a/eolearn/io/geometry_io.py b/eolearn/io/geometry_io.py index 74f9b106..16ab1a8a 100644 --- a/eolearn/io/geometry_io.py +++ b/eolearn/io/geometry_io.py @@ -95,9 +95,9 @@ def dataset_crs(self) -> CRS: """Provides a CRS of dataset, it loads it lazily (i.e. 
the first time it is needed)""" if self._dataset_crs is None: is_on_s3 = self.full_path.startswith("s3://") - with fiona.Env(session=self.aws_session) if is_on_s3 else nullcontext(): - with fiona.open(self.full_path, **self.fiona_kwargs) as features: - self._dataset_crs = CRS(features.crs) + env = fiona.Env(session=self.aws_session) if is_on_s3 else nullcontext() + with env, fiona.open(self.full_path, **self.fiona_kwargs) as features: + self._dataset_crs = CRS(features.crs) return self._dataset_crs diff --git a/eolearn/io/raster_io.py b/eolearn/io/raster_io.py index 3f2abd45..c11f7dde 100644 --- a/eolearn/io/raster_io.py +++ b/eolearn/io/raster_io.py @@ -310,7 +310,7 @@ def _export_tiff( src_transform: Affine, ) -> None: """Export an EOPatch feature to tiff based on input channel range.""" - with rasterio.Env(), filesystem.openbin(path, "w") as file_handle: + with rasterio.Env(), filesystem.openbin(path, "w") as file_handle: # noqa: SIM117 with rasterio.open( file_handle, "w", diff --git a/eolearn/ml_tools/sampling.py b/eolearn/ml_tools/sampling.py index 66c2618a..a6e7a16b 100644 --- a/eolearn/ml_tools/sampling.py +++ b/eolearn/ml_tools/sampling.py @@ -54,7 +54,7 @@ def sample_by_values( :param n_samples_per_value: A dictionary specifying the amount of samples per value. Values that are not in the dictionary will not be sampled. :param rng: A random numbers generator. If not provided it will be initialized without a seed. - :param replace: Whether to sample with replacement. False means each value can only be chosen once. + :param replace: Sample with replacement. False means each value can only be chosen once. :return: A pair of numpy arrays first one containing row indices and second one containing column indices of sampled points. """ @@ -190,7 +190,7 @@ def __init__( :param sampling_feature: A timeless mask feature according to which points will be sampled. :param fraction: Fraction of points to sample. Can be dictionary mapping values of mask to fractions. :param exclude_values: Skips points that have these values in `sampling_mask` - :param replace: Whether to sample with replacement. False means each value can only be chosen once. + :param replace: Sample with replacement. False means each value can only be chosen once. :param mask_of_samples: An output mask timeless feature of counts how many times each pixel has been sampled. """ super().__init__(features_to_sample, mask_of_samples=mask_of_samples) @@ -275,7 +275,7 @@ def __init__( :param amount: The number of points to sample if integer valued and the fraction of all points if `float` :param sample_size: A tuple describing a size of sampled blocks. The size is defined as a tuple of number of rows and number of columns. - :param replace: Whether to sample with replacement. False means each value can only be chosen once. + :param replace: Sample with replacement. False means each value can only be chosen once. :param mask_of_samples: An output mask timeless feature of counts how many times each pixel has been sampled. 
""" super().__init__(features_to_sample, mask_of_samples=mask_of_samples) diff --git a/eolearn/visualization/eoexecutor.py b/eolearn/visualization/eoexecutor.py index 99d413eb..e8a26a13 100644 --- a/eolearn/visualization/eoexecutor.py +++ b/eolearn/visualization/eoexecutor.py @@ -163,17 +163,15 @@ def _get_node_descriptions(self) -> list[dict[str, Any]]: f" usually {np.mean(durations):.4g} ± {np.std(durations):.4g} seconds" ) - descriptions.append( - { - "name": f"{node_name} ({node.uid})", - "uid": node.uid, - "args": { - key: value.replace("<", "<").replace(">", ">") # type: ignore[attr-defined] - for key, value in node.task.private_task_config.init_args.items() - }, - "duration_report": duration_report, - } - ) + descriptions.append({ + "name": f"{node_name} ({node.uid})", + "uid": node.uid, + "args": { + key: value.replace("<", "<").replace(">", ">") # type: ignore[attr-defined] + for key, value in node.task.private_task_config.init_args.items() + }, + "duration_report": duration_report, + }) return descriptions def _render_execution_tracebacks(self, formatter: pygments.formatter.Formatter) -> list: diff --git a/eolearn/visualization/eoworkflow.py b/eolearn/visualization/eoworkflow.py index 325ed44c..9ce939bd 100644 --- a/eolearn/visualization/eoworkflow.py +++ b/eolearn/visualization/eoworkflow.py @@ -66,7 +66,7 @@ def _get_node_uid_to_dot_name_mapping(nodes: Sequence[EONode]) -> dict[str, str] dot_name_to_nodes[node.get_name()].append(node) node_to_dot_name = {} - for _, same_name_nodes in dot_name_to_nodes.items(): + for same_name_nodes in dot_name_to_nodes.values(): if len(same_name_nodes) == 1: node = same_name_nodes[0] node_to_dot_name[node.uid] = node.get_name() diff --git a/examples/core/CoreOverview.ipynb b/examples/core/CoreOverview.ipynb index 18f3af70..67407ac1 100644 --- a/examples/core/CoreOverview.ipynb +++ b/examples/core/CoreOverview.ipynb @@ -801,13 +801,11 @@ } ], "source": [ - "results = workflow.execute(\n", - " {\n", - " load_node: {\"eopatch_folder\": \"TestEOPatch\"},\n", - " add_feature_node: {\"data\": np.zeros((68, 3), dtype=np.uint8)},\n", - " save_node: {\"eopatch_folder\": \"WorkflowEOPatch\"},\n", - " }\n", - ")\n", + "results = workflow.execute({\n", + " load_node: {\"eopatch_folder\": \"TestEOPatch\"},\n", + " add_feature_node: {\"data\": np.zeros((68, 3), dtype=np.uint8)},\n", + " save_node: {\"eopatch_folder\": \"WorkflowEOPatch\"},\n", + "})\n", "\n", "results" ] diff --git a/examples/io/SentinelHubIO.ipynb b/examples/io/SentinelHubIO.ipynb index cbe6a281..5a67313b 100644 --- a/examples/io/SentinelHubIO.ipynb +++ b/examples/io/SentinelHubIO.ipynb @@ -374,12 +374,10 @@ "workflow_nodes = linearly_connect_tasks(input_task, add_indices, add_l2a_and_scl, add_dem, save, output_task)\n", "workflow = EOWorkflow(workflow_nodes)\n", "\n", - "result = workflow.execute(\n", - " {\n", - " workflow_nodes[0]: {\"bbox\": roi_bbox, \"time_interval\": time_interval},\n", - " workflow_nodes[-2]: {\"eopatch_folder\": \"eopatch\"},\n", - " }\n", - ")" + "result = workflow.execute({\n", + " workflow_nodes[0]: {\"bbox\": roi_bbox, \"time_interval\": time_interval},\n", + " workflow_nodes[-2]: {\"eopatch_folder\": \"eopatch\"},\n", + "})" ] }, { diff --git a/examples/land-cover-map/SI_LULC_pipeline.ipynb b/examples/land-cover-map/SI_LULC_pipeline.ipynb index 775a2501..23dc8645 100644 --- a/examples/land-cover-map/SI_LULC_pipeline.ipynb +++ b/examples/land-cover-map/SI_LULC_pipeline.ipynb @@ -744,12 +744,10 @@ "save_node = workflow_nodes[-1]\n", "execution_args = []\n", "for 
idx, bbox in enumerate(bbox_list[patch_ids]):\n", - " execution_args.append(\n", - " {\n", - " input_node: {\"bbox\": bbox, \"time_interval\": time_interval},\n", - " save_node: {\"eopatch_folder\": f\"eopatch_{idx}\"},\n", - " }\n", - " )\n", + " execution_args.append({\n", + " input_node: {\"bbox\": bbox, \"time_interval\": time_interval},\n", + " save_node: {\"eopatch_folder\": f\"eopatch_{idx}\"},\n", + " })\n", "\n", "# Execute the workflow\n", "executor = EOExecutor(workflow, execution_args, save_logs=True)\n", @@ -1331,13 +1329,11 @@ "\n", "execution_args = []\n", "for idx in range(len(patch_ids)):\n", - " execution_args.append(\n", - " {\n", - " workflow_nodes[0]: {\"eopatch_folder\": f\"eopatch_{idx}\"}, # load\n", - " workflow_nodes[-2]: {\"seed\": 42}, # sampling\n", - " workflow_nodes[-1]: {\"eopatch_folder\": f\"eopatch_{idx}\"}, # save\n", - " }\n", - " )\n", + " execution_args.append({\n", + " workflow_nodes[0]: {\"eopatch_folder\": f\"eopatch_{idx}\"}, # load\n", + " workflow_nodes[-2]: {\"seed\": 42}, # sampling\n", + " workflow_nodes[-1]: {\"eopatch_folder\": f\"eopatch_{idx}\"}, # save\n", + " })\n", "\n", "executor = EOExecutor(workflow, execution_args, save_logs=True)\n", "executor.run(workers=5)\n", @@ -2006,13 +2002,11 @@ "# Create a list of execution arguments for each patch\n", "execution_args = []\n", "for i in range(len(patch_ids)):\n", - " execution_args.append(\n", - " {\n", - " workflow_nodes[0]: {\"eopatch_folder\": f\"eopatch_{i}\"},\n", - " workflow_nodes[2]: {\"filename\": f\"{tiff_location}/prediction_eopatch_{i}.tiff\"},\n", - " workflow_nodes[3]: {\"eopatch_folder\": f\"eopatch_{i}\"},\n", - " }\n", - " )\n", + " execution_args.append({\n", + " workflow_nodes[0]: {\"eopatch_folder\": f\"eopatch_{i}\"},\n", + " workflow_nodes[2]: {\"filename\": f\"{tiff_location}/prediction_eopatch_{i}.tiff\"},\n", + " workflow_nodes[3]: {\"eopatch_folder\": f\"eopatch_{i}\"},\n", + " })\n", "\n", "# Run the executor\n", "executor = EOExecutor(workflow, execution_args)\n", diff --git a/examples/water-monitor/WaterMonitorWorkflow.ipynb b/examples/water-monitor/WaterMonitorWorkflow.ipynb index 5d8fdf5e..bba14e02 100644 --- a/examples/water-monitor/WaterMonitorWorkflow.ipynb +++ b/examples/water-monitor/WaterMonitorWorkflow.ipynb @@ -430,11 +430,9 @@ "# The download task requires additional arguments at execution. 
These are linked to the node the task is in.\n", "download_node = workflow_nodes[0]\n", "\n", - "result = workflow.execute(\n", - " {\n", - " download_node: {\"bbox\": dam_bbox, \"time_interval\": time_interval},\n", - " }\n", - ")" + "result = workflow.execute({\n", + " download_node: {\"bbox\": dam_bbox, \"time_interval\": time_interval},\n", + "})" ] }, { @@ -648,7 +646,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.4" + "version": "3.8.10" }, "vscode": { "interpreter": { diff --git a/pyproject.toml b/pyproject.toml index 917dc4fc..eb3e28cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,11 +48,11 @@ dependencies = [ "fs", "fs-s3fs", "geopandas>=0.11.0", - "numpy>=1.20.0", + "numpy>=1.20.0,<2", "python-dateutil", "sentinelhub>=3.9.0", "tqdm>=4.27", - "typing-extensions", + "typing-extensions>=4.5.0", "opencv-python-headless", "affine", "rasterio>=1.3.8", @@ -106,35 +106,36 @@ preview = true line-length = 120 target-version = "py38" select = [ - "F", # pyflakes - "E", # pycodestyle - "W", # pycodestyle - "C90", # mccabe - "N", # naming - "YTT", # flake-2020 - "B", # bugbear - "A", # built-ins - "COM", # commas - "C4", # comprehensions - "T10", # debugger statements - "ISC", # implicit string concatenation - "ICN", # import conventions - "G", # logging format - "PIE", # flake8-pie - "T20", # print statements - "PT", # pytest style - "RET", # returns - "SLF", # private member access - "SIM", # simplifications - "ARG", # unused arguments - "PD", # pandas - "PGH", # pygrep hooks (useless noqa comments, eval statements etc.) - "FLY", # flynt - "RUF", # ruff rules - "NPY", # numpy - "I", # isort - "UP", # pyupgrade - "FA", # checks where future import of annotations would make types nicer + "F", # pyflakes + "E", # pycodestyle + "W", # pycodestyle + "C90", # mccabe + "I", # isort + "N", # naming + "UP", # pyupgrade + "YTT", # flake-2020 + "B", # bugbear + "A", # built-ins + "COM", # commas + "C4", # comprehensions + "T10", # debugger statements + "FA", # checks where future import of annotations would make types nicer + "ISC", # implicit string concatenation + "ICN", # import conventions + "G", # logging format + "PIE", # flake8-pie + "T20", # print statements + "PT", # pytest style + "RET", # returns + "SLF", # private member access + "SIM", # simplifications + "ARG", # unused arguments + "PD", # pandas + "PGH", # pygrep hooks (useless noqa comments, eval statements etc.) 
+ "FLY", # flynt + "NPY", # numpy + "PERF", # perflint, performance improvements + "RUF", # ruff rules ] fix = true fixable = [ @@ -147,7 +148,6 @@ fixable = [ ] ignore = [ "C408", # complains about `dict()` calls, we use them to avoid too many " in the code - "SIM117", # wants to always combine `with` statements, gets ugly for us "SIM108", # tries to aggresively inline `if`, not always readable "A003", # complains when ATTRIBUTES shadow builtins, we have objects that implement `filter` and such "COM812", # trailing comma missing, fights with black diff --git a/tests/core/test_core_tasks.py b/tests/core/test_core_tasks.py index cfa91de8..c3048962 100644 --- a/tests/core/test_core_tasks.py +++ b/tests/core/test_core_tasks.py @@ -54,15 +54,13 @@ @pytest.fixture(name="patch") def patch_fixture() -> EOPatch: - patch = generate_eopatch( - { - FeatureType.DATA: ["bands", "CLP"], - FeatureType.MASK: ["CLM"], - FeatureType.MASK_TIMELESS: ["mask", "LULC", "RANDOM_UINT8"], - FeatureType.SCALAR: ["values", "CLOUD_COVERAGE"], - FeatureType.META_INFO: ["something"], - } - ) + patch = generate_eopatch({ + FeatureType.DATA: ["bands", "CLP"], + FeatureType.MASK: ["CLM"], + FeatureType.MASK_TIMELESS: ["mask", "LULC", "RANDOM_UINT8"], + FeatureType.SCALAR: ["values", "CLOUD_COVERAGE"], + FeatureType.META_INFO: ["something"], + }) patch.data["CLP_S2C"] = np.zeros_like(patch.data["CLP"]) return patch diff --git a/tests/core/test_eodata.py b/tests/core/test_eodata.py index db749a67..c91cbe08 100644 --- a/tests/core/test_eodata.py +++ b/tests/core/test_eodata.py @@ -31,23 +31,22 @@ @pytest.fixture(name="mini_eopatch") def mini_eopatch_fixture() -> EOPatch: - return generate_eopatch( - { - FeatureType.DATA: ["A", "B"], - FeatureType.MASK: ["C", "D"], - FeatureType.MASK_TIMELESS: ["E"], - FeatureType.META_INFO: ["beep"], - } - ) + return generate_eopatch({ + FeatureType.DATA: ["A", "B"], + FeatureType.MASK: ["C", "D"], + FeatureType.MASK_TIMELESS: ["E"], + FeatureType.META_INFO: ["beep"], + }) def test_numpy_feature_types() -> None: eop = EOPatch(bbox=DUMMY_BBOX, timestamps=DUMMY_TIMESTAMPS * 2) - data_examples = [] - for size in range(6): - for dtype in [np.float32, np.float64, float, np.uint8, np.int64, bool]: - data_examples.append(np.zeros((2,) * size, dtype=dtype)) + data_examples = [ + np.zeros((2,) * size, dtype=dtype) + for size in range(6) + for dtype in [np.float32, np.float64, float, np.uint8, np.int64, bool] + ] for feature_type in filter(lambda fty: fty.is_array(), FeatureType): valid_count = 0 @@ -56,7 +55,7 @@ def test_numpy_feature_types() -> None: try: eop[feature_type]["TEST"] = data valid_count += 1 - except ValueError: + except ValueError: # noqa: PERF203 pass expected_count = 3 if feature_type.is_discrete() else 6 diff --git a/tests/core/test_eodata_io.py b/tests/core/test_eodata_io.py index efdd03c9..61a3b7cc 100644 --- a/tests/core/test_eodata_io.py +++ b/tests/core/test_eodata_io.py @@ -66,15 +66,13 @@ def _silence_warnings_fixture(): @pytest.fixture(name="eopatch") def eopatch_fixture(): - eopatch = generate_eopatch( - { - FeatureType.DATA: ["data"], - FeatureType.MASK_TIMELESS: ["mask", "mask2"], - FeatureType.SCALAR: ["my scalar with spaces"], - FeatureType.SCALAR_TIMELESS: ["my timeless scalar with spaces"], - FeatureType.META_INFO: ["something", "something-else"], - } - ) + eopatch = generate_eopatch({ + FeatureType.DATA: ["data"], + FeatureType.MASK_TIMELESS: ["mask", "mask2"], + FeatureType.SCALAR: ["my scalar with spaces"], + FeatureType.SCALAR_TIMELESS: ["my timeless scalar with 
spaces"], + FeatureType.META_INFO: ["something", "something-else"], + }) eopatch.vector["my-df"] = GeoDataFrame( { "values": [1, 2], diff --git a/tests/core/test_eoexecutor.py b/tests/core/test_eoexecutor.py index 5eb91e45..3d51dfab 100644 --- a/tests/core/test_eoexecutor.py +++ b/tests/core/test_eoexecutor.py @@ -215,9 +215,7 @@ def test_exception_wrong_length_execution_names(workflow, execution_kwargs): def test_keyboard_interrupt(): exception_node = EONode(KeyboardExceptionTask()) workflow = EOWorkflow([exception_node]) - execution_kwargs = [] - for _ in range(10): - execution_kwargs.append({exception_node: {"arg1": 1}}) + execution_kwargs = [{exception_node: {"arg1": 1}} for _ in range(10)] run_kwargs = [{"workers": 1}, {"workers": 3, "multiprocess": True}, {"workers": 3, "multiprocess": False}] for kwarg in run_kwargs: diff --git a/tests/core/test_extra/test_ray.py b/tests/core/test_extra/test_ray.py index a302a11a..33cc5db3 100644 --- a/tests/core/test_extra/test_ray.py +++ b/tests/core/test_extra/test_ray.py @@ -175,9 +175,7 @@ def test_execution_results2(workflow, execution_kwargs): def test_keyboard_interrupt(): exception_node = EONode(KeyboardExceptionTask()) workflow = EOWorkflow([exception_node]) - execution_kwargs = [] - for _ in range(10): - execution_kwargs.append({exception_node: {"arg1": 1}}) + execution_kwargs = [{exception_node: {"arg1": 1}} for _ in range(10)] with pytest.raises((TaskCancelledError, RayTaskError)): RayExecutor(workflow, execution_kwargs).run() diff --git a/tests/core/test_utils/test_parsing.py b/tests/core/test_utils/test_parsing.py index d61c3531..cb2bf3c5 100644 --- a/tests/core/test_utils/test_parsing.py +++ b/tests/core/test_utils/test_parsing.py @@ -152,14 +152,12 @@ def test_allowed_feature_types_iterable(test_input: FeaturesSpecification, allow @pytest.fixture(name="eopatch", scope="module") def eopatch_fixture(): - return generate_eopatch( - { - FeatureType.DATA: ["data", "CLP"], - FeatureType.MASK: ["data", "IS_VALID"], - FeatureType.MASK_TIMELESS: ["LULC"], - FeatureType.META_INFO: ["something"], - } - ) + return generate_eopatch({ + FeatureType.DATA: ["data", "CLP"], + FeatureType.MASK: ["data", "IS_VALID"], + FeatureType.MASK_TIMELESS: ["LULC"], + FeatureType.META_INFO: ["something"], + }) @pytest.mark.parametrize(