From 8112d8994ec67023b18c7b0f514646df492e0bdc Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Thu, 11 Jan 2024 09:09:25 +0100 Subject: [PATCH 01/31] Update api with adjust_for_intake branch --- api/Dockerfile | 20 +- api/app/api_utils.py | 73 ++++ api/app/auth/__init__.py | 0 api/app/auth/backend.py | 66 ++++ api/app/auth/manager.py | 72 ++++ api/app/auth/models.py | 38 ++ api/app/auth/scopes.py | 5 + api/app/callbacks/__init__.py | 1 + api/app/callbacks/on_startup.py | 15 + api/app/const/__init__.py | 0 api/app/const/tags.py | 5 + api/app/const/venv.py | 7 + api/app/decorators_factory.py | 37 ++ api/app/encoders.py | 41 ++ api/app/endpoint_handlers/__init__.py | 3 + api/app/endpoint_handlers/dataset.py | 430 ++++++++++++++++++++ api/app/endpoint_handlers/file.py | 66 ++++ api/app/endpoint_handlers/request.py | 144 +++++++ api/app/exceptions.py | 195 ++++++++++ api/app/main.py | 538 ++++++++++++++++++++++---- api/app/validation.py | 36 ++ api/requirements.txt | 5 +- 22 files changed, 1710 insertions(+), 87 deletions(-) create mode 100644 api/app/api_utils.py create mode 100644 api/app/auth/__init__.py create mode 100644 api/app/auth/backend.py create mode 100644 api/app/auth/manager.py create mode 100644 api/app/auth/models.py create mode 100644 api/app/auth/scopes.py create mode 100644 api/app/callbacks/__init__.py create mode 100644 api/app/callbacks/on_startup.py create mode 100644 api/app/const/__init__.py create mode 100644 api/app/const/tags.py create mode 100644 api/app/const/venv.py create mode 100644 api/app/decorators_factory.py create mode 100644 api/app/encoders.py create mode 100644 api/app/endpoint_handlers/__init__.py create mode 100644 api/app/endpoint_handlers/dataset.py create mode 100644 api/app/endpoint_handlers/file.py create mode 100644 api/app/endpoint_handlers/request.py create mode 100644 api/app/exceptions.py create mode 100644 api/app/validation.py diff --git a/api/Dockerfile b/api/Dockerfile index 6182cb1..9ee0633 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,15 +1,9 @@ -FROM continuumio/miniconda3 -WORKDIR /code -COPY ./api/requirements.txt /code/requirements.txt +ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production +ARG TAG=latest +FROM $REGISTRY/geodds-datastore:$TAG +WORKDIR /app +COPY requirements.txt /code/requirements.txt RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt -RUN conda install -c anaconda psycopg2 -COPY ./utils/wait-for-it.sh /code/wait-for-it.sh -COPY ./db/dbmanager /code/db/dbmanager -COPY ./geoquery/ /code/geoquery -COPY ./resources /code/app/resources -COPY ./api/app /code/app +COPY app /app EXPOSE 80 -# VOLUME /code -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80"] -# if behind a proxy use --proxy-headers -# CMD ["uvicorn", "app.main:app", "--proxy-headers", "--host", "0.0.0.0", "--port", "80"] \ No newline at end of file +CMD ["uvicorn", "app.main:app", "--proxy-headers", "--host", "0.0.0.0", "--port", "80"] diff --git a/api/app/api_utils.py b/api/app/api_utils.py new file mode 100644 index 0000000..82ea9f6 --- /dev/null +++ b/api/app/api_utils.py @@ -0,0 +1,73 @@ +"""Utils module""" + + +def convert_bytes(size_bytes: int, to: str) -> float: + """Converts size in bytes to the other unit - one out of: + ["kb", "mb", "gb"] + + Parameters + ---------- + size_bytes : int + Size in bytes + to : str + Unit to convert `size_bytes` to + + size : float + `size_bytes` converted to the given unit + """ + assert to is not None, "Expected unit cannot be `None`" + to = to.lower() + match to: + case 
"bytes": + return size_bytes + case "kb": + return size_bytes / 1024 + case "mb": + return size_bytes / 1024**2 + case "gb": + return size_bytes / 1024**3 + case _: + raise ValueError(f"unsupported units: {to}") + + +def make_bytes_readable_dict( + size_bytes: int, units: str | None = None +) -> dict: + """Prepare dictionary representing size (in bytes) in more readable unit + to keep value in the range [0,1] - if `units` is `None`. + If `units` is not None, converts `size_bytes` to the size expressed by + that argument. + + Parameters + ---------- + size_bytes : int + Size expressed in bytes + units : optional str + + Returns + ------- + result : dict + A dictionary with size and units in the form: + { + "value": ..., + "units": ... + } + """ + if units is None: + units = "bytes" + if units != "bytes": + converted_size = convert_bytes(size_bytes=size_bytes, to=units) + return {"value": converted_size, "units": units} + val = size_bytes + if val > 1024: + units = "kB" + val /= 1024 + if val > 1024: + units = "MB" + val /= 1024 + if val > 1024: + units = "GB" + val /= 1024 + if val > 0.0 and (round(val, 2) == 0.00): + val = 0.01 + return {"value": round(val, 2), "units": units} diff --git a/api/app/auth/__init__.py b/api/app/auth/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/app/auth/backend.py b/api/app/auth/backend.py new file mode 100644 index 0000000..c172b58 --- /dev/null +++ b/api/app/auth/backend.py @@ -0,0 +1,66 @@ +"""The module contains authentication backend""" +from uuid import UUID + +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + UnauthenticatedUser, +) +from dbmanager.dbmanager import DBManager + +import exceptions as exc +from auth.models import DDSUser +from auth import scopes + + +class DDSAuthenticationBackend(AuthenticationBackend): + """Class managing authentication and authorization""" + + async def authenticate(self, conn): + """Authenticate user based on `User-Token` header""" + if "User-Token" in conn.headers: + return self._manage_user_token_auth(conn.headers["User-Token"]) + return AuthCredentials([scopes.ANONYMOUS]), UnauthenticatedUser() + + def _manage_user_token_auth(self, user_token: str): + try: + user_id, api_key = self.get_authorization_scheme_param(user_token) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() + user_dto = DBManager().get_user_details(user_id) + eligible_scopes = [scopes.AUTHENTICATED] + self._get_scopes_for_user( + user_dto=user_dto + ) + if user_dto.api_key != api_key: + raise exc.AuthenticationFailed( + user_dto + ).wrap_around_http_exception() + return AuthCredentials(eligible_scopes), DDSUser(username=user_id) + + def _get_scopes_for_user(self, user_dto) -> list[str]: + if user_dto is None: + return [] + eligible_scopes = [] + for role in user_dto.roles: + if "admin" == role.role_name: + eligible_scopes.append(scopes.ADMIN) + continue + # NOTE: Role-specific scopes + # Maybe need some more logic + eligible_scopes.append(role.role_name) + return eligible_scopes + + def get_authorization_scheme_param(self, user_token: str): + """Get `user_id` and `api_key` if authorization scheme is correct.""" + if user_token is None or user_token.strip() == "": + raise exc.EmptyUserTokenError + if ":" not in user_token: + raise exc.ImproperUserTokenError + user_id, api_key, *rest = user_token.split(":") + if len(rest) > 0: + raise exc.ImproperUserTokenError + try: + _ = UUID(user_id, version=4) + except ValueError as err: + raise exc.ImproperUserTokenError 
diff --git a/api/app/auth/manager.py b/api/app/auth/manager.py
new file mode 100644
index 0000000..02bf686
--- /dev/null
+++ b/api/app/auth/manager.py
@@ -0,0 +1,72 @@
+"""Module with access/authentication functions"""
+from typing import Optional
+
+from utils.api_logging import get_dds_logger
+import exceptions as exc
+
+log = get_dds_logger(__name__)
+
+
+def is_role_eligible_for_product(
+    product_role_name: Optional[str] = None,
+    user_roles_names: Optional[list[str]] = None,
+):
+    """Check if a given role is eligible for the product with the provided
+    `product_role_name`.
+
+    Parameters
+    ----------
+    product_role_name : str, optional, default=None
+        The role which is eligible for the given product.
+        If `None`, the product is considered public
+    user_roles_names: list of str, optional, default=None
+        A list of user role names. If `None`, the user is considered
+        public (anonymous)
+
+    Returns
+    -------
+    is_eligible : bool
+        Flag indicating whether any role within the given `user_roles_names`
+        is eligible for the product with `product_role_name`
+    """
+    log.debug(
+        "verifying eligibility of the product role '%s' against roles '%s'",
+        product_role_name,
+        user_roles_names,
+    )
+    if product_role_name == "public" or product_role_name is None:
+        return True
+    if user_roles_names is None:
+        # NOTE: it means we consider the public profile
+        return False
+    if "admin" in user_roles_names:
+        return True
+    if product_role_name in user_roles_names:
+        return True
+    return False
+
+
+def assert_is_role_eligible(
+    product_role_name: Optional[str] = None,
+    user_roles_names: Optional[list[str]] = None,
+):
+    """Assert that the user role is eligible for the product
+
+    Parameters
+    ----------
+    product_role_name : str, optional, default=None
+        The role which is eligible for the given product.
+        If `None`, the product is considered public
+    user_roles_names: list of str, optional, default=None
+        A list of user role names. If `None`, the user is considered
+        public (anonymous)
+
+    Raises
+    -------
+    AuthorizationFailed
+    """
+    if not is_role_eligible_for_product(
+        product_role_name=product_role_name,
+        user_roles_names=user_roles_names,
+    ):
+        raise exc.AuthorizationFailed
diff --git a/api/app/auth/models.py b/api/app/auth/models.py
new file mode 100644
index 0000000..bff896f
--- /dev/null
+++ b/api/app/auth/models.py
@@ -0,0 +1,38 @@
+"""The module contains models related to authentication and authorization"""
+from starlette.authentication import SimpleUser
+
+
+class DDSUser(SimpleUser):
+    """Immutable class containing information about the authenticated user"""
+
+    def __init__(self, username: str) -> None:
+        super().__init__(username=username)
+
+    @property
+    def id(self):
+        return self.username
+
+    def __eq__(self, other) -> bool:
+        if not isinstance(other, DDSUser):
+            return False
+        if self.username == other.username:
+            return True
+        return False
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __repr__(self):
+        return f"<DDSUser(username={self.username})>"
+
+    def __delattr__(self, name):
+        if getattr(self, name, None) is not None:
+            raise AttributeError(f"The attribute '{name}' cannot be deleted!")
+        super().__delattr__(name)
+
+    def __setattr__(self, name, value):
+        if getattr(self, name, None) is not None:
+            raise AttributeError(
+                f"The attribute '{name}' cannot be modified when not None!"
+            )
+        super().__setattr__(name, value)
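A quick illustration of the eligibility rules implemented in `manager.py` above; these calls run standalone against the module exactly as defined in this patch:

```python
from auth.manager import is_role_eligible_for_product

# public (or unset) product roles are visible to everyone, even anonymous users
assert is_role_eligible_for_product(None, None)
assert is_role_eligible_for_product("public", None)

# role-restricted products require a matching role (or admin)
assert not is_role_eligible_for_product("internal", ["external"])
assert is_role_eligible_for_product("internal", ["internal"])
assert is_role_eligible_for_product("internal", ["admin"])
```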
diff --git a/api/app/auth/scopes.py b/api/app/auth/scopes.py
new file mode 100644
index 0000000..75113e4
--- /dev/null
+++ b/api/app/auth/scopes.py
@@ -0,0 +1,5 @@
+"""This module contains predefined authorization scopes"""
+
+ADMIN = "admin"
+AUTHENTICATED = "authenticated"
+ANONYMOUS = "anonymous"
diff --git a/api/app/callbacks/__init__.py b/api/app/callbacks/__init__.py
new file mode 100644
index 0000000..e003acf
--- /dev/null
+++ b/api/app/callbacks/__init__.py
@@ -0,0 +1 @@
+from .on_startup import all_onstartup_callbacks
diff --git a/api/app/callbacks/on_startup.py b/api/app/callbacks/on_startup.py
new file mode 100644
index 0000000..ec883d3
--- /dev/null
+++ b/api/app/callbacks/on_startup.py
@@ -0,0 +1,15 @@
+"""Module with functions called during API server startup"""
+from utils.api_logging import get_dds_logger
+
+from datastore.datastore import Datastore
+
+log = get_dds_logger(__name__)
+
+
+def _load_cache() -> None:
+    log.info("loading cache started...")
+    Datastore()._load_cache()
+    log.info("cache loaded successfully!")
+
+
+all_onstartup_callbacks = [_load_cache]
diff --git a/api/app/const/__init__.py b/api/app/const/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/api/app/const/tags.py b/api/app/const/tags.py
new file mode 100644
index 0000000..58a2213
--- /dev/null
+++ b/api/app/const/tags.py
@@ -0,0 +1,5 @@
+"""The module with endpoint tags definitions"""
+
+BASIC = "basic"
+DATASET = "dataset"
+REQUEST = "request"
diff --git a/api/app/const/venv.py b/api/app/const/venv.py
new file mode 100644
index 0000000..85c3658
--- /dev/null
+++ b/api/app/const/venv.py
@@ -0,0 +1,7 @@
+"""This module contains all supported environment variable names"""
+
+ENDPOINT_PREFIX = "ENDPOINT_PREFIX"
+ALLOWED_CORS_ORIGINS_REGEX = "ALLOWED_CORS_ORIGINS_REGEX"
+LOGGING_FORMAT = "LOGGING_FORMAT"
+LOGGING_LEVEL = "LOGGING_LEVEL"
+WEB_COMPONENT_HOST = "WEB_COMPONENT_HOST"
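These constants only name the environment variables; reading and defaulting happens at the call sites. A hedged sketch of the consumption pattern (the `/api` default mirrors the `root_path` fallback used in `main.py` later in this patch, and the CORS branch mirrors its CORS setup):

```python
import os

from const import venv  # the module added above

# Resolve optional settings with explicit defaults.
endpoint_prefix = os.environ.get(venv.ENDPOINT_PREFIX, "/api")
logging_level = os.environ.get(venv.LOGGING_LEVEL, "INFO")

# Prefer a regex of allowed origins when provided; otherwise allow all.
if venv.ALLOWED_CORS_ORIGINS_REGEX in os.environ:
    cors = {"allow_origin_regex": os.environ[venv.ALLOWED_CORS_ORIGINS_REGEX]}
else:
    cors = {"allow_origins": ["*"]}
```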
[("dataset_id", str)] + + Raises + ------ + TypeError + If a required parameter is not defined or is of wrong type + """ + for param_name, param_type in required_parameters: + if param_name not in sig.parameters: + raise TypeError( + f"The parameter '{param_name}' annotated with the type" + f" '{param_type}' must be defined for the callable decorated" + " with 'authenticate_user' decorator" + ) + + +def bind_arguments(sig: Signature, *args, **kwargs): + """Bind arguments to the signature""" + args_bind = sig.bind_partial(*args, **kwargs) + args_bind.apply_defaults() + return args_bind.arguments diff --git a/api/app/encoders.py b/api/app/encoders.py new file mode 100644 index 0000000..9566f57 --- /dev/null +++ b/api/app/encoders.py @@ -0,0 +1,41 @@ +import numpy as np +from fastapi.encoders import encoders_by_class_tuples + + +def make_ndarray_dtypes_valid(o: np.ndarray) -> np.ndarray: + """Convert `numpy.array` dtype to the one which is serializable + to JSON. + + int32 -> int64 + float32 -> float 64 + + Parameters + ---------- + o : np.ndarray + A NumPy array object + + Returns + ------- + res : np.ndarray + A NumPy array object with dtype set properly + + Raises + ------ + AssertionError + If passed object is not of `numpy.ndarray` + """ + assert isinstance(o, np.ndarray) + if np.issubdtype(o.dtype, np.int32): + return o.astype(np.int64) + if np.issubdtype(o.dtype, np.float32): + return o.astype(np.float64) + return o + + +def extend_json_encoders(): + """Extend `encoders_by_class_tuples` module variable from `fastapi.encoders` + with auxiliary encoders necessary for proper application working.""" + encoders_by_class_tuples[lambda o: list(make_ndarray_dtypes_valid(o))] = ( + np.ndarray, + ) + encoders_by_class_tuples[str] += (np.int32, np.float32) diff --git a/api/app/endpoint_handlers/__init__.py b/api/app/endpoint_handlers/__init__.py new file mode 100644 index 0000000..c5a44be --- /dev/null +++ b/api/app/endpoint_handlers/__init__.py @@ -0,0 +1,3 @@ +from . import file as file_handler +from . import dataset as dataset_handler +from . import request as request_handler diff --git a/api/app/endpoint_handlers/dataset.py b/api/app/endpoint_handlers/dataset.py new file mode 100644 index 0000000..c03a54b --- /dev/null +++ b/api/app/endpoint_handlers/dataset.py @@ -0,0 +1,430 @@ +"""Modules realizing logic for dataset-related endpoints""" +import os +import pika +import json +from typing import Optional + +from fastapi.responses import FileResponse + +from dbmanager.dbmanager import DBManager, RequestStatus +from intake_geokube.queries.geoquery import GeoQuery +from intake_geokube.queries.workflow import Workflow +from datastore.datastore import Datastore, DEFAULT_MAX_REQUEST_SIZE_GB +from datastore import exception as datastore_exception + +from utils.metrics import log_execution_time +from utils.api_logging import get_dds_logger +from auth.manager import ( + is_role_eligible_for_product, +) +import exceptions as exc +from api_utils import make_bytes_readable_dict +from validation import assert_product_exists + +from . 
import request + +log = get_dds_logger(__name__) +data_store = Datastore() + +MESSAGE_SEPARATOR = os.environ["MESSAGE_SEPARATOR"] + +def _is_etimate_enabled(dataset_id, product_id): + if dataset_id in ("sentinel-2",): + return False + return True + + +@log_execution_time(log) +def get_datasets(user_roles_names: list[str]) -> list[dict]: + """Realize the logic for the endpoint: + + `GET /datasets` + + Get datasets names, their metadata and products names (if eligible for a user). + If no eligible products are found for a dataset, it is not included. + + Parameters + ---------- + user_roles_names : list of str + List of user's roles + + Returns + ------- + datasets : list of dict + A list of dictionaries with datasets information (including metadata and + eligible products lists) + + Raises + ------- + MissingKeyInCatalogEntryError + If the dataset catalog entry does not contain the required key + """ + log.debug( + "getting all eligible products for datasets...", + ) + datasets = [] + for dataset_id in data_store.dataset_list(): + log.debug( + "getting info and eligible products for `%s`", + dataset_id, + ) + dataset_info = data_store.dataset_info(dataset_id=dataset_id) + try: + eligible_prods = { + prod_name: prod_info + for prod_name, prod_info in dataset_info["products"].items() + if is_role_eligible_for_product( + product_role_name=prod_info.get("role"), + user_roles_names=user_roles_names, + ) + } + except KeyError as err: + log.error( + "dataset `%s` does not have products defined", + dataset_id, + exc_info=True, + ) + raise exc.MissingKeyInCatalogEntryError( + key="products", dataset=dataset_id + ) from err + else: + if len(eligible_prods) == 0: + log.debug( + "no eligible products for dataset `%s` for the role `%s`." + " dataset skipped", + dataset_id, + user_roles_names, + ) + else: + dataset_info["products"] = eligible_prods + datasets.append(dataset_info) + return datasets + + +@log_execution_time(log) +@assert_product_exists +def get_product_details( + user_roles_names: list[str], + dataset_id: str, + product_id: Optional[str] = None, +) -> dict: + """Realize the logic for the endpoint: + + `GET /datasets/{dataset_id}/{product_id}` + + Get details for the given product indicated by `dataset_id` + and `product_id` arguments. + + Parameters + ---------- + user_roles_names : list of str + List of user's roles + dataset_id : str + ID of the dataset + product_id : optional, str + ID of the product. If `None` the 1st product will be considered + + Returns + ------- + details : dict + Details for the given product + + Raises + ------- + AuthorizationFailed + If user is not authorized for the resources + """ + log.debug( + "getting details for eligible products of `%s`", + dataset_id, + ) + try: + if product_id: + return data_store.product_details( + dataset_id=dataset_id, + product_id=product_id, + role=user_roles_names, + use_cache=True, + ) + else: + return data_store.first_eligible_product_details( + dataset_id=dataset_id, role=user_roles_names, use_cache=True + ) + except datastore_exception.UnauthorizedError as err: + raise exc.AuthorizationFailed from err + + +@log_execution_time(log) +@assert_product_exists +def get_metadata(dataset_id: str, product_id: str): + """Realize the logic for the endpoint: + + `GET /datasets/{dataset_id}/{product_id}/metadata` + + Get metadata for the product. 
+ + Parameters + ---------- + dataset_id : str + ID of the dataset + product_id : str + ID of the product + """ + log.debug( + "getting metadata for '{dataset_id}.{product_id}'", + ) + return data_store.product_metadata(dataset_id, product_id) + + +@log_execution_time(log) +@assert_product_exists +def estimate( + dataset_id: str, + product_id: str, + query: GeoQuery, + unit: Optional[str] = None, +): + """Realize the logic for the nedpoint: + + `POST /datasets/{dataset_id}/{product_id}/estimate` + + Estimate the size of the resulting data. + No authentication is needed for estimation query. + + Parameters + ---------- + dataset_id : str + ID of the dataset + product_id : str + ID of the product + query : GeoQuery + Query to perform + unit : str + One of unit [bytes, kB, MB, GB] to present the result. If `None`, + unit will be inferred. + + Returns + ------- + size_details : dict + Estimated size of the query in the form: + ```python + { + "value": val, + "units": units + } + ``` + """ + query_bytes_estimation = data_store.estimate(dataset_id, product_id, query) + return make_bytes_readable_dict( + size_bytes=query_bytes_estimation, units=unit + ) + + +@log_execution_time(log) +@assert_product_exists +def async_query( + user_id: str, + dataset_id: str, + product_id: str, + query: GeoQuery, +): + """Realize the logic for the endpoint: + + `POST /datasets/{dataset_id}/{product_id}/execute` + + Query the data and return the ID of the request. + + Parameters + ---------- + user_id : str + ID of the user executing the query + dataset_id : str + ID of the dataset + product_id : str + ID of the product + query : GeoQuery + Query to perform + + Returns + ------- + request_id : int + ID of the request + + Raises + ------- + MaximumAllowedSizeExceededError + if the allowed size is below the estimated one + EmptyDatasetError + if estimated size is zero + + """ + log.debug("geoquery: %s", query) + if _is_etimate_enabled(dataset_id, product_id): + estimated_size = estimate(dataset_id, product_id, query, "GB").get("value") + allowed_size = data_store.product_metadata(dataset_id, product_id).get( + "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB + ) + if estimated_size > allowed_size: + raise exc.MaximumAllowedSizeExceededError( + dataset_id=dataset_id, + product_id=product_id, + estimated_size_gb=estimated_size, + allowed_size_gb=allowed_size, + ) + if estimated_size == 0.0: + raise exc.EmptyDatasetError( + dataset_id=dataset_id, product_id=product_id + ) + broker_conn = pika.BlockingConnection( + pika.ConnectionParameters( + host=os.getenv("BROKER_SERVICE_HOST", "broker") + ) + ) + broker_channel = broker_conn.channel() + + request_id = DBManager().create_request( + user_id=user_id, + dataset=dataset_id, + product=product_id, + query=json.dumps(query.model_dump_original()), + ) + + # TODO: find a separator; for the moment use "\" + message = MESSAGE_SEPARATOR.join( + [str(request_id), "query", dataset_id, product_id, query.json()] + ) + + broker_channel.basic_publish( + exchange="", + routing_key="query_queue", + body=message, + properties=pika.BasicProperties( + delivery_mode=2, # make message persistent + ), + ) + broker_conn.close() + return request_id + +@log_execution_time(log) +@assert_product_exists +def sync_query( + user_id: str, + dataset_id: str, + product_id: str, + query: GeoQuery, +): + """Realize the logic for the endpoint: + + `POST /datasets/{dataset_id}/{product_id}/execute` + + Query the data and return the result of the request. 
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user executing the query
+    dataset_id : str
+        ID of the dataset
+    product_id : str
+        ID of the product
+    query : GeoQuery
+        Query to perform
+
+    Returns
+    -------
+    response : FileResponse
+        The file being the result of the request
+
+    Raises
+    -------
+    MaximumAllowedSizeExceededError
+        if the allowed size is below the estimated one
+    EmptyDatasetError
+        if the estimated size is zero
+    ProductRetrievingError
+        if the request finished with a status other than done
+
+    """
+
+    import time
+    request_id = async_query(user_id, dataset_id, product_id, query)
+    status, _ = DBManager().get_request_status_and_reason(request_id)
+    log.debug("sync query: status: %s", status)
+    while status in (RequestStatus.RUNNING, RequestStatus.QUEUED,
+                     RequestStatus.PENDING):
+        time.sleep(1)
+        status, _ = DBManager().get_request_status_and_reason(request_id)
+        log.debug("sync query: status: %s", status)
+
+    if status is RequestStatus.DONE:
+        download_details = DBManager().get_download_details_for_request_id(
+            request_id
+        )
+        return FileResponse(
+            path=download_details.location_path,
+            filename=download_details.location_path.split(os.sep)[-1],
+        )
+    raise exc.ProductRetrievingError(
+        dataset_id=dataset_id,
+        product_id=product_id,
+        status=status.name)
+
+
+@log_execution_time(log)
+def run_workflow(
+    user_id: str,
+    workflow: Workflow,
+):
+    """Realize the logic for the endpoint:
+
+    `POST /datasets/workflow`
+
+    Schedule the workflow and return the ID of the request.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user executing the query
+    workflow : Workflow
+        Workflow to perform
+
+    Returns
+    -------
+    request_id : int
+        ID of the request
+    """
+    log.debug("workflow: %s", workflow)
+    broker_conn = pika.BlockingConnection(
+        pika.ConnectionParameters(
+            host=os.getenv("BROKER_SERVICE_HOST", "broker")
+        )
+    )
+    broker_channel = broker_conn.channel()
+    request_id = DBManager().create_request(
+        user_id=user_id,
+        dataset=workflow.dataset_id,
+        product=workflow.product_id,
+        query=workflow.json(),
+    )
+
+    # TODO: find a separator; for the moment use "\"
+    message = MESSAGE_SEPARATOR.join(
+        [str(request_id), "workflow", workflow.json()]
+    )
+
+    broker_channel.basic_publish(
+        exchange="",
+        routing_key="query_queue",
+        body=message,
+        properties=pika.BasicProperties(
+            delivery_mode=2,  # make message persistent
+        ),
+    )
+    broker_conn.close()
+    return request_id
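For context, whatever consumes `query_queue` is expected to split the message on the same separator. A speculative sketch of that contract (the consumer is not part of this patch, and the `"\\"` default comes from the TODO comment above):

```python
import os

# Assumption: the same env var is shared by producer and consumer.
MESSAGE_SEPARATOR = os.environ.get("MESSAGE_SEPARATOR", "\\")

def parse_broker_message(body: bytes) -> dict:
    """Split a message published by async_query/run_workflow."""
    parts = body.decode("utf-8").split(MESSAGE_SEPARATOR)
    request_id, kind = parts[0], parts[1]
    if kind == "query":
        dataset_id, product_id, payload = parts[2], parts[3], parts[4]
        return {"id": int(request_id), "kind": kind,
                "dataset": dataset_id, "product": product_id,
                "query": payload}
    # "workflow" messages carry only the serialized workflow
    return {"id": int(request_id), "kind": kind, "workflow": parts[2]}
```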
diff --git a/api/app/endpoint_handlers/file.py b/api/app/endpoint_handlers/file.py
new file mode 100644
index 0000000..04cf562
--- /dev/null
+++ b/api/app/endpoint_handlers/file.py
@@ -0,0 +1,66 @@
+"""Module with functions to handle file-related endpoints"""
+import os
+
+from fastapi.responses import FileResponse
+from dbmanager.dbmanager import DBManager, RequestStatus
+
+from utils.api_logging import get_dds_logger
+from utils.metrics import log_execution_time
+import exceptions as exc
+
+log = get_dds_logger(__name__)
+
+
+@log_execution_time(log)
+def download_request_result(request_id: int):
+    """Realize the logic for the endpoint:
+
+    `GET /download/{request_id}`
+
+    Get the location path of the file being the result of
+    the request with `request_id`.
+
+    Parameters
+    ----------
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    path : str
+        The location of the resulting file
+
+    Raises
+    -------
+    RequestNotYetAccomplished
+        If the DDS request was not finished yet
+    FileNotFoundError
+        If the file was not found
+    """
+    log.debug(
+        "preparing downloads for request id: %s",
+        request_id,
+    )
+    (
+        request_status,
+        _,
+    ) = DBManager().get_request_status_and_reason(request_id=request_id)
+    if request_status is not RequestStatus.DONE:
+        log.debug(
+            "request with id: '%s' does not exist or it is not finished yet!",
+            request_id,
+        )
+        raise exc.RequestNotYetAccomplished(request_id=request_id)
+    download_details = DBManager().get_download_details_for_request(
+        request_id=request_id
+    )
+    if not os.path.exists(download_details.location_path):
+        log.error(
+            "file '%s' does not exist!",
+            download_details.location_path,
+        )
+        raise FileNotFoundError
+    return FileResponse(
+        path=download_details.location_path,
+        filename=download_details.location_path.split(os.sep)[-1],
+    )
diff --git a/api/app/endpoint_handlers/request.py b/api/app/endpoint_handlers/request.py
new file mode 100644
index 0000000..93a0636
--- /dev/null
+++ b/api/app/endpoint_handlers/request.py
@@ -0,0 +1,144 @@
+"""Modules with functions realizing logic for request-related endpoints"""
+from dbmanager.dbmanager import DBManager
+
+from utils.api_logging import get_dds_logger
+from utils.metrics import log_execution_time
+import exceptions as exc
+
+log = get_dds_logger(__name__)
+
+
+@log_execution_time(log)
+def get_requests(user_id: str):
+    """Realize the logic for the endpoint:
+
+    `GET /requests`
+
+    Get details of all requests for the user.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user for whom requests are taken
+
+    Returns
+    -------
+    requests : list
+        List of all requests done by the user
+    """
+    return DBManager().get_requests_for_user_id(user_id=user_id)
+
+
+@log_execution_time(log)
+def get_request_status(user_id: str, request_id: int):
+    """Realize the logic for the endpoint:
+
+    `GET /requests/{request_id}/status`
+
+    Get the request status and the reason for the eventual failure.
+    The second item is `None` if the status is other than failed.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user whose request's status is about to be checked
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    status : tuple
+        Tuple of status and fail reason.
+ """ + # NOTE: maybe verification should be added if user checks only him\her requests + try: + status, reason = DBManager().get_request_status_and_reason(request_id) + except IndexError as err: + log.error( + "request with id: '%s' was not found!", + request_id, + ) + raise exc.RequestNotFound(request_id=request_id) from err + return {"status": status.name, "fail_reason": reason} + + +@log_execution_time(log) +def get_request_resulting_size(request_id: int): + """Realize the logic for the endpoint: + + `GET /requests/{request_id}/size` + + Get size of the file being the result of the request with `request_id` + + Parameters + ---------- + request_id : int + ID of the request + + Returns + ------- + size : int + Size in bytes + + Raises + ------- + RequestNotFound + If the request was not found + """ + if request := DBManager().get_request_details(request_id): + size = request.download.size_bytes + if not size or size == 0: + raise exc.EmptyDatasetError(dataset_id=request.dataset, + product_id=request.product) + return size + log.info( + "request with id '%s' could not be found", + request_id, + ) + raise exc.RequestNotFound(request_id=request_id) + + +@log_execution_time(log) +def get_request_uri(request_id: int): + """ + Realize the logic for the endpoint: + + `GET /requests/{request_id}/uri` + + Get URI for the request. + + Parameters + ---------- + request_id : int + ID of the request + + Returns + ------- + uri : str + URI for the download associated with the given request + """ + try: + download_details = DBManager().get_download_details_for_request_id( + request_id + ) + except IndexError as err: + log.error( + "request with id: '%s' was not found!", + request_id, + ) + raise exc.RequestNotFound(request_id=request_id) from err + if download_details is None: + ( + request_status, + _, + ) = DBManager().get_request_status_and_reason(request_id) + log.info( + "download URI not found for request id: '%s'." + " Request status is '%s'", + request_id, + request_status, + ) + raise exc.RequestStatusNotDone( + request_id=request_id, request_status=request_status + ) + return download_details.download_uri diff --git a/api/app/exceptions.py b/api/app/exceptions.py new file mode 100644 index 0000000..af4d072 --- /dev/null +++ b/api/app/exceptions.py @@ -0,0 +1,195 @@ +"""Module with DDS exceptions definitions""" +from typing import Optional + +from fastapi import HTTPException + + +class BaseDDSException(BaseException): + """Base class for DDS.api exceptions""" + + msg: str = "Bad request" + code: int = 400 + + def wrap_around_http_exception(self) -> HTTPException: + """Wrap an exception around `fastapi.HTTPExcetion`""" + return HTTPException( + status_code=self.code, + detail=self.msg, + ) + + +class EmptyUserTokenError(BaseDDSException): + """Raised if `User-Token` is empty""" + + msg: str = "User-Token cannot be empty!" + + +class ImproperUserTokenError(BaseDDSException): + """Raised if `User-Token` format is wrong""" + + msg: str = ( + "The format of the User-Token is wrong. It should be be in the format" + " :!" 
+
+
+class NoEligibleProductInDatasetError(BaseDDSException):
+    """Raised if there is no eligible product in the dataset"""
+
+    msg: str = (
+        "No eligible products for the dataset '{dataset_id}' for the user"
+        " with roles '{user_roles_names}'"
+    )
+
+    def __init__(self, dataset_id: str, user_roles_names: list[str]) -> None:
+        self.msg = self.msg.format(
+            dataset_id=dataset_id, user_roles_names=user_roles_names
+        )
+        super().__init__(self.msg)
+
+
+class MissingKeyInCatalogEntryError(BaseDDSException):
+    """Raised if a key is missing in the catalog entry"""
+
+    msg: str = (
+        "The key '{key}' is missing in the catalog entry for the '{dataset}'"
+        " dataset."
+    )
+
+    def __init__(self, key, dataset):
+        self.msg = self.msg.format(key=key, dataset=dataset)
+        super().__init__(self.msg)
+
+
+class MaximumAllowedSizeExceededError(BaseDDSException):
+    """Raised if the estimated size is too big"""
+
+    msg: str = (
+        "Maximum allowed size for '{dataset_id}.{product_id}' is"
+        " {allowed_size_gb:.2f} GB but the estimated size is"
+        " {estimated_size_gb:.2f} GB"
+    )
+
+    def __init__(
+        self, dataset_id, product_id, estimated_size_gb, allowed_size_gb
+    ):
+        self.msg = self.msg.format(
+            dataset_id=dataset_id,
+            product_id=product_id,
+            allowed_size_gb=allowed_size_gb,
+            estimated_size_gb=estimated_size_gb,
+        )
+        super().__init__(self.msg)
+
+
+class RequestNotYetAccomplished(BaseDDSException):
+    """Raised if the DDS request is not finished yet"""
+
+    msg: str = (
+        "Request with id: {request_id} does not exist or it is not"
+        " finished yet!"
+    )
+
+    def __init__(self, request_id):
+        self.msg = self.msg.format(request_id=request_id)
+        super().__init__(self.msg)
+
+
+class RequestNotFound(BaseDDSException):
+    """Raised if the given request could not be found"""
+
+    msg: str = "Request with ID '{request_id}' was not found"
+
+    def __init__(self, request_id: int) -> None:
+        self.msg = self.msg.format(request_id=request_id)
+        super().__init__(self.msg)
+
+
+class RequestStatusNotDone(BaseDDSException):
+    """Raised when the submitted request failed"""
+
+    msg: str = (
+        "Request with id: `{request_id}` does not have a download URI. Its"
+        " status is: `{request_status}`!"
+    )
+
+    def __init__(self, request_id, request_status) -> None:
+        self.msg = self.msg.format(
+            request_id=request_id, request_status=request_status
+        )
+        super().__init__(self.msg)
+
+
+class AuthorizationFailed(BaseDDSException):
+    """Raised when the user is not authorized for the given resource"""
+
+    msg: str = "{user} is not authorized for the resource!"
+    code: int = 403
+
+    def __init__(self, user_id: Optional[str] = None):
+        if user_id is None:
+            self.msg = self.msg.format(user="User")
+        else:
+            self.msg = self.msg.format(user=f"User '{user_id}'")
+        super().__init__(self.msg)
+
+
+class AuthenticationFailed(BaseDDSException):
+    """Raised when the key of the provided user differs from the one
+    stored in the DB"""
+
+    msg: str = "Authentication of the user '{user_id}' failed!"
+    code: int = 401
+
+    def __init__(self, user_id: str):
+        self.msg = self.msg.format(user_id=user_id)
+        super().__init__(self.msg)
+
+
+class MissingDatasetError(BaseDDSException):
+    """Raised if the queried dataset is not present in the catalog"""
+
+    msg: str = "Dataset '{dataset_id}' does not exist in the catalog!"
+    def __init__(self, dataset_id: str):
+        self.msg = self.msg.format(dataset_id=dataset_id)
+        super().__init__(self.msg)
+
+
+class MissingProductError(BaseDDSException):
+    """Raised if the requested product is not defined for the dataset"""
+
+    msg: str = (
+        "Product '{dataset_id}.{product_id}' does not exist in the catalog!"
+    )
+
+    def __init__(self, dataset_id: str, product_id: str):
+        self.msg = self.msg.format(
+            dataset_id=dataset_id, product_id=product_id
+        )
+        super().__init__(self.msg)
+
+
+class EmptyDatasetError(BaseDDSException):
+    """Raised if the size of the requested dataset is zero"""
+
+    msg: str = "The resulting dataset '{dataset_id}.{product_id}' is empty"
+
+    def __init__(self, dataset_id, product_id):
+        self.msg = self.msg.format(
+            dataset_id=dataset_id,
+            product_id=product_id,
+        )
+        super().__init__(self.msg)
+
+
+class ProductRetrievingError(BaseDDSException):
+    """Raised if retrieving of the product failed"""
+
+    msg: str = (
+        "Retrieving of the product '{dataset_id}.{product_id}' failed with"
+        " the status {status}"
+    )
+
+    def __init__(self, dataset_id, product_id, status):
+        self.msg = self.msg.format(
+            dataset_id=dataset_id,
+            product_id=product_id,
+            status=status
+        )
+        super().__init__(self.msg)
\ No newline at end of file
diff --git a/api/app/main.py b/api/app/main.py
index 2712586..2084394 100644
--- a/api/app/main.py
+++ b/api/app/main.py
@@ -1,72 +1,468 @@
-from fastapi import FastAPI
-import pika
-from enum import Enum
-from pydantic import BaseModel
-from db.dbmanager.dbmanager import DBManager
-from geoquery.geoquery import GeoQuery
-
-app = FastAPI()
-db_conn = None
-##
-# RabbitMQ Broker Connection
-broker_conn = pika.BlockingConnection(pika.ConnectionParameters(host='broker'))
-broker_chann = broker_conn.channel()
-
-@app.get("/")
+"""Main module with geokube-dds API endpoints defined"""
+__version__ = "2.0"
+import os
+from typing import Optional
+
+from datetime import datetime
+
+from fastapi import FastAPI, HTTPException, Request, status, Query
+from fastapi.middleware.cors import CORSMiddleware
+from starlette.middleware.authentication import AuthenticationMiddleware
+from starlette.authentication import requires
+
+from aioprometheus import (
+    Counter,
+    Summary,
+    timer,
+    MetricsMiddleware,
+)
+from aioprometheus.asgi.starlette import metrics
+
+from intake_geokube.queries.workflow import Workflow
+from intake_geokube.queries.geoquery import GeoQuery
+
+from utils.api_logging import get_dds_logger
+import exceptions as exc
+from endpoint_handlers import (
+    dataset_handler,
+    file_handler,
+    request_handler,
+)
+from auth.backend import DDSAuthenticationBackend
+from callbacks import all_onstartup_callbacks
+from encoders import extend_json_encoders
+from const import venv, tags
+from auth import scopes
+
+def map_to_geoquery(
+    variables: list[str],
+    format: str,
+    bbox: str | None = None,  # minx, miny, maxx, maxy (minlon, minlat, maxlon, maxlat)
+    time: datetime | None = None,
+    **format_kwargs
+) -> GeoQuery:
+    """Map OGC-style request parameters onto a `GeoQuery`."""
+    area = None
+    if bbox is not None:
+        bbox_ = [float(x) for x in bbox.split(',')]
+        area = {'west': bbox_[0], 'south': bbox_[1],
+                'east': bbox_[2], 'north': bbox_[3]}
+    time_ = None
+    if time is not None:
+        time_ = {'year': time.year, 'month': time.month,
+                 'day': time.day, 'hour': time.hour}
+    query = GeoQuery(variable=variables, time=time_, area=area,
+                     format_args=format_kwargs, format=format)
+    return query
+
+logger = get_dds_logger(__name__)
+
+# ======== JSON encoders extension ========= #
+extend_json_encoders()
+
+app = FastAPI(
+    title="geokube-dds API",
+    description="REST API for geokube-dds",
+    version=__version__,
+ contact={ + "name": "geokube Contributors", + "email": "geokube@googlegroups.com", + }, + license_info={ + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html", + }, + root_path=os.environ.get(venv.ENDPOINT_PREFIX, "/api"), + on_startup=all_onstartup_callbacks, +) + +# ======== Authentication backend ========= # +app.add_middleware( + AuthenticationMiddleware, backend=DDSAuthenticationBackend() +) + +# ======== CORS ========= # +cors_kwargs: dict[str, str | list[str]] +if venv.ALLOWED_CORS_ORIGINS_REGEX in os.environ: + cors_kwargs = { + "allow_origin_regex": os.environ[venv.ALLOWED_CORS_ORIGINS_REGEX] + } +else: + cors_kwargs = {"allow_origins": ["*"]} + +app.add_middleware( + CORSMiddleware, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + **cors_kwargs, +) + + +# ======== Prometheus metrics ========= # +app.add_middleware(MetricsMiddleware) +app.add_route("/metrics", metrics) + +app.state.api_request_duration_seconds = Summary( + "api_request_duration_seconds", "Requests duration" +) +app.state.api_http_requests_total = Counter( + "api_http_requests_total", "Total number of requests" +) + + +# ======== Endpoints definitions ========= # +@app.get("/", tags=[tags.BASIC]) async def dds_info(): - return {"DDS API 2.0"} - -@app.get("/datasets") -async def datasets(): - return {"List of Datasets"} - -@app.get("/datasets/{dataset_id}") -async def dataset(dataset_id: str): - return {f"Dataset Info {dataset_id}"} - -@app.get("/datasets/{dataset_id}/{product_id}") -async def dataset(dataset_id: str, product_id: str): - return {f"Product Info {product_id} from dataset {dataset_id}"} - -@app.post("/datasets/{dataset_id}/{product_id}/estimate") -async def estimate(dataset_id: str, product_id: str, query: GeoQuery): - return {f'estimate size for {dataset_id} {product_id} is 10GB'} - -@app.post("/datasets/{dataset_id}/{product_id}/execute") -async def query(dataset_id: str, product_id: str, format: str, query: GeoQuery): - global db_conn - if not db_conn: - db_conn = DBManager() -# -# -# TODO: Validation Query Schema -# TODO: estimate the size and will not execute if it is above the limit -# -# - request_id = db_conn.create_request(dataset=dataset_id, product=product_id, query=query.json()) - print(f"request id: {request_id}") - -# we should find a separator; for the moment use "\" - message = f'{request_id}\\{dataset_id}\\{product_id}\\{query.json()}\\{format}' - -# submit request to broker queue - broker_chann.basic_publish( - exchange='', - routing_key='query_queue', - body=message, - properties=pika.BasicProperties( - delivery_mode=2, # make message persistent - )) - return request_id - -@app.get("/requests") -async def get_requests(): - return - -@app.get("/requests/{request_id}/status") -async def get_request_status(request_id: int): - return db_conn.get_request_status(request_id) - -@app.get("/requests/{request_id}/uri") -async def get_request_uri(request_id: int): - return \ No newline at end of file + """Return current version of the DDS API""" + return f"DDS API {__version__}" + + +@app.get("/datasets", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, labels={"route": "GET /datasets"} +) +async def get_datasets(request: Request): + """List all products eligible for a user defined by user_token""" + app.state.api_http_requests_total.inc({"route": "GET /datasets"}) + try: + return dataset_handler.get_datasets( + user_roles_names=request.auth.scopes + ) + except exc.BaseDDSException as err: + raise 
err.wrap_around_http_exception() from err + + +@app.get("/datasets/{dataset_id}", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /datasets/{dataset_id}"}, +) +async def get_first_product_details( + request: Request, + dataset_id: str, +): + """Get details for the 1st product of the dataset""" + app.state.api_http_requests_total.inc( + {"route": "GET /datasets/{dataset_id}"} + ) + try: + return dataset_handler.get_product_details( + user_roles_names=request.auth.scopes, + dataset_id=dataset_id, + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.get("/datasets/{dataset_id}/{product_id}", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /datasets/{dataset_id}/{product_id}"}, +) +async def get_product_details( + request: Request, + dataset_id: str, + product_id: str, +): + """Get details for the requested product if user is authorized""" + app.state.api_http_requests_total.inc( + {"route": "GET /datasets/{dataset_id}/{product_id}"} + ) + try: + return dataset_handler.get_product_details( + user_roles_names=request.auth.scopes, + dataset_id=dataset_id, + product_id=product_id, + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + +@app.get("/datasets/{dataset_id}/{product_id}/map", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /datasets/{dataset_id}/{product_id}"}, +) +async def get_map( + request: Request, + dataset_id: str, + product_id: str, +# OGC WMS parameters + width: int, + height: int, + layers: str | None = None, + format: str | None = 'png', + time: datetime | None = None, + transparent: bool | None = 'true', + bgcolor: str | None = 'FFFFFF', + bbox: str | None = None, # minx, miny, maxx, maxy (minlon, minlat, maxlon, maxlat) + crs: str | None = None, +# OGC map parameters + # subset: str | None = None, + # subset_crs: str | None = Query(..., alias="subset-crs"), + # bbox_crs: str | None = Query(..., alias="bbox-crs"), +): + + app.state.api_http_requests_total.inc( + {"route": "GET /datasets/{dataset_id}/{product_id}/map"} + ) + # query should be the OGC query + # map OGC parameters to GeoQuery + # variable: Optional[Union[str, List[str]]] + # time: Optional[Union[Dict[str, str], Dict[str, List[str]]]] + # area: Optional[Dict[str, float]] + # location: Optional[Dict[str, Union[float, List[float]]]] + # vertical: Optional[Union[float, List[float], Dict[str, float]]] + # filters: Optional[Dict] + # format: Optional[str] + query = map_to_geoquery(variables=layers, bbox=bbox, time=time, + format="png", width=width, height=height, + transparent=transparent, bgcolor=bgcolor) + try: + return dataset_handler.sync_query( + user_id=request.user.id, + dataset_id=dataset_id, + product_id=product_id, + query=query + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + +@app.get("/datasets/{dataset_id}/{product_id}/items/{feature_id}", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /datasets/{dataset_id}/{product_id}/items/{feature_id}"}, +) +async def get_feature( + request: Request, + dataset_id: str, + product_id: str, + feature_id: str, +# OGC feature parameters + time: datetime | None = None, + bbox: str | None = None, # minx, miny, maxx, maxy (minlon, minlat, maxlon, maxlat) + crs: str | None = None, +# OGC map parameters + # subset: str | None = None, + # subset_crs: str 
| None = Query(..., alias="subset-crs"), + # bbox_crs: str | None = Query(..., alias="bbox-crs"), +): + + app.state.api_http_requests_total.inc( + {"route": "GET /datasets/{dataset_id}/{product_id}/items/{feature_id}"} + ) + # query should be the OGC query + # feature OGC parameters to GeoQuery + # variable: Optional[Union[str, List[str]]] + # time: Optional[Union[Dict[str, str], Dict[str, List[str]]]] + # area: Optional[Dict[str, float]] + # location: Optional[Dict[str, Union[float, List[float]]]] + # vertical: Optional[Union[float, List[float], Dict[str, float]]] + # filters: Optional[Dict] + # format: Optional[str] + + query = map_to_geoquery(variables=[feature_id], bbox=bbox, time=time, + format="geojson") + try: + return dataset_handler.sync_query( + user_id=request.user.id, + dataset_id=dataset_id, + product_id=product_id, + query=query + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + +@app.get("/datasets/{dataset_id}/{product_id}/metadata", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /datasets/{dataset_id}/{product_id}/metadata"}, +) +async def get_metadata( + request: Request, + dataset_id: str, + product_id: str, +): + """Get metadata of the given product""" + app.state.api_http_requests_total.inc( + {"route": "GET /datasets/{dataset_id}/{product_id}/metadata"} + ) + try: + return dataset_handler.get_metadata( + dataset_id=dataset_id, product_id=product_id + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.post("/datasets/{dataset_id}/{product_id}/estimate", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "POST /datasets/{dataset_id}/{product_id}/estimate"}, +) +async def estimate( + request: Request, + dataset_id: str, + product_id: str, + query: GeoQuery, + unit: str = None, +): + """Estimate the resulting size of the query""" + app.state.api_http_requests_total.inc( + {"route": "POST /datasets/{dataset_id}/{product_id}/estimate"} + ) + try: + return dataset_handler.estimate( + dataset_id=dataset_id, + product_id=product_id, + query=query, + unit=unit, + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.post("/datasets/{dataset_id}/{product_id}/execute", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "POST /datasets/{dataset_id}/{product_id}/execute"}, +) +@requires([scopes.AUTHENTICATED]) +async def query( + request: Request, + dataset_id: str, + product_id: str, + query: GeoQuery, +): + """Schedule the job of data retrieve""" + app.state.api_http_requests_total.inc( + {"route": "POST /datasets/{dataset_id}/{product_id}/execute"} + ) + try: + return dataset_handler.async_query( + user_id=request.user.id, + dataset_id=dataset_id, + product_id=product_id, + query=query, + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.post("/datasets/workflow", tags=[tags.DATASET]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "POST /datasets/workflow"}, +) +@requires([scopes.AUTHENTICATED]) +async def workflow( + request: Request, + tasks: Workflow, +): + """Schedule the job of workflow processing""" + app.state.api_http_requests_total.inc({"route": "POST /datasets/workflow"}) + try: + return dataset_handler.run_workflow( + user_id=request.user.id, + workflow=tasks, + ) + except exc.BaseDDSException as err: + raise 
err.wrap_around_http_exception() from err + + +@app.get("/requests", tags=[tags.REQUEST]) +@timer( + app.state.api_request_duration_seconds, labels={"route": "GET /requests"} +) +@requires([scopes.AUTHENTICATED]) +async def get_requests( + request: Request, +): + """Get all requests for the user""" + app.state.api_http_requests_total.inc({"route": "GET /requests"}) + try: + return request_handler.get_requests(request.user.id) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.get("/requests/{request_id}/status", tags=[tags.REQUEST]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /requests/{request_id}/status"}, +) +@requires([scopes.AUTHENTICATED]) +async def get_request_status( + request: Request, + request_id: int, +): + """Get status of the request without authentication""" + app.state.api_http_requests_total.inc( + {"route": "GET /requests/{request_id}/status"} + ) + try: + return request_handler.get_request_status( + user_id=request.user.id, request_id=request_id + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.get("/requests/{request_id}/size", tags=[tags.REQUEST]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /requests/{request_id}/size"}, +) +@requires([scopes.AUTHENTICATED]) +async def get_request_resulting_size( + request: Request, + request_id: int, +): + """Get size of the file being the result of the request""" + app.state.api_http_requests_total.inc( + {"route": "GET /requests/{request_id}/size"} + ) + try: + return request_handler.get_request_resulting_size( + request_id=request_id + ) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.get("/requests/{request_id}/uri", tags=[tags.REQUEST]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /requests/{request_id}/uri"}, +) +@requires([scopes.AUTHENTICATED]) +async def get_request_uri( + request: Request, + request_id: int, +): + """Get download URI for the request""" + app.state.api_http_requests_total.inc( + {"route": "GET /requests/{request_id}/uri"} + ) + try: + return request_handler.get_request_uri(request_id=request_id) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + + +@app.get("/download/{request_id}", tags=[tags.REQUEST]) +@timer( + app.state.api_request_duration_seconds, + labels={"route": "GET /download/{request_id}"}, +) +# @requires([scopes.AUTHENTICATED]) # TODO: mange download auth in the web component +async def download_request_result( + request: Request, + request_id: int, +): + """Download result of the request""" + app.state.api_http_requests_total.inc( + {"route": "GET /download/{request_id}"} + ) + try: + return file_handler.download_request_result(request_id=request_id) + except exc.BaseDDSException as err: + raise err.wrap_around_http_exception() from err + except FileNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="File was not found!" 
+ ) from err diff --git a/api/app/validation.py b/api/app/validation.py new file mode 100644 index 0000000..51bdbc1 --- /dev/null +++ b/api/app/validation.py @@ -0,0 +1,36 @@ +from datastore.datastore import Datastore +from utils.api_logging import get_dds_logger +from decorators_factory import assert_parameters_are_defined, bind_arguments +from functools import wraps +from inspect import signature +import exceptions as exc + + +log = get_dds_logger(__name__) + + +def assert_product_exists(func): + """Decorator for convenient checking if product is defined in the catalog + """ + sig = signature(func) + assert_parameters_are_defined( + sig, required_parameters=[("dataset_id", str), ("product_id", str)] + ) + + @wraps(func) + def assert_inner(*args, **kwargs): + args_dict = bind_arguments(sig, *args, **kwargs) + dataset_id = args_dict["dataset_id"] + product_id = args_dict["product_id"] + if dataset_id not in Datastore().dataset_list(): + raise exc.MissingDatasetError(dataset_id=dataset_id) + elif ( + product_id is not None + and product_id not in Datastore().product_list(dataset_id) + ): + raise exc.MissingProductError( + dataset_id=dataset_id, product_id=product_id + ) + return func(*args, **kwargs) + + return assert_inner diff --git a/api/requirements.txt b/api/requirements.txt index e23ebfb..97fcaf3 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,6 +1,5 @@ fastapi -pydantic uvicorn pika -intake -sqlalchemy \ No newline at end of file +sqlalchemy +aioprometheus From 3a608ef828f6e363e884834e801f491468564256 Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Thu, 11 Jan 2024 09:12:52 +0100 Subject: [PATCH 02/31] Update datastore with adjust_for_intake branch --- datastore/Dockerfile | 14 + datastore/datastore.py | 63 --- datastore/datastore/__init__.py | 0 datastore/datastore/const.py | 6 + datastore/datastore/datastore.py | 447 +++++++++++++++++++++ datastore/datastore/exception.py | 5 + datastore/datastore/singleton.py | 29 ++ datastore/datastore/util.py | 27 ++ datastore/dbmanager/__init__.py | 0 datastore/dbmanager/dbmanager.py | 349 ++++++++++++++++ datastore/dbmanager/singleton.py | 21 + datastore/requirements.txt | 2 + datastore/tests/__init__.py | 0 datastore/tests/workflow/__init__.py | 0 datastore/tests/workflow/fixtures.py | 122 ++++++ datastore/tests/workflow/test_operators.py | 20 + datastore/tests/workflow/test_workflow.py | 23 ++ datastore/utils/__init__.py | 0 datastore/utils/api_logging.py | 40 ++ datastore/utils/metrics.py | 33 ++ datastore/wait-for-it.sh | 182 +++++++++ datastore/workflow/__init__.py | 1 + datastore/workflow/workflow.py | 226 +++++++++++ 23 files changed, 1547 insertions(+), 63 deletions(-) create mode 100644 datastore/Dockerfile delete mode 100644 datastore/datastore.py create mode 100644 datastore/datastore/__init__.py create mode 100644 datastore/datastore/const.py create mode 100644 datastore/datastore/datastore.py create mode 100644 datastore/datastore/exception.py create mode 100644 datastore/datastore/singleton.py create mode 100644 datastore/datastore/util.py create mode 100644 datastore/dbmanager/__init__.py create mode 100644 datastore/dbmanager/dbmanager.py create mode 100644 datastore/dbmanager/singleton.py create mode 100644 datastore/requirements.txt create mode 100644 datastore/tests/__init__.py create mode 100644 datastore/tests/workflow/__init__.py create mode 100644 datastore/tests/workflow/fixtures.py create mode 100644 datastore/tests/workflow/test_operators.py create mode 100644 
datastore/tests/workflow/test_workflow.py create mode 100644 datastore/utils/__init__.py create mode 100644 datastore/utils/api_logging.py create mode 100644 datastore/utils/metrics.py create mode 100755 datastore/wait-for-it.sh create mode 100644 datastore/workflow/__init__.py create mode 100644 datastore/workflow/workflow.py diff --git a/datastore/Dockerfile b/datastore/Dockerfile new file mode 100644 index 0000000..9ca2496 --- /dev/null +++ b/datastore/Dockerfile @@ -0,0 +1,14 @@ +ARG REGISTRY=rg.nl-ams.scw.cloud/geokube-production +ARG TAG=latest +FROM $REGISTRY/intake-geokube:$TAG +RUN conda install -c conda-forge --yes --freeze-installed psycopg2 \ + && conda clean -afy +COPY requirements.txt /app/requirements.txt +RUN pip install --no-cache-dir -r /app/requirements.txt +COPY ./datastore /app/datastore +COPY ./workflow /app/workflow +COPY ./dbmanager /app/dbmanager +COPY ./utils /app/utils +COPY ./tests /app/tests +COPY ./wait-for-it.sh / + diff --git a/datastore/datastore.py b/datastore/datastore.py deleted file mode 100644 index 107d821..0000000 --- a/datastore/datastore.py +++ /dev/null @@ -1,63 +0,0 @@ -import intake -from geokube.core.datacube import DataCube -from geokube.core.dataset import Dataset -from typing import Union -from geoquery.geoquery import GeoQuery -import json - -class Datastore(): - - def __init__(self, cat_path: str) -> None: - self.catalog = intake.open_catalog(cat_path) - - def dataset_list(self): - return list(self.catalog) - - def product_list(self, dataset_id: str): - return list(self.catalog[dataset_id]) - - def dataset_info(self, dataset_id: str): - info = {} - entry = self.catalog[dataset_id] - if entry.metadata: - info['metadata'] = entry.metadata - info['products'] = {} - for p in self.products(): - info['products'][p] = self.product_info() - - def product_info(self, dataset_id: str, product_id: str): - info = {} - entry = self.catalog[dataset_id][product_id] - if entry.metadata: - info['metadata'] = entry.metadata - info.update(entry.read_chunked().to_dict()) - return info - - def query(self, dataset: str, product: str, query: Union[GeoQuery, dict, str], compute: bool=False): - """ - :param dataset: dasaset name - :param product: product name - :param query: subset query - :param path: path to store - :return: subsetted geokube of selected dataset product - """ - if isinstance(query, str): - query = json.loads(query) - if isinstance(query, dict): - query = GeoQuery(**query) - kube = self.catalog[dataset][product].read_chunked() - if isinstance(kube, Dataset): - kube = kube.filter(query.filters) - if query.variable: - kube = kube[query.variable] - if query.area: - kube = kube.geobbox(query.area) - if query.locations: - kube = kube.locations(**query.locations) - if query.time: - kube = kube.sel(query.time) - if query.vertical: - kube = kube.sel(query.vertical) - if compute: - kube.compute() - return kube \ No newline at end of file diff --git a/datastore/datastore/__init__.py b/datastore/datastore/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/datastore/datastore/const.py b/datastore/datastore/const.py new file mode 100644 index 0000000..22435bc --- /dev/null +++ b/datastore/datastore/const.py @@ -0,0 +1,6 @@ +"""This module contains useful constants definitions grouped into classes""" + + +class BaseRole: + PUBLIC = "public" + ADMIN = "admin" diff --git a/datastore/datastore/datastore.py b/datastore/datastore/datastore.py new file mode 100644 index 0000000..ca402fe --- /dev/null +++ b/datastore/datastore/datastore.py @@ -0,0 
+1,447 @@
+"""Module for catalog management classes and functions"""
+from __future__ import annotations
+
+import os
+import logging
+import json
+
+import intake
+from dask.delayed import Delayed
+
+from intake_geokube.queries.geoquery import GeoQuery
+
+from geokube.core.datacube import DataCube
+from geokube.core.dataset import Dataset
+
+from .singleton import Singleton
+from .util import log_execution_time
+from .const import BaseRole
+from .exception import UnauthorizedError
+
+DEFAULT_MAX_REQUEST_SIZE_GB = 10
+
+
+class Datastore(metaclass=Singleton):
+    """Singleton component for managing catalog data"""
+
+    _LOG = logging.getLogger("geokube.Datastore")
+
+    def __init__(self) -> None:
+        if "CATALOG_PATH" not in os.environ:
+            self._LOG.error(
+                "missing required environment variable: 'CATALOG_PATH'"
+            )
+            raise KeyError(
+                "Missing required environment variable: 'CATALOG_PATH'"
+            )
+        if "CACHE_PATH" not in os.environ:
+            self._LOG.error(
+                "'CACHE_PATH' environment variable was not set. Catalog will"
+                " not be opened!"
+            )
+            raise RuntimeError(
+                "'CACHE_PATH' environment variable was not set. Catalog will"
+                " not be opened!"
+            )
+        self.catalog = intake.open_catalog(os.environ["CATALOG_PATH"])
+        self.cache_dir = os.environ["CACHE_PATH"]
+        self._LOG.info("cache dir set to %s", self.cache_dir)
+        self.cache = None
+
+    @log_execution_time(_LOG)
+    def get_cached_product_or_read(
+        self, dataset_id: str, product_id: str, query: GeoQuery | None = None
+    ) -> DataCube | Dataset:
+        """Get product from the cache instead of loading files indicated in
+        the catalog if `metadata_caching` is set to `True`.
+        It might return `geokube.DataCube` or `geokube.Dataset`.
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        product_id : str
+            ID of the product
+
+        Returns
+        -------
+        kube : DataCube or Dataset
+        """
+        if self.cache is None:
+            self._load_cache()
+        if (
+            dataset_id not in self.cache
+            or product_id not in self.cache[dataset_id]
+        ):
+            self._LOG.info(
+                "dataset `%s` or product `%s` not found in cache! Reading"
+                " product!",
+                dataset_id,
+                product_id,
+            )
+            return self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][
+                product_id
+            ].process(query=query)
+        return self.cache[dataset_id][product_id]
+
+    @log_execution_time(_LOG)
+    def _load_cache(self, datasets: list[str] | None = None):
+        if self.cache is None or datasets is None:
+            self.cache = {}
+            datasets = self.dataset_list()
+
+        for i, dataset_id in enumerate(datasets):
+            self._LOG.info(
+                "loading cache for `%s` (%d/%d)",
+                dataset_id,
+                i + 1,
+                len(datasets),
+            )
+            self.cache[dataset_id] = {}
+            for product_id in self.product_list(dataset_id):
+                catalog_entry = self.catalog(CACHE_DIR=self.cache_dir)[
+                    dataset_id
+                ][product_id]
+                if hasattr(catalog_entry, "metadata_caching") and not catalog_entry.metadata_caching:
+                    self._LOG.info(
+                        "`metadata_caching` for product %s.%s set to `False`",
+                        dataset_id,
+                        product_id,
+                    )
+                    continue
+                try:
+                    self.cache[dataset_id][
+                        product_id
+                    ] = catalog_entry.read()
+                except ValueError:
+                    self._LOG.error(
+                        "failed to load cache for `%s.%s`",
+                        dataset_id,
+                        product_id,
+                        exc_info=True,
+                    )
+                except NotImplementedError:
+                    pass
+
+    @log_execution_time(_LOG)
+    def dataset_list(self) -> list:
+        """Get list of datasets available in the catalog stored in `catalog`
+        attribute
+
+        Returns
+        -------
+        datasets : list
+            List of datasets present in the catalog
+        """
+        datasets = set(self.catalog(CACHE_DIR=self.cache_dir))
+        datasets -= {
+            "medsea-rea-e3r1",
+        }
+        # NOTE: medsea cmip uses cftime.DatetimeNoLeap as time
+        # need to think how to handle it
+        return sorted(list(datasets))
+
+    @log_execution_time(_LOG)
+    def product_list(self, dataset_id: str):
+        """Get list of products available in the catalog for dataset
+        indicated by `dataset_id`
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+
+        Returns
+        -------
+        products : list
+            List of products for the dataset
+        """
+        return list(self.catalog(CACHE_DIR=self.cache_dir)[dataset_id])
+
+    @log_execution_time(_LOG)
+    def dataset_info(self, dataset_id: str):
+        """Get information about the dataset and names of all available
+        products (with their metadata)
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+
+        Returns
+        -------
+        info : dict
+            Dict of short information about the dataset
+        """
+        info = {}
+        entry = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id]
+        if entry.metadata:
+            info["metadata"] = entry.metadata
+            info["metadata"]["id"] = dataset_id
+        info["products"] = {}
+        for product_id in entry:
+            prod_entry = entry[product_id]
+            info["products"][product_id] = prod_entry.metadata
+            info["products"][product_id][
+                "description"
+            ] = prod_entry.description
+        return info
+
+    @log_execution_time(_LOG)
+    def product_metadata(self, dataset_id: str, product_id: str):
+        """Get product metadata directly from the catalog.
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        product_id : str
+            ID of the product
+
+        Returns
+        -------
+        metadata : dict
+            Metadata of the product
+        """
+        return self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][
+            product_id
+        ].metadata
+
+    @log_execution_time(_LOG)
+    def first_eligible_product_details(
+        self,
+        dataset_id: str,
+        role: str | list[str] | None = None,
+        use_cache: bool = False,
+    ):
+        """Get details for the first product of the dataset eligible for the `role`.
+        If `role` is `None`, the `public` role is considered.
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        role : optional str or list of str, default=`None`
+            Role code for which the first eligible product of a dataset
+            should be selected
+        use_cache : bool, optional, default=False
+            Data will be loaded from cache if set to `True` or directly
+            from the catalog otherwise
+
+        Returns
+        -------
+        details : dict
+            Details of the product
+
+        Raises
+        ------
+        UnauthorizedError
+            if no product of the requested dataset is eligible for the role
+        """
+        info = {}
+        product_ids = self.product_list(dataset_id)
+        for prod_id in product_ids:
+            if not self.is_product_valid_for_role(
+                dataset_id, prod_id, role=role
+            ):
+                continue
+            entry = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][prod_id]
+            if entry.metadata:
+                info["metadata"] = entry.metadata
+            info["description"] = entry.description
+            info["id"] = prod_id
+            info["dataset"] = self.dataset_info(dataset_id=dataset_id)
+            if use_cache:
+                info["data"] = self.get_cached_product_or_read(
+                    dataset_id, prod_id
+                ).to_dict()
+            else:
+                info["data"] = entry.read_chunked().to_dict()
+            return info
+        raise UnauthorizedError()
+
+    @log_execution_time(_LOG)
+    def product_details(
+        self,
+        dataset_id: str,
+        product_id: str,
+        role: str | list[str] | None = None,
+        use_cache: bool = False,
+    ):
+        """Get details for the single product
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        product_id : str
+            ID of the product
+        role : optional str or list of str, default=`None`
+            Role code for which the product is requested.
+        use_cache : bool, optional, default=False
+            Data will be loaded from cache if set to `True` or directly
+            from the catalog otherwise
+
+        Returns
+        -------
+        details : dict
+            Details of the product
+
+        Raises
+        ------
+        UnauthorizedError
+            if the requested product is not eligible for a role
+        """
+        info = {}
+        if not self.is_product_valid_for_role(
+            dataset_id, product_id, role=role
+        ):
+            raise UnauthorizedError()
+        entry = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][product_id]
+        if entry.metadata:
+            info["metadata"] = entry.metadata
+        info["description"] = entry.description
+        info["id"] = product_id
+        info["dataset"] = self.dataset_info(dataset_id=dataset_id)
+        if use_cache:
+            info["data"] = self.get_cached_product_or_read(
+                dataset_id, product_id
+            ).to_dict()
+        else:
+            info["data"] = entry.read_chunked().to_dict()
+        return info
+
+    def product_info(
+        self, dataset_id: str, product_id: str, use_cache: bool = False
+    ):
+        info = {}
+        entry = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][product_id]
+        if entry.metadata:
+            info["metadata"] = entry.metadata
+        if use_cache:
+            info["data"] = self.get_cached_product_or_read(
+                dataset_id, product_id
+            ).to_dict()
+        else:
+            info["data"] = entry.read_chunked().to_dict()
+        return info
+
+    @log_execution_time(_LOG)
+    def query(
+        self,
+        dataset_id: str,
+        product_id: str,
+        query: GeoQuery | dict | str,
+        compute: None | bool = False,
+    ) -> DataCube:
+        """Query dataset
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        product_id : str
+            ID of the product
+        query : GeoQuery or dict or str
+            Query to be executed for the given product
+        compute : bool, optional, default=False
+            If True, the resulting DataCube will be computed; otherwise a
+            DataCube wrapping a `dask.Delayed` object will be returned
+
+        Returns
+        -------
+        kube : DataCube
+            DataCube processed according to `query`
+        """
+        self._LOG.debug("query: %s", query)
+        geoquery: GeoQuery = GeoQuery.parse(query)
+        self._LOG.debug("processing GeoQuery: %s", geoquery)
+        # NOTE: we always use catalog directly and single product cache
+        self._LOG.debug("loading product...")
+        kube = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][
+            product_id
+        ].process(query=geoquery)
+        return kube
+
+    @log_execution_time(_LOG)
+    def estimate(
+        self,
+        dataset_id: str,
+        product_id: str,
+        query: GeoQuery | dict | str,
+    ) -> int:
+        """Estimate dataset size
+
+        Parameters
+        ----------
+        dataset_id : str
+            ID of the dataset
+        product_id : str
+            ID of the product
+        query : GeoQuery or dict or str
+            Query to be executed for the given product
+
+        Returns
+        -------
+        size : int
+            Number of bytes of the estimated kube
+        """
+        self._LOG.debug("query: %s", query)
+        geoquery: GeoQuery = GeoQuery.parse(query)
+        self._LOG.debug("processing GeoQuery: %s", geoquery)
+        # NOTE: we always use catalog directly and single product cache
+        self._LOG.debug("loading product...")
+        # NOTE: for estimation we use cached products
+        kube = self.get_cached_product_or_read(dataset_id, product_id,
+                                               query=geoquery)
+        return Datastore._process_query(kube, geoquery, False).nbytes
+
+    @log_execution_time(_LOG)
+    def is_product_valid_for_role(
+        self,
+        dataset_id: str,
+        product_id: str,
+        role: str | list[str] | None = None,
+    ):
+        entry = self.catalog(CACHE_DIR=self.cache_dir)[dataset_id][product_id]
+        product_role = BaseRole.PUBLIC
+        if entry.metadata:
+            product_role = entry.metadata.get("role", BaseRole.PUBLIC)
+        if product_role == BaseRole.PUBLIC:
+            return True
+        if not role:
+            # NOTE: it means, we consider the public profile
+            return False
+        if BaseRole.ADMIN in role:
+            return True
+        if product_role in role:
+            return True
+        return False
+
+    @staticmethod
+    def _process_query(kube, query: GeoQuery, compute: None | bool = False):
+        if isinstance(kube, Dataset):
+            Datastore._LOG.debug("filtering with: %s", query.filters)
+            try:
+                kube = kube.filter(**query.filters)
+            except ValueError as err:
+                Datastore._LOG.warning("could not filter by one of the keys: %s", err)
+        if isinstance(kube, Delayed) and compute:
+            kube = kube.compute()
+        if query.variable:
+            Datastore._LOG.debug("selecting fields...")
+            kube = kube[query.variable]
+        if query.area:
+            Datastore._LOG.debug("subsetting by geobbox...")
+            kube = kube.geobbox(**query.area)
+        if query.location:
+            Datastore._LOG.debug("subsetting by locations...")
+            kube = kube.locations(**query.location)
+        if query.time:
+            Datastore._LOG.debug("subsetting by time...")
+            kube = kube.sel(time=query.time)
+        if query.vertical:
+            Datastore._LOG.debug("subsetting by vertical...")
+            method = None if isinstance(query.vertical, slice) else "nearest"
+            kube = kube.sel(vertical=query.vertical, method=method)
+        return kube.compute() if compute else kube
diff --git a/datastore/datastore/exception.py b/datastore/datastore/exception.py
new file mode 100644
index 0000000..d048e83
--- /dev/null
+++ b/datastore/datastore/exception.py
@@ -0,0 +1,5 @@
+"""Module with exceptions definitions"""
+
+
+class UnauthorizedError(ValueError):
+    """Role is not authorized"""
diff --git a/datastore/datastore/singleton.py b/datastore/datastore/singleton.py
new file mode 100644
index 0000000..ff6ef01
--- /dev/null
+++ b/datastore/datastore/singleton.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""Singleton module.
+
+The module contains metaclass called Singleton
+for thread-safe singleton-pattern implementation.
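Taken together, `query`, `estimate`, and `_process_query` suggest the following request pattern. This is a sketch only: the dataset, product, and query values are borrowed from the test fixtures later in this patch, and the size check is one plausible use of the module's `DEFAULT_MAX_REQUEST_SIZE_GB` constant:

    from datastore.datastore import Datastore, DEFAULT_MAX_REQUEST_SIZE_GB

    query = {
        "variable": ["2_metre_dewpoint_temperature"],  # -> field selection
        "area": {"north": -85, "south": -90, "east": 260, "west": 240},  # -> geobbox()
        "time": {"year": ["1981"], "month": ["3"], "day": ["23"], "hour": ["15"]},  # -> sel(time=...)
    }
    nbytes = Datastore().estimate("era5-single-levels", "reanalysis", query)
    if nbytes < DEFAULT_MAX_REQUEST_SIZE_GB * 1024**3:
        kube = Datastore().query("era5-single-levels", "reanalysis", query)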
+""" +import os +import logging +from threading import Lock +from typing import Any, Type + + +class Singleton(type): + """Thread-safe implementation of the singleton design pattern metaclass""" + + _instances: dict[Type, Any] = {} + _lock: Lock = Lock() + + def __call__(cls, *args, **kwargs): + with cls._lock: + if cls not in cls._instances: + instance = super().__call__(*args, **kwargs) + if hasattr(instance, "_LOG"): + instance._LOG.setLevel( + os.environ.get("LOGGING_LEVEL", "INFO") + ) + instance._LOG.addHandler(logging.StreamHandler()) + cls._instances[cls] = instance + return cls._instances[cls] diff --git a/datastore/datastore/util.py b/datastore/datastore/util.py new file mode 100644 index 0000000..4122d57 --- /dev/null +++ b/datastore/datastore/util.py @@ -0,0 +1,27 @@ +"""Utils module""" +from functools import wraps +import datetime +import logging + + +def log_execution_time(logger: logging.Logger): + """Decorator logging execution time of the method or function""" + + def inner(func): + @wraps(func) + def wrapper(*args, **kwds): + exec_start_time = datetime.datetime.now() + try: + return func(*args, **kwds) + finally: + exec_time = datetime.datetime.now() - exec_start_time + logger.info( + "execution of '%s' function from '%s' package took %s", + func.__name__, + func.__module__, + exec_time, + ) + + return wrapper + + return inner diff --git a/datastore/dbmanager/__init__.py b/datastore/dbmanager/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/datastore/dbmanager/dbmanager.py b/datastore/dbmanager/dbmanager.py new file mode 100644 index 0000000..d4ff293 --- /dev/null +++ b/datastore/dbmanager/dbmanager.py @@ -0,0 +1,349 @@ +from __future__ import annotations + +import os +import yaml +import logging +import uuid +import secrets +from datetime import datetime +from enum import auto, Enum as Enum_, unique + +from sqlalchemy import ( + Column, + create_engine, + DateTime, + Enum, + ForeignKey, + Integer, + JSON, + Sequence, + String, + Table, +) +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import declarative_base, sessionmaker, relationship + +from .singleton import Singleton + + +def is_true(item) -> bool: + """If `item` represents `True` value""" + if isinstance(item, str): + return item.lower() in ["y", "yes", "true", "t"] + return bool(item) + + +def generate_key() -> str: + """Generate as new api key for a user""" + return secrets.token_urlsafe(nbytes=32) + + +@unique +class RequestStatus(Enum_): + """Status of the Request""" + + PENDING = auto() + QUEUED = auto() + RUNNING = auto() + DONE = auto() + FAILED = auto() + TIMEOUT = auto() + + @classmethod + def _missing_(cls, value): + return cls.PENDING + + +class _Repr: + def __repr__(self): + cols = self.__table__.columns.keys() # pylint: disable=no-member + kwa = ", ".join(f"{col}={getattr(self, col)}" for col in cols) + return f"{type(self).__name__}({kwa})" + + +Base = declarative_base(cls=_Repr, name="Base") + + +association_table = Table( + "users_roles", + Base.metadata, + Column("user_id", ForeignKey("users.user_id")), + Column("role_id", ForeignKey("roles.role_id")), +) + + +class Role(Base): + __tablename__ = "roles" + role_id = Column(Integer, Sequence("role_id_seq"), primary_key=True) + role_name = Column(String(255), nullable=False, unique=True) + + +class User(Base): + __tablename__ = "users" + user_id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + # keycloak_id = Column(UUID(as_uuid=True), nullable=False, unique=True, default=uuid.uuid4) + 
api_key = Column( + String(255), nullable=False, unique=True, default=generate_key + ) + contact_name = Column(String(255)) + requests = relationship("Request", lazy="dynamic") + roles = relationship("Role", secondary=association_table, lazy="selectin") + + +class Worker(Base): + __tablename__ = "workers" + worker_id = Column(Integer, primary_key=True) + status = Column(String(255), nullable=False) + host = Column(String(255)) + dask_scheduler_port = Column(Integer) + dask_dashboard_address = Column(String(10)) + created_on = Column(DateTime, default=datetime.now) + + +class Request(Base): + __tablename__ = "requests" + request_id = Column(Integer, primary_key=True) + status = Column(Enum(RequestStatus), nullable=False) + priority = Column(Integer) + user_id = Column( + UUID(as_uuid=True), ForeignKey("users.user_id"), nullable=False + ) + worker_id = Column(Integer, ForeignKey("workers.worker_id")) + dataset = Column(String(255)) + product = Column(String(255)) + query = Column(JSON()) + estimate_size_bytes = Column(Integer) + created_on = Column(DateTime, default=datetime.now) + last_update = Column(DateTime, default=datetime.now, onupdate=datetime.now) + fail_reason = Column(String(1000)) + download = relationship("Download", uselist=False, lazy="selectin") + + +class Download(Base): + __tablename__ = "downloads" + download_id = Column(Integer, primary_key=True) + download_uri = Column(String(255)) + request_id = Column( + Integer, ForeignKey("requests.request_id"), nullable=False + ) + storage_id = Column(Integer, ForeignKey("storages.storage_id")) + location_path = Column(String(255)) + size_bytes = Column(Integer) + created_on = Column(DateTime, default=datetime.now) + + +class Storage(Base): + __tablename__ = "storages" + storage_id = Column(Integer, primary_key=True) + name = Column(String(255)) + host = Column(String(20)) + protocol = Column(String(10)) + port = Column(Integer) + + +class DBManager(metaclass=Singleton): + _LOG = logging.getLogger("geokube.DBManager") + + def __init__(self) -> None: + for venv_key in [ + "POSTGRES_DB", + "POSTGRES_USER", + "POSTGRES_PASSWORD", + "DB_SERVICE_PORT", + ]: + self._LOG.info( + "attempt to load data from environment variable: `%s`", + venv_key, + ) + if venv_key not in os.environ: + self._LOG.error( + "missing required environment variable: `%s`", venv_key + ) + raise KeyError( + f"missing required environment variable: {venv_key}" + ) + + user = os.environ["POSTGRES_USER"] + password = os.environ["POSTGRES_PASSWORD"] + host = os.environ["DB_SERVICE_HOST"] + port = os.environ["DB_SERVICE_PORT"] + database = os.environ["POSTGRES_DB"] + + url = f"postgresql://{user}:{password}@{host}:{port}/{database}" + self._LOG.info("db connection: `%s`", url) + self.__engine = create_engine( + url, echo=is_true(os.environ.get("DB_LOGGING", False)) + ) + self.__session_maker = sessionmaker(bind=self.__engine) + + def _create_database(self): + try: + Base.metadata.create_all(self.__engine) + except Exception as exception: + self._LOG.error( + "could not create a database due to an error", exc_info=True + ) + raise exception + + def add_user( + self, + contact_name: str, + user_id: UUID | None = None, + api_key: str | None = None, + roles_names: list[str] | None = None, + ): + with self.__session_maker() as session: + user = User( + user_id=user_id, api_key=api_key, contact_name=contact_name + ) + if roles_names: + user.roles.extend( + [ + session.query(Role) + .where(Role.role_name == role_name) + .all()[0] # NOTE: role_name is unique in the database + 
for role_name in roles_names + ] + ) + session.add(user) + session.commit() + return user + + def get_user_details(self, user_id: int): + with self.__session_maker() as session: + return session.query(User).get(user_id) + + def get_user_roles_names(self, user_id: int | None = None) -> list[str]: + if user_id is None: + return ["public"] + with self.__session_maker() as session: + return list( + map( + lambda role: role.role_name, + session.query(User).get(user_id).roles, + ) + ) + + def get_request_details(self, request_id: int): + with self.__session_maker() as session: + return session.query(Request).get(request_id) + + def get_download_details_for_request(self, request_id: int): + with self.__session_maker() as session: + request_details = session.query(Request).get(request_id) + if request_details is None: + raise ValueError( + f"Request with id: {request_id} doesn't exist" + ) + return request_details.download + + def create_request( + self, + user_id: int = 1, + dataset: str | None = None, + product: str | None = None, + query: str | None = None, + worker_id: int | None = None, + priority: str | None = None, + estimate_size_bytes: int | None = None, + status: RequestStatus = RequestStatus.PENDING, + ) -> int: + # TODO: Add more request-related parameters to this method. + with self.__session_maker() as session: + request = Request( + status=status, + priority=priority, + user_id=user_id, + worker_id=worker_id, + dataset=dataset, + product=product, + query=query, + estimate_size_bytes=estimate_size_bytes, + created_on=datetime.utcnow(), + ) + session.add(request) + session.commit() + return request.request_id + + def update_request( + self, + request_id: int, + worker_id: int | None = None, + status: RequestStatus | None = None, + location_path: str = None, + size_bytes: int = None, + fail_reason: str = None, + ) -> int: + with self.__session_maker() as session: + request = session.query(Request).get(request_id) + if status: + request.status = status + if worker_id: + request.worker_id = worker_id + request.last_update = datetime.utcnow() + request.fail_reason = fail_reason + session.commit() + if status is RequestStatus.DONE: + download = Download( + location_path=location_path, + storage_id=0, + request_id=request.request_id, + created_on=datetime.utcnow(), + download_uri=f"/download/{request_id}", + size_bytes=size_bytes, + ) + session.add(download) + session.commit() + return request.request_id + + def get_request_status_and_reason( + self, request_id + ) -> None | RequestStatus: + with self.__session_maker() as session: + if request := session.query(Request).get(request_id): + return RequestStatus(request.status), request.fail_reason + raise IndexError( + f"Request with id: `{request_id}` does not exist!" + ) + + def get_requests_for_user_id(self, user_id) -> list[Request]: + with self.__session_maker() as session: + return session.query(User).get(user_id).requests.all() + + def get_requests_for_user_id_and_status( + self, user_id, status: RequestStatus | tuple[RequestStatus] + ) -> list[Request]: + if isinstance(status, RequestStatus): + status = (status,) + with self.__session_maker() as session: + return session.get(User, user_id).requests.filter( + Request.status.in_(status) + ) + + def get_download_details_for_request_id(self, request_id) -> Download: + with self.__session_maker() as session: + request_details = session.query(Request).get(request_id) + if request_details is None: + raise IndexError( + f"Request with id: `{request_id}` does not exist!" 
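A sketch of the request life cycle these methods support. The UUID, worker ID, query payload, and paths are illustrative, and the referenced user and worker rows are assumed to exist already:

    import uuid

    from dbmanager.dbmanager import DBManager, RequestStatus

    db = DBManager()  # connection settings come from the POSTGRES_*/DB_SERVICE_* env vars
    request_id = db.create_request(
        user_id=uuid.UUID("00000000-0000-0000-0000-000000000000"),  # hypothetical user
        dataset="era5-single-levels",
        product="reanalysis",
        query='{"variable": ["tas"]}',  # illustrative JSON payload
    )
    db.update_request(request_id, worker_id=1, status=RequestStatus.RUNNING)
    db.update_request(
        request_id,
        status=RequestStatus.DONE,  # DONE also creates the Download row
        location_path="/downloads/1/result.nc",  # hypothetical result file
        size_bytes=1024,
    )
    status, fail_reason = db.get_request_status_and_reason(request_id)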
+ ) + return request_details.download + + def create_worker( + self, + status: str, + dask_scheduler_port: int, + dask_dashboard_address: int, + host: str = "localhost", + ) -> int: + with self.__session_maker() as session: + worker = Worker( + status=status, + host=host, + dask_scheduler_port=dask_scheduler_port, + dask_dashboard_address=dask_dashboard_address, + created_on=datetime.utcnow(), + ) + session.add(worker) + session.commit() + return worker.worker_id diff --git a/datastore/dbmanager/singleton.py b/datastore/dbmanager/singleton.py new file mode 100644 index 0000000..bf7b29b --- /dev/null +++ b/datastore/dbmanager/singleton.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +"""Singleton module. + +The module contains metaclass called Singleton +for thread-safe singleton-pattern implementation. +""" +from threading import Lock + + +class Singleton(type): + """Thread-safe implementation of the singleton design pattern metaclass""" + + _instances = {} + _lock: Lock = Lock() + + def __call__(cls, *args, **kwargs): + with cls._lock: + if cls not in cls._instances: + instance = super().__call__(*args, **kwargs) + cls._instances[cls] = instance + return cls._instances[cls] diff --git a/datastore/requirements.txt b/datastore/requirements.txt new file mode 100644 index 0000000..d4a7d44 --- /dev/null +++ b/datastore/requirements.txt @@ -0,0 +1,2 @@ +networkx +pydantic \ No newline at end of file diff --git a/datastore/tests/__init__.py b/datastore/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/datastore/tests/workflow/__init__.py b/datastore/tests/workflow/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/datastore/tests/workflow/fixtures.py b/datastore/tests/workflow/fixtures.py new file mode 100644 index 0000000..8ce94ad --- /dev/null +++ b/datastore/tests/workflow/fixtures.py @@ -0,0 +1,122 @@ +import pytest + + +@pytest.fixture +def subset_query() -> str: + yield """ + { + "dataset_id": "era5-single-levels", + "product_id": "reanalysis", + "query": { + "area": { + "north": -85, + "south": -90, + "east": 260, + "west": 240 + }, + "time": { + "hour": [ + "15" + ], + "year": [ + "1981", + "1985", + "2022" + ], + "month": [ + "3", + "6" + ], + "day": [ + "23", + "27" + ] + }, + "variable": [ + "2_metre_dewpoint_temperature", + "surface_net_downward_shortwave_flux" + ] + } + } + """ + + +@pytest.fixture +def resample_query(): + yield """ + { + "freq": "1D", + "operator": "nanmax", + "resample_args": { + "closed": "right" + } + } + """ + + +@pytest.fixture +def workflow_str(): + yield """ + [ + { + "id": "subset1", + "op": "subset", + "args": { + "dataset_id": "era5-single-levels", + "product_id": "reanalysis", + "query": { + "area": { + "north": -85, + "south": -90, + "east": 260, + "west": 240 + } + } + } + }, + { + "id": "resample1", + "use": ["subset1"], + "op": "resample", + "args": + { + "freq": "1D", + "operator": "nanmax" + } + } + ] + """ + + +@pytest.fixture +def bad_workflow_str(): + yield """ + [ + { + "id": "subset1", + "op": "subset", + "args": { + "dataset_id": "era5-single-levels", + "product_id": "reanalysis", + "query": { + "area": { + "north": -85, + "south": -90, + "east": 260, + "west": 240 + } + } + } + }, + { + "id": "resample1", + "use": ["subset1", "subset2"], + "op": "resample", + "args": + { + "freq": "1D", + "operator": "nanmax" + } + } + ] + """ diff --git a/datastore/tests/workflow/test_operators.py b/datastore/tests/workflow/test_operators.py new file mode 100644 index 0000000..46cf109 --- /dev/null +++ 
b/datastore/tests/workflow/test_operators.py
@@ -0,0 +1,20 @@
+from workflow import operators as op
+
+from .fixtures import subset_query, resample_query
+
+
+def test_create_subset_operator_with_str_args(subset_query):
+    sub_op = op.Operator("subset", subset_query)
+    assert isinstance(sub_op, op.Subset)
+    assert isinstance(sub_op.args, op.SubsetArgs)
+    assert sub_op.args.dataset_id == "era5-single-levels"
+    assert sub_op.args.product_id == "reanalysis"
+
+
+def test_create_resample_operator_with_str_args(resample_query):
+    res_op = op.Operator("resample", resample_query)
+    assert isinstance(res_op, op.Resample)
+    assert isinstance(res_op.args, op.ResampleArgs)
+    assert res_op.args.freq == "1D"
+    assert res_op.args.operator == "nanmax"
+    assert res_op.args.resample_args == {"closed": "right"}
diff --git a/datastore/tests/workflow/test_workflow.py b/datastore/tests/workflow/test_workflow.py
new file mode 100644
index 0000000..7036b73
--- /dev/null
+++ b/datastore/tests/workflow/test_workflow.py
@@ -0,0 +1,23 @@
+import pytest
+from workflow.workflow import Workflow
+
+from .fixtures import workflow_str, bad_workflow_str
+
+
+def test_create_workflow(workflow_str):
+    comp_graph = Workflow(workflow_str)
+    assert len(comp_graph) == 2
+    task_iter = comp_graph.traverse()
+    node1, preceding1 = next(task_iter)
+    assert preceding1 == tuple()
+    assert node1.operator.name == "subset"
+
+    node2, preceding2 = next(task_iter)
+    assert len(preceding2) == 1
+    assert node2.operator.name == "resample"
+    assert preceding2[0].operator.name == "subset"
+
+
+def test_fail_when_task_not_defined(bad_workflow_str):
+    with pytest.raises(ValueError, match=r"task with id*"):
+        _ = Workflow(bad_workflow_str)
diff --git a/datastore/utils/__init__.py b/datastore/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/datastore/utils/api_logging.py b/datastore/utils/api_logging.py
new file mode 100644
index 0000000..58d148d
--- /dev/null
+++ b/datastore/utils/api_logging.py
@@ -0,0 +1,40 @@
+import os
+from typing import Literal
+import logging as default_logging
+
+
+def get_dds_logger(
+    name: str,
+    level: Literal["debug", "info", "warning", "error", "critical"] = "info",
+):
+    """Get DDS logger with the expected format, handlers and formatter.
+
+    Parameters
+    ----------
+    name : str
+        Name of the logger
+    level : str, default="info"
+        Value of the logging level. One out of ["debug", "info", "warning",
+        "error", "critical"].
+        The logging level is taken from the
+        environment variable `LOGGING_LEVEL`. If this variable is not defined,
+        the value of the `level` argument is used.
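A minimal usage sketch, mirroring how the API modules in this series set up their module-level loggers:

    from utils.api_logging import get_dds_logger

    log = get_dds_logger(__name__)     # level from LOGGING_LEVEL if set, else "info"
    log.info("datastore initialised")  # format from LOGGING_FORMAT if set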
+ + Returns + ------- + log : logging.Logger + Logger with the handlers set + """ + log = default_logging.getLogger(name) + format_ = os.environ.get( + "LOGGING_FORMAT", + "%(asctime)s %(name)s %(levelname)s %(message)s", + ) + formatter = default_logging.Formatter(format_) + logging_level = os.environ.get("LOGGING_LEVEL", level.upper()) + log.setLevel(logging_level) + stream_handler = default_logging.StreamHandler() + stream_handler.setFormatter(formatter) + stream_handler.setLevel(logging_level) + log.addHandler(stream_handler) + return log diff --git a/datastore/utils/metrics.py b/datastore/utils/metrics.py new file mode 100644 index 0000000..82aeb55 --- /dev/null +++ b/datastore/utils/metrics.py @@ -0,0 +1,33 @@ +import time +import logging as default_logging +from functools import wraps +from typing import Literal + + +def log_execution_time( + logger: default_logging.Logger, + level: Literal["debug", "info", "warning", "error", "critical"] = "info", +): + """Decorator logging execution time of the method or function""" + level = default_logging.getLevelName(level.upper()) + + def inner(func): + @wraps(func) + def wrapper(*args, **kwds): + exec_start_time = time.monotonic() + try: + return func(*args, **kwds) + finally: + # NOTE: maybe logging should be on DEBUG level + logger.log( + level, + "execution of '%s' function from '%s' package took" + " %.4f sec", + func.__name__, + func.__module__, + time.monotonic() - exec_start_time, + ) + + return wrapper + + return inner diff --git a/datastore/wait-for-it.sh b/datastore/wait-for-it.sh new file mode 100755 index 0000000..d990e0d --- /dev/null +++ b/datastore/wait-for-it.sh @@ -0,0 +1,182 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available + +WAITFORIT_cmdname=${0##*/} + +echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + else + echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" + fi + WAITFORIT_start_ts=$(date +%s) + while : + do + if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then + nc -z $WAITFORIT_HOST $WAITFORIT_PORT + WAITFORIT_result=$? + else + (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 + WAITFORIT_result=$? 
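For comparison with `datastore.util.log_execution_time`, the metrics variant above takes an explicit level; a sketch of decorating an expensive call (the logger name and function are illustrative):

    import logging

    from utils.metrics import log_execution_time

    log = logging.getLogger("geokube.example")  # illustrative logger name

    @log_execution_time(log, level="debug")
    def read_product():
        ...  # placeholder for expensive work; duration is logged in the `finally` block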
+ fi + if [[ $WAITFORIT_result -eq 0 ]]; then + WAITFORIT_end_ts=$(date +%s) + echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" + break + fi + sleep 1 + done + return $WAITFORIT_result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $WAITFORIT_QUIET -eq 1 ]]; then + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + else + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + fi + WAITFORIT_PID=$! + trap "kill -INT -$WAITFORIT_PID" INT + wait $WAITFORIT_PID + WAITFORIT_RESULT=$? + if [[ $WAITFORIT_RESULT -ne 0 ]]; then + echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + fi + return $WAITFORIT_RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + WAITFORIT_hostport=(${1//:/ }) + WAITFORIT_HOST=${WAITFORIT_hostport[0]} + WAITFORIT_PORT=${WAITFORIT_hostport[1]} + shift 1 + ;; + --child) + WAITFORIT_CHILD=1 + shift 1 + ;; + -q | --quiet) + WAITFORIT_QUIET=1 + shift 1 + ;; + -s | --strict) + WAITFORIT_STRICT=1 + shift 1 + ;; + -h) + WAITFORIT_HOST="$2" + if [[ $WAITFORIT_HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + WAITFORIT_HOST="${1#*=}" + shift 1 + ;; + -p) + WAITFORIT_PORT="$2" + if [[ $WAITFORIT_PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + WAITFORIT_PORT="${1#*=}" + shift 1 + ;; + -t) + WAITFORIT_TIMEOUT="$2" + if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + WAITFORIT_TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + WAITFORIT_CLI=("$@") + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} +WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} +WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} +WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} + +# Check to see if timeout is from busybox? +WAITFORIT_TIMEOUT_PATH=$(type -p timeout) +WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) + +WAITFORIT_BUSYTIMEFLAG="" +if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then + WAITFORIT_ISBUSY=1 + # Check if busybox timeout uses -t flag + # (recent Alpine versions don't support -t anymore) + if timeout &>/dev/stdout | grep -q -e '-t '; then + WAITFORIT_BUSYTIMEFLAG="-t" + fi +else + WAITFORIT_ISBUSY=0 +fi + +if [[ $WAITFORIT_CHILD -gt 0 ]]; then + wait_for + WAITFORIT_RESULT=$? + exit $WAITFORIT_RESULT +else + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + wait_for_wrapper + WAITFORIT_RESULT=$? + else + wait_for + WAITFORIT_RESULT=$? 
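For reference, the script is normally used to gate a container command on one of its dependencies, e.g. `./wait-for-it.sh broker:5672 -t 60 -- python main.py` (host, port, and command are illustrative): the command after `--` starts once the TCP port accepts connections, and with `--strict` it is skipped entirely if the timeout expires first.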
+    fi
+fi
+
+if [[ $WAITFORIT_CLI != "" ]]; then
+    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
+        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
+        exit $WAITFORIT_RESULT
+    fi
+    exec "${WAITFORIT_CLI[@]}"
+else
+    exit $WAITFORIT_RESULT
+fi
diff --git a/datastore/workflow/__init__.py b/datastore/workflow/__init__.py
new file mode 100644
index 0000000..9c75326
--- /dev/null
+++ b/datastore/workflow/__init__.py
@@ -0,0 +1 @@
+from workflow.workflow import Workflow
diff --git a/datastore/workflow/workflow.py b/datastore/workflow/workflow.py
new file mode 100644
index 0000000..63e6f78
--- /dev/null
+++ b/datastore/workflow/workflow.py
@@ -0,0 +1,226 @@
+import json
+from typing import Generator, Hashable, Callable, Literal, Any
+from functools import partial
+import logging
+
+import networkx as nx
+from geokube.core.datacube import DataCube
+from intake_geokube.queries.geoquery import GeoQuery
+from intake_geokube.queries.workflow import Workflow as WorkflowModel
+from datastore.datastore import Datastore
+
+AggregationFunctionName = (
+    Literal["max"]
+    | Literal["nanmax"]
+    | Literal["min"]
+    | Literal["nanmin"]
+    | Literal["mean"]
+    | Literal["nanmean"]
+    | Literal["sum"]
+    | Literal["nansum"]
+)
+
+
+_LOG = logging.getLogger("geokube.workflow")
+
+TASK_ATTRIBUTE = "task"
+
+
+class _WorkflowTask:
+    __slots__ = ("id", "dependencies", "operator")
+
+    id: Hashable
+    dependencies: list[Hashable] | None
+    operator: Callable[..., DataCube]
+
+    def __init__(
+        self,
+        id: Hashable,
+        operator: Callable[..., DataCube],
+        dependencies: list[Hashable] | None = None,
+    ) -> None:
+        self.operator = operator
+        self.id = id
+        if dependencies is None:
+            dependencies = []
+        self.dependencies = dependencies
+
+    def compute(self, kube: DataCube | None) -> DataCube:
+        return self.operator(kube)
+
+
+class Workflow:
+    __slots__ = ("graph", "present_nodes_ids", "is_verified")
+
+    graph: nx.DiGraph
+    present_nodes_ids: set[Hashable]
+    is_verified: bool
+
+    def __init__(self) -> None:
+        self.graph = nx.DiGraph()
+        self.present_nodes_ids = set()
+        self.is_verified = False
+
+    @classmethod
+    def from_tasklist(cls, task_list: WorkflowModel) -> "Workflow":
+        workflow = cls()
+        for task in task_list.tasks:
+            match task.op:
+                case "subset":
+                    workflow.subset(task.id, **task.args)
+                case "resample":
+                    workflow.resample(
+                        task.id, dependencies=task.use, **task.args
+                    )
+                case "average":
+                    workflow.average(
+                        task.id, dependencies=task.use, **task.args
+                    )
+                case "to_regular":
+                    workflow.to_regular(
+                        task.id, dependencies=task.use, **task.args
+                    )
+                case _:
+                    raise ValueError(
+                        f"task operator: {task.op} is not defined"
+                    )
+        return workflow
+
+    def _add_computational_node(self, task: _WorkflowTask):
+        node_id = task.id
+        assert (
+            node_id not in self.present_nodes_ids
+        ), "workflow task IDs need to be unique!"
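`from_tasklist` consumes the same task-list structure exercised by the fixtures earlier in this patch; a minimal two-task example (IDs, dataset, and arguments are illustrative):

    [
        {"id": "subset1", "op": "subset",
         "args": {"dataset_id": "era5-single-levels", "product_id": "reanalysis",
                  "query": {"area": {"north": -85, "south": -90,
                                     "east": 260, "west": 240}}}},
        {"id": "resample1", "use": ["subset1"], "op": "resample",
         "args": {"freq": "1D", "operator": "nanmax"}}
    ]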
+        self.present_nodes_ids.add(node_id)
+        self.graph.add_node(node_id, **{TASK_ATTRIBUTE: task})
+        for dependent_node in task.dependencies:
+            self.graph.add_edge(dependent_node, node_id)
+        self.is_verified = False
+
+    def subset(
+        self,
+        id: Hashable,
+        dataset_id: str,
+        product_id: str,
+        query: GeoQuery | dict,
+    ) -> "Workflow":
+        def _subset(kube: DataCube | None = None) -> DataCube:
+            return Datastore().query(
+                dataset_id=dataset_id,
+                product_id=product_id,
+                query=(
+                    query if isinstance(query, GeoQuery) else GeoQuery(**query)
+                ),
+                compute=False,
+            )
+
+        task = _WorkflowTask(id=id, operator=_subset)
+        self._add_computational_node(task)
+        return self
+
+    def resample(
+        self,
+        id: Hashable,
+        freq: str,
+        agg: Callable[..., DataCube] | AggregationFunctionName,
+        resample_kwargs: dict[str, Any] | None,
+        *,
+        dependencies: list[Hashable],
+    ) -> "Workflow":
+        def _resample(kube: DataCube | None = None) -> DataCube:
+            assert kube is not None, "`kube` cannot be `None` for resampling"
+            return kube.resample(
+                operator=agg,
+                frequency=freq,
+                **(resample_kwargs or {}),  # tolerate resample_kwargs=None
+            )
+
+        task = _WorkflowTask(
+            id=id, operator=_resample, dependencies=dependencies
+        )
+        self._add_computational_node(task)
+        return self
+
+    def average(
+        self, id: Hashable, dim: str, *, dependencies: list[Hashable]
+    ) -> "Workflow":
+        def _average(kube: DataCube | None = None) -> DataCube:
+            assert kube is not None, "`kube` cannot be `None` for averaging"
+            return kube.average(dim=dim)
+
+        task = _WorkflowTask(
+            id=id, operator=_average, dependencies=dependencies
+        )
+        self._add_computational_node(task)
+        return self
+
+    def to_regular(
+        self, id: Hashable, *, dependencies: list[Hashable]
+    ) -> "Workflow":
+        def _to_regular(kube: DataCube | None = None) -> DataCube:
+            assert (
+                kube is not None
+            ), "`kube` cannot be `None` for `to_regular`"
+            return kube.to_regular()
+
+        task = _WorkflowTask(
+            id=id, operator=_to_regular, dependencies=dependencies
+        )
+        self._add_computational_node(task)
+        return self
+
+    def add_task(
+        self,
+        id: Hashable,
+        func: Callable[..., DataCube],
+        dependencies: list[str] | None = None,
+        **func_kwargs,
+    ) -> "Workflow":
+        task = _WorkflowTask(
+            id=id,
+            operator=partial(func, **func_kwargs),
+            dependencies=dependencies,
+        )
+        self._add_computational_node(task)
+        return self
+
+    def verify(self) -> "Workflow":
+        if self.is_verified:
+            return
+        assert nx.is_directed_acyclic_graph(
+            self.graph
+        ), "the workflow contains cycles!"
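The same pipeline can be assembled through the fluent builder methods; a sketch (dataset, product, and frequency are illustrative):

    from workflow.workflow import Workflow

    wf = (
        Workflow()
        .subset(
            "subset1",
            dataset_id="era5-single-levels",
            product_id="reanalysis",
            query={"area": {"north": -85, "south": -90, "east": 260, "west": 240}},
        )
        .resample("resample1", freq="1D", agg="nanmax", resample_kwargs=None,
                  dependencies=["subset1"])
    )
    result = wf.compute()  # verifies the DAG, then runs tasks in topological order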
+ for u, v in self.graph.edges: + if TASK_ATTRIBUTE not in self.graph.nodes[u].keys(): + _LOG.error( + "task with id `%s` is not defined for the workflow", u + ) + raise ValueError( + f"task with id `{u}` is not defined for the workflow" + ) + if TASK_ATTRIBUTE not in self.graph.nodes[v].keys(): + _LOG.error( + "task with id `%s` is not defined for the workflow", v + ) + raise ValueError( + f"task with id `{v}` is not defined for the workflow" + ) + self.is_verified = True + + def traverse(self) -> Generator[_WorkflowTask, None, None]: + for node_id in nx.topological_sort(self.graph): + _LOG.debug("computing task for the node: %s", node_id) + yield self.graph.nodes[node_id][TASK_ATTRIBUTE] + + def compute(self) -> DataCube: + self.verify() + result = None + for task in self.traverse(): + result = task.compute(result) + return result + + def __len__(self): + return len(self.graph.nodes) + + def __getitem__(self, idx: Hashable): + return self.graph.nodes[idx] From 49bec4730ac839c1e64e412e29de1e471750e3fe Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Thu, 11 Jan 2024 09:16:50 +0100 Subject: [PATCH 03/31] Update executor with adjust_for_intake branch --- executor/Dockerfile | 28 +- executor/app/main.py | 567 ++++++++++++++++++++++++++++++-------- executor/app/messaging.py | 45 +++ executor/app/meta.py | 27 ++ executor/requirements.txt | 11 +- 5 files changed, 537 insertions(+), 141 deletions(-) create mode 100644 executor/app/messaging.py create mode 100644 executor/app/meta.py diff --git a/executor/Dockerfile b/executor/Dockerfile index e3cc317..6a946fd 100644 --- a/executor/Dockerfile +++ b/executor/Dockerfile @@ -1,16 +1,12 @@ -FROM continuumio/miniconda3 -WORKDIR /code -RUN conda install -c conda-forge xesmf cartopy psycopg2 -y -COPY ./executor/requirements.txt /code/requirements.txt -RUN pip install --no-cache-dir -r requirements.txt -COPY geokube_packages/geokube-0.1a0-py3-none-any.whl /code -COPY geokube_packages/intake_geokube-0.1a0-py3-none-any.whl /code -RUN pip install /code/geokube-0.1a0-py3-none-any.whl -RUN pip install /code/intake_geokube-0.1a0-py3-none-any.whl -COPY ./db/dbmanager /code/app/db/dbmanager -COPY ./utils/wait-for-it.sh /code/wait-for-it.sh -COPY ./datastore /code/app/datastore -COPY ./geoquery /code/app/geoquery -COPY ./resources /code/app/resources -COPY ./executor/app /code/app -CMD [ "python", "./app/main.py" ] \ No newline at end of file +ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production +ARG TAG=latest +ARG SENTINEL_USERNAME=... +ARG SENTINEL_PASSWORD=... +FROM $REGISTRY/geodds-datastore:$TAG +WORKDIR /app +ENV SENTINEL_USERNAME=$SENTINEL_USERNAME +ENV SENTINEL_PASSWORD=$SENTINEL_PASSWORD +COPY requirements.txt /code/requirements.txt +RUN pip install --no-cache-dir -r /code/requirements.txt +COPY app /app +CMD [ "python", "main.py" ] diff --git a/executor/app/main.py b/executor/app/main.py index c59ef92..35b90fe 100644 --- a/executor/app/main.py +++ b/executor/app/main.py @@ -1,146 +1,477 @@ -# We have three type of executor: -# - query executor (query) -# - estimate query executor (estimate) -# - catalog info executor (info) -# -# Configuration parameters for the executor: -# type: query, estimate, catalog -# dask cluster base ports (if they are not provided the cluster is not created: (e.g. 
for estimate and catalog info)) -# channel: channel_queue, channel_type, channel_durable -# catalog path -# store_path (where to store the query results) -# -# An executor will register to the DB and get a worker id -# if dask cluster base ports are provided, a dask cluster is created -# an executor mush have a unique port for the dask scheduler/dashboard - import os -import json +import time +import datetime import pika -from dask.distributed import Client, LocalCluster +import logging +import asyncio +import threading, functools +from zipfile import ZipFile + +import numpy as np +from dask.distributed import Client, LocalCluster, Nanny, Status +from dask.delayed import Delayed +from geokube.core.datacube import DataCube +from geokube.core.dataset import Dataset +from geokube.core.field import Field from datastore.datastore import Datastore -from db.dbmanager.dbmanager import DBManager, RequestStatus +from workflow import Workflow +from intake_geokube.queries.geoquery import GeoQuery +from dbmanager.dbmanager import DBManager, RequestStatus + +from meta import LoggableMeta +from messaging import Message, MessageType + +_BASE_DOWNLOAD_PATH = "/downloads" + + +def get_file_name_for_climate_downscaled(kube: DataCube, message: Message): + query: GeoQuery = GeoQuery.parse(message.content) + is_time_range = False + if query.time: + is_time_range = "start" in query.time or "stop" in query.time + var_names = list(kube.fields.keys()) + if len(kube) == 1: + if is_time_range: + FILENAME_TEMPLATE = "{ncvar_name}_VHR-PRO_IT2km_CMCC-CM_{product_id}_CCLM5-0-9_1hr_{start_date}_{end_date}_{request_id}" + ncvar_name = kube.fields[var_names[0]].ncvar + return FILENAME_TEMPLATE.format( + product_id=message.product_id, + request_id=message.request_id, + ncvar_name=ncvar_name, + start_date=np.datetime_as_string( + kube.time.values[0], unit="D" + ), + end_date=np.datetime_as_string(kube.time.values[-1], unit="D"), + ) + else: + FILENAME_TEMPLATE = "{ncvar_name}_VHR-PRO_IT2km_CMCC-CM_{product_id}_CCLM5-0-9_1hr_{request_id}" + ncvar_name = kube.fields[var_names[0]].ncvar + return FILENAME_TEMPLATE.format( + product_id=message.product_id, + request_id=message.request_id, + ncvar_name=ncvar_name, + ) + else: + if is_time_range: + FILENAME_TEMPLATE = "VHR-PRO_IT2km_CMCC-CM_{product_id}_CCLM5-0-9_1hr_{start_date}_{end_date}_{request_id}" + return FILENAME_TEMPLATE.format( + product_id=message.product_id, + request_id=message.request_id, + start_date=np.datetime_as_string( + kube.time.values[0], unit="D" + ), + end_date=np.datetime_as_string(kube.time.values[-1], unit="D"), + ) + else: + FILENAME_TEMPLATE = ( + "VHR-PRO_IT2km_CMCC-CM_{product_id}_CCLM5-0-9_1hr_{request_id}" + ) + return FILENAME_TEMPLATE.format( + product_id=message.product_id, + request_id=message.request_id, + ) + + +def rcp85_filename_condition(kube: DataCube, message: Message) -> bool: + return ( + message.dataset_id == "climate-projections-rcp85-downscaled-over-italy" + ) + + +def get_history_message(): + return ( + f"Generated by CMCC DDS version 0.9.0 {str(datetime.datetime.now())}" + ) + + +def persist_datacube( + kube: DataCube, + message: Message, + base_path: str | os.PathLike, +) -> str | os.PathLike: + if rcp85_filename_condition(kube, message): + path = get_file_name_for_climate_downscaled(kube, message) + else: + var_names = list(kube.fields.keys()) + if len(kube) == 1: + path = "_".join( + [ + var_names[0], + message.dataset_id, + message.product_id, + message.request_id, + ] + ) + else: + path = "_".join( + [message.dataset_id, 
message.product_id, message.request_id]
+            )
+    kube._properties["history"] = get_history_message()
+    if isinstance(message.content, GeoQuery):
+        format = message.content.format
+        format_args = message.content.format_args
+    else:
+        format = "netcdf"
+        format_args = None  # no extra format arguments for the default format
+    match format:
+        case "netcdf":
+            full_path = os.path.join(base_path, f"{path}.nc")
+            kube.to_netcdf(full_path)
+        case "geojson":
+            full_path = os.path.join(base_path, f"{path}.json")
+            kube.to_geojson(full_path)
+        case "png":
+            full_path = os.path.join(base_path, f"{path}.png")
+            kube.to_image(full_path, **(format_args or {}))
+        case "jpeg":
+            full_path = os.path.join(base_path, f"{path}.jpg")
+            kube.to_image(full_path, **(format_args or {}))
+        case _:
+            raise ValueError(f"format `{format}` is not supported")
+    return full_path
+
+
+def persist_dataset(
+    dset: Dataset,
+    message: Message,
+    base_path: str | os.PathLike,
+):
+    def _get_attr_comb(dataframe_item, attrs):
+        return "_".join([dataframe_item[attr_name] for attr_name in attrs])
+
+    def _persist_single_datacube(dataframe_item, base_path, format, format_args=None):
+        if not format_args:
+            format_args = {}
+        dcube = dataframe_item[dset.DATACUBE_COL]
+        if isinstance(dcube, Delayed):
+            dcube = dcube.compute()
+        if len(dcube) == 0:
+            return None
+        for field in dcube.fields.values():
+            if 0 in field.shape:
+                return None
+        attr_str = _get_attr_comb(dataframe_item, dset._Dataset__attrs)
+        var_names = list(dcube.fields.keys())
+        if len(dcube) == 1:
+            path = "_".join(
+                [
+                    var_names[0],
+                    message.dataset_id,
+                    message.product_id,
+                    attr_str,
+                    message.request_id,
+                ]
+            )
+        else:
+            path = "_".join(
+                [
+                    message.dataset_id,
+                    message.product_id,
+                    attr_str,
+                    message.request_id,
+                ]
+            )
+        match format:
+            case "netcdf":
+                full_path = os.path.join(base_path, f"{path}.nc")
+                dcube.to_netcdf(full_path)
+            case "geojson":
+                full_path = os.path.join(base_path, f"{path}.json")
+                dcube.to_geojson(full_path)
+            case "png":
+                full_path = os.path.join(base_path, f"{path}.png")
+                dcube.to_image(full_path, **format_args)
+            case "jpeg":
+                full_path = os.path.join(base_path, f"{path}.jpg")
+                dcube.to_image(full_path, **format_args)
+            case _:
+                raise ValueError(f"format: {format} is not supported!")
+        return full_path
+
+    if isinstance(message.content, GeoQuery):
+        format = message.content.format
+        format_args = message.content.format_args
+    else:
+        format = "netcdf"
+        format_args = None  # no extra format arguments for the default format
+    datacubes_paths = dset.data.apply(
+        _persist_single_datacube, base_path=base_path, format=format, format_args=format_args, axis=1
+    )
+    paths = datacubes_paths[~datacubes_paths.isna()]
+    if len(paths) == 0:
+        return None
+    elif len(paths) == 1:
+        return paths.iloc[0]
+    zip_name = "_".join(
+        [message.dataset_id, message.product_id, message.request_id]
+    )
+    path = os.path.join(base_path, f"{zip_name}.zip")
+    with ZipFile(path, "w") as archive:
+        for file in paths:
+            archive.write(file, arcname=os.path.basename(file))
+    for file in paths:
+        os.remove(file)
+    return path
+
+
+def process(message: Message, compute: bool):
+    res_path = os.path.join(_BASE_DOWNLOAD_PATH, message.request_id)
+    os.makedirs(res_path, exist_ok=True)
+    match message.type:
+        case MessageType.QUERY:
+            kube = Datastore().query(
+                message.dataset_id,
+                message.product_id,
+                message.content,
+                compute,
+            )
+        case MessageType.WORKFLOW:
+            kube = Workflow.from_tasklist(message.content).compute()
+        case _:
+            raise 
ValueError("unsupported message type") + if isinstance(kube, Field): + kube = DataCube( + fields=[kube], + properties=kube.properties, + encoding=kube.encoding, + ) + match kube: + case DataCube(): + return persist_datacube(kube, message, base_path=res_path) + case Dataset(): + return persist_dataset(kube, message, base_path=res_path) + case _: + raise TypeError( + "expected geokube.DataCube or geokube.Dataset, but passed" + f" {type(kube).__name__}" + ) -class Executor(): - def __init__(self, broker, catalog_path, store_path): - self._datastore = Datastore(catalog_path) - self._catalog_path = catalog_path +class Executor(metaclass=LoggableMeta): + _LOG = logging.getLogger("geokube.Executor") + + def __init__(self, broker, store_path): self._store = store_path - broker_conn = pika.BlockingConnection(pika.ConnectionParameters(host=broker)) + broker_conn = pika.BlockingConnection( + pika.ConnectionParameters(host=broker, heartbeat=10), + ) + self._conn = broker_conn self._channel = broker_conn.channel() self._db = DBManager() - - def create_dask_cluster(self, dask_cluster_opts): - self._worker_id = self._db.create_worker(status='enabled', - dask_scheduler_port=dask_cluster_opts['scheduler_port'], - dask_dashboard_address=dask_cluster_opts['dashboard_address']) - dask_cluster = LocalCluster(n_workers=dask_cluster_opts['n_workers'], - scheduler_port=dask_cluster_opts['scheduler_port'], - dashboard_address=dask_cluster_opts['dashboard_address'] - ) + + def create_dask_cluster(self, dask_cluster_opts: dict = None): + if dask_cluster_opts is None: + dask_cluster_opts = {} + dask_cluster_opts["scheduler_port"] = int( + os.getenv("DASK_SCHEDULER_PORT", 8188) + ) + dask_cluster_opts["processes"] = True + port = int(os.getenv("DASK_DASHBOARD_PORT", 8787)) + dask_cluster_opts["dashboard_address"] = f":{port}" + dask_cluster_opts["n_workers"] = None + dask_cluster_opts["memory_limit"] = "auto" + self._worker_id = self._db.create_worker( + status="enabled", + dask_scheduler_port=dask_cluster_opts["scheduler_port"], + dask_dashboard_address=dask_cluster_opts["dashboard_address"], + ) + self._LOG.info( + "creating Dask Cluster with options: `%s`", + dask_cluster_opts, + extra={"track_id": self._worker_id}, + ) + dask_cluster = LocalCluster( + n_workers=dask_cluster_opts["n_workers"], + scheduler_port=dask_cluster_opts["scheduler_port"], + dashboard_address=dask_cluster_opts["dashboard_address"], + memory_limit=dask_cluster_opts["memory_limit"], + ) + self._LOG.info( + "creating Dask Client...", extra={"track_id": self._worker_id} + ) self._dask_client = Client(dask_cluster) + self._nanny = Nanny(self._dask_client.cluster.scheduler.address) + + def maybe_restart_cluster(self, status: RequestStatus): + if status is RequestStatus.TIMEOUT: + self._LOG.info("recreating the cluster due to timeout") + self._dask_client.cluster.close() + self.create_dask_cluster() + if self._dask_client.cluster.status is Status.failed: + self._LOG.info("attempt to restart the cluster...") + try: + asyncio.run(self._nanny.restart()) + except Exception as err: + self._LOG.error( + "couldn't restart the cluster due to an error: %s", err + ) + self._LOG.info("closing the cluster") + self._dask_client.cluster.close() + if self._dask_client.cluster.status is Status.closed: + self._LOG.info("recreating the cluster") + self.create_dask_cluster() - def query_and_persist(self, ds_id, prod_id, query, compute, format): - kube = self._datastore.query(ds_id, prod_id, query, compute) - kube.persist(self._store, format=format) - - def 
estimate(self, channel, method, properties, body):
-        m = body.decode().split('\\')
-        dataset_id = m[0]
-        product_id = m[1]
-        query = m[2]
-        kube = self._datastore.query(dataset_id, product_id, query)
-        channel.basic_publish(exchange='',
-                    routing_key=properties.reply_to,
-                    properties=pika.BasicProperties(correlation_id = properties.correlation_id),
-                    body=str(kube.get_nbytes()))
-        channel.basic_ack(delivery_tag=method.delivery_tag)
-
-    def info(self, channel, method, properties, body):
-        m = body.decode().split('\\')
-        oper = m[0] # could be list or info
-        if (oper == 'list'):
-            if len(m) == 1: # list datasets
-                response = json.loads(self._datastore.dataset_list())
-            if len(m) == 2: # list dataset products
-                dataset_id = m[1]
-                response = json.loads(self._datastore.product_list(dataset_id))
-
-        if (oper == 'info'):
-            if (len(m) == 2): # dataset info
-                dataset_id = m[1]
-                response = json.loads(self._datastore.dataset_info(dataset_id))
-            if (len(m) == 3): # product info
-                dataset_id = m[1]
-                product_id = m[2]
-                response = json.loads(self._datastore.product_info(dataset_id, product_id))
-
-        channel.basic_publish(exchange='',
-                    routing_key=properties.reply_to,
-                    properties=pika.BasicProperties(correlation_id = \
-                    properties.correlation_id),
-                    body=response)
-        channel.basic_ack(delivery_tag=method.delivery_tag)
-
-    def query(self, channel, method, properties, body):
-        m = body.decode().split('\\')
-        request_id = m[0]
-        dataset_id = m[1]
-        product_id = m[2]
-        query = m[3]
-        format = m[4]
-
-        self._db.update_request(request_id=request_id, worker_id=self._worker_id, status=RequestStatus.RUNNING)
-        # future = self._dask_client.submit(self.query_and_persist, dataset_id, product_id, query, False, format)
-        future = self._dask_client.submit(ds_query, dataset_id, product_id, query, False, self._catalog_path)
+    def ack_message(self, channel, delivery_tag):
+        """Note that `channel` must be the same pika channel instance via which
+        the message being ACKed was retrieved (AMQP protocol constraint).
+        """
+        if channel.is_open:
+            channel.basic_ack(delivery_tag)
+        else:
+            self._LOG.info(
+                "cannot acknowledge the message. channel is closed!"
+            )
+
+    def retry_until_timeout(
+        self,
+        future,
+        message: Message,
+        retries: int = 30,
+        sleep_time: int = 10,
+    ):
+        assert retries is not None, "`retries` cannot be `None`"
+        assert sleep_time is not None, "`sleep_time` cannot be `None`"
+        status = fail_reason = location_path = None
+        try:
+            self._LOG.debug(
+                "attempt to get result for the request",
+                extra={"track_id": message.request_id},
+            )
+            for _ in range(retries):
+                if future.done():
+                    self._LOG.debug(
+                        "result is done",
+                        extra={"track_id": message.request_id},
+                    )
+                    location_path = future.result()
+                    status = RequestStatus.DONE
+                    self._LOG.debug(
+                        "result saved under: %s",
+                        location_path,
+                        extra={"track_id": message.request_id},
+                    )
+                    break
+                self._LOG.debug(
+                    f"result is not ready yet. sleeping {sleep_time} sec",
+                    extra={"track_id": message.request_id},
+                )
+                time.sleep(sleep_time)
+            else:
+                self._LOG.info(
+                    "processing timeout",
+                    extra={"track_id": message.request_id},
+                )
+                future.cancel()
+                status = RequestStatus.TIMEOUT
+                fail_reason = "Processing timeout"
         except Exception as e:
-            print(e)
-            self._db.update_request(request_id=request_id, worker_id=self._worker_id, status=RequestStatus.FAILED)
+            self._LOG.error(
+                "failed to get result due to an error: %s",
+                e,
+                exc_info=True,
+                stack_info=True,
+                extra={"track_id": message.request_id},
+            )
+            status = RequestStatus.FAILED
+            fail_reason = f"{type(e).__name__}: {str(e)}"
+        return (location_path, status, fail_reason)
+
+    def handle_message(self, connection, channel, delivery_tag, body):
+        message: Message = Message(body)
+        self._LOG.debug(
+            "executing query: `%s`",
+            message.content,
+            extra={"track_id": message.request_id},
+        )
+
+        # TODO: estimation size should be updated, too
+        self._db.update_request(
+            request_id=message.request_id,
+            worker_id=self._worker_id,
+            status=RequestStatus.RUNNING,
+        )
+
+        self._LOG.debug(
+            "submitting job for workflow request",
+            extra={"track_id": message.request_id},
+        )
+        future = self._dask_client.submit(
+            process,
+            message=message,
+            compute=False,
+        )
+        location_path, status, fail_reason = self.retry_until_timeout(
+            future,
+            message=message,
+            retries=int(os.environ.get("RESULT_CHECK_RETRIES", "30")),
+        )
+        self._db.update_request(
+            request_id=message.request_id,
+            worker_id=self._worker_id,
+            status=status,
+            location_path=location_path,
+            size_bytes=self.get_size(location_path),
+            fail_reason=fail_reason,
+        )
+        self._LOG.debug(
+            "acknowledging request", extra={"track_id": message.request_id}
+        )
+        cb = functools.partial(self.ack_message, channel, delivery_tag)
+        connection.add_callback_threadsafe(cb)
-        channel.basic_ack(delivery_tag=method.delivery_tag)
+        self.maybe_restart_cluster(status)
+        self._LOG.debug(
+            "request acknowledged", extra={"track_id": message.request_id}
+        )
+
+    def on_message(self, channel, method_frame, header_frame, body, args):
+        (connection, threads) = args
+        delivery_tag = method_frame.delivery_tag
+        t = threading.Thread(
+            target=self.handle_message,
+            args=(connection, channel, delivery_tag, body),
+        )
+        t.start()
+        threads.append(t)
 
     def subscribe(self, etype):
-        print(f'subscribe channel: {etype}_queue')
-        self._channel.queue_declare(queue=f'{etype}_queue', durable=True)
+        self._LOG.debug(
+            "subscribe channel: %s_queue", etype, extra={"track_id": "N/A"}
+        )
+        self._channel.queue_declare(queue=f"{etype}_queue", durable=True)
         self._channel.basic_qos(prefetch_count=1)
-        self._channel.basic_consume(queue=f'{etype}_queue', on_message_callback=getattr(self, etype))
+
+        threads = []
+        on_message_callback = functools.partial(
+            self.on_message, args=(self._conn, threads)
+        )
+
+        self._channel.basic_consume(
+            queue=f"{etype}_queue", on_message_callback=on_message_callback
+        )
 
     def listen(self):
         while True:
            self._channel.start_consuming()
 
-if __name__ == "__main__":
+    def get_size(self, location_path):
+        if location_path and os.path.exists(location_path):
+            return os.path.getsize(location_path)
+        return None
 
-    broker = os.getenv('BROKER', 'broker')
-    executor_types = os.getenv('EXECUTOR_TYPES', 'query').split(',')
-    catalog_path = os.getenv('CATALOG_PATH', 'catalog.yaml')
-    store_path = os.getenv('STORE_PATH', '.')
-    executor = Executor(broker=broker,
-                        catalog_path=catalog_path,
-                        store_path=store_path)
-    print('channel subscribe')
+if __name__ == 
"__main__": + broker = os.getenv("BROKER_SERVICE_HOST", "broker") + executor_types = os.getenv("EXECUTOR_TYPES", "query").split(",") + store_path = os.getenv("STORE_PATH", ".") + + executor = Executor(broker=broker, store_path=store_path) + print("channel subscribe") for etype in executor_types: - if etype == 'query': - dask_cluster_opts = {} - dask_cluster_opts['scheduler_port'] = int(os.getenv('DASK_SCHEDULER_PORT', 8188)) - port = int(os.getenv('DASK_DASHBOARD_PORT', 8787)) - dask_cluster_opts['dashboard_address'] = f':{port}' - dask_cluster_opts['n_workers'] = int(os.getenv('DASK_N_WORKERS', 1)) - executor.create_dask_cluster(dask_cluster_opts) + if etype == "query": + executor.create_dask_cluster() executor.subscribe(etype) - - print('waiting for requests ...') - executor.listen() \ No newline at end of file + + print("waiting for requests ...") + executor.listen() diff --git a/executor/app/messaging.py b/executor/app/messaging.py new file mode 100644 index 0000000..37ce25a --- /dev/null +++ b/executor/app/messaging.py @@ -0,0 +1,45 @@ +import os +import logging +from enum import Enum + +from intake_geokube.queries.geoquery import GeoQuery +from intake_geokube.queries.workflow import Workflow + +MESSAGE_SEPARATOR = os.environ["MESSAGE_SEPARATOR"] + + +class MessageType(Enum): + QUERY = "query" + WORKFLOW = "workflow" + + +class Message: + _LOG = logging.getLogger("geokube.Message") + + request_id: int + dataset_id: str = "" + product_id: str = "" + type: MessageType + content: GeoQuery | Workflow + + def __init__(self, load: bytes) -> None: + self.request_id, msg_type, *query = load.decode().split( + MESSAGE_SEPARATOR + ) + match MessageType(msg_type): + case MessageType.QUERY: + self._LOG.debug("processing content of `query` type") + assert len(query) == 3, "improper content for query message" + self.dataset_id, self.product_id, self.content = query + self.content: GeoQuery = GeoQuery.parse(self.content) + self.type = MessageType.QUERY + case MessageType.WORKFLOW: + self._LOG.debug("processing content of `workflow` type") + assert len(query) == 1, "improper content for workflow message" + self.content: Workflow = Workflow.parse(query[0]) + self.dataset_id = self.content.dataset_id + self.product_id = self.content.product_id + self.type = MessageType.WORKFLOW + case _: + self._LOG.error("type `%s` is not supported", msg_type) + raise ValueError(f"type `{msg_type}` is not supported!") diff --git a/executor/app/meta.py b/executor/app/meta.py new file mode 100644 index 0000000..739ef62 --- /dev/null +++ b/executor/app/meta.py @@ -0,0 +1,27 @@ +"""Module with `LoggableMeta` metaclass""" +import os +import logging + + +class LoggableMeta(type): + """Metaclass for dealing with logger levels and handlers""" + + def __new__(cls, child_cls, bases, namespace): + # NOTE: method is called while creating a class, not an instance! 
+        res = super().__new__(cls, child_cls, bases, namespace)
+        if hasattr(res, "_LOG"):
+            format_ = os.environ.get(
+                "LOGGING_FORMAT",
+                "%(asctime)s %(name)s %(levelname)s %(lineno)d"
+                " %(track_id)s %(message)s",
+            )
+            formatter = logging.Formatter(format_)
+            logging_level = os.environ.get("LOGGING_LEVEL", "INFO")
+            res._LOG.setLevel(logging_level)
+            stream_handler = logging.StreamHandler()
+            stream_handler.setFormatter(formatter)
+            stream_handler.setLevel(logging_level)
+            res._LOG.addHandler(stream_handler)
+            for handler in logging.getLogger("geokube").handlers:
+                handler.setFormatter(formatter)
+        return res
diff --git a/executor/requirements.txt b/executor/requirements.txt
index c4a403b..f188e90 100644
--- a/executor/requirements.txt
+++ b/executor/requirements.txt
@@ -1,7 +1,4 @@
-pika
-bokeh
-dask
-distributed
-intake
-pydantic
-sqlalchemy
\ No newline at end of file
+pika==1.2.1
+prometheus_client
+sqlalchemy
+pydantic
\ No newline at end of file

From 09353264f99fc3328b6ab2f6e4514193b6faf693 Mon Sep 17 00:00:00 2001
From: Marco Mancini
Date: Thu, 11 Jan 2024 09:19:21 +0100
Subject: [PATCH 04/31] Removed old geokube packages

---
 geokube_packages/geokube-0.1a0-py3-none-any.whl | Bin 86682 -> 0 bytes
 .../intake_geokube-0.1a0-py3-none-any.whl | Bin 13930 -> 0 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 geokube_packages/geokube-0.1a0-py3-none-any.whl
 delete mode 100644 geokube_packages/intake_geokube-0.1a0-py3-none-any.whl

diff --git a/geokube_packages/geokube-0.1a0-py3-none-any.whl b/geokube_packages/geokube-0.1a0-py3-none-any.whl
deleted file mode 100644
index 99341a80639a5b9ae3ce0c2089ced3901880a684..0000000000000000000000000000000000000000
GIT binary patch
(binary data of the deleted wheel files omitted)
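For orientation, below is a minimal sketch of the wire layout that the new `Message` class in executor/app/messaging.py expects. It only mirrors the `split(MESSAGE_SEPARATOR)` logic shown in that file: the separator value is deployment-specific (it comes from the MESSAGE_SEPARATOR environment variable, and "\\" below is only an assumed example), and the payload is treated as an opaque JSON string that `GeoQuery.parse` / `Workflow.parse` are expected to accept.

import json

# Assumption: producer and executor agree on the separator via the
# MESSAGE_SEPARATOR environment variable; "\\" is only an illustrative value.
# This also assumes the separator never occurs inside the serialized payload.
MESSAGE_SEPARATOR = "\\"


def build_query_message(
    request_id: int, dataset_id: str, product_id: str, query: dict
) -> bytes:
    """Lay out a `query`-type body: request_id, type marker, dataset and
    product identifiers, then the serialized GeoQuery payload."""
    parts = [str(request_id), "query", dataset_id, product_id, json.dumps(query)]
    return MESSAGE_SEPARATOR.join(parts).encode()


def build_workflow_message(request_id: int, workflow: dict) -> bytes:
    """Lay out a `workflow`-type body: request_id, type marker, then the
    serialized Workflow payload (dataset/product ids live inside it)."""
    parts = [str(request_id), "workflow", json.dumps(workflow)]
    return MESSAGE_SEPARATOR.join(parts).encode()


if __name__ == "__main__":
    body = build_query_message(42, "era5", "reanalysis", {"variable": ["tas"]})
    # Message.__init__ splits this back into
    # (request_id, msg_type, dataset_id, product_id, content).
    print(body.decode().split(MESSAGE_SEPARATOR))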
zP`s^h+(_03l3dKso|J4HPf&%(pQ|F!oO9S|D!IO90%xV`w~ss|lQd(oofit2@d0bP zz`cQ{o(>5RuihU-oEu~nc8*n_G{C4SQrqP-dV%&e%b#WM+b_`qqjrsc(Ho%-A;cXW)coN6Cgs61-%PBLXVJ@ryD>C@ww zsOBfvoRZP6F=oahH!2pOnG`zSD(asyXb z*Yzi0fA*K4fJ&p#hkMoB6LZg zuMc>L5-psw2*x=+a*}rf)m4Vu&Z;^q(F<9@2VkZ)7MS}u0?iP~2T(hD$S7rjr=~MY zWx{8EC6w{319>82b3wC>6FHvy-00|n&VA$5_ejTZh+I0Z& za8ce!NDa?~uS%BE(v+PBw_{b#z@2(DijY}nCEFWR+Pp~ z{{&2tdX765Q5dYJD)B8nU&I;}jeE+dLOXF!oh%vI4(rqB3nl#{!SJtjBRm4M;sYw; zCm99Dl{oJe`TEaCmpNNOKBt=h%pG31gR+BomUeX(!;%9tj{KrwxD8DDF0i4S{CgC| znKF}DZwe%jC2QIjv@zyFvrXME8Z8&-#`c`uDLcmOjIJKxkFb%Mav>uL;j9>39l$=f zFpZ_amgs|edxP8QE|6o0I-%M!GY7%Pc}ksHEy5q|^D()uD{LARO{+n?S7#HHb`Bgu zhDp*s9@GNCR^X=8n33jUI7{ldXvN0Bo3~+JD(}=ezz@UMpPTAQyvuGl{&uxGF=+E} z>!e3$#CvyK@jhNC6x1Pv%ubPiALK4EcgK4nQ*PqK41q?ou)Uy&w~`n&<3oZixf($g zSCji0@%AR7uSR#$mvAcArm7(BnZbrDoPoq}7glPZEeu8`@~PWcXE*OKD}yUyF5K|C zU~PE{Dd&YBEj;^_Ahcr!qu7`YmT-frCxxzwc#YBWz6kH258R1_DEF^iL{;RH+#2ZD z)Gb*U*L02y_qDa@l#<*zHMs2L@q?1o-;Y19+eYe^u~XxYbrL)|<2N>2CAHlxW73Ag zrL~HPL9WnE>e1q^df}pzOT=@H|^6*oLvwE{@_(l`NhH8b9A7 zqIj@g680u^T1!H6zhPbu96K6~{5tSR7Tj21cA#2(WSclbqDL@_FdW~|b0L$v?ApS) z*@<-Xkm1m&nv8Odj8fv0Ib~x6^C8=K#}~z(pGV}eiW|CntHw4E=wHFfT^Hkj(CmH+ zdUq6D&)IVfxfftj#w}aOQ8^<(ky?wNRxYLXcHDVR^2;@n%dKG^h`C5hyg%Uui2F360EY@grO_n;h(3$|Kk% zwsi*)TKMV=+c_KV6Y?(6hqa;T0mc74zQ^G=V@7IQC3KdKmi)IJ!b@7W23W04hz^2D zq@wPDQiFYIC5{S(vF9ItRB^#b!Q=KTqet;Fot@?%ubJOsLIyp#E5tlw!^)at7fY8A z7Efb7QA8w{ntsz$kWR`rE%d(CYqE71-;T~HE@|i2&4yxbx)kMyOv>&+-@26%$I(Q9g-lJlIf7gAQ9{;_Rv>Ump!Wax0RO$^PtkySLV_)F4or+S*V9 zskrm?D%U(;l48C?Fkx6$LL7;mhN!8GTQml%n}>vEIgv2Z7n_8qwyv(uEi{5IjiL}7 zR>|t(hpt;nJ<->%>rP(l=VmrW>Sq!8^4IO8sf$h%6ALXCA1lwDNN$b#%h6mCY^?2B zu5MONPP`}E#34F6WYC(_68%TNR+g^OD0yQ_gmU>fVI@~$rTzNS}s+`qt&<&lqL z505jh^?I}O_j~h8RB>dF0Qn5yeK{QvApB2Zr!Ig*U4YkSn~w~(z4WjmZW)2!jX|Qe zqy<23pqDveK1($V6HF6O)V2GV6bPj&Z}NZl>yT5>)_~pHbRtqpuDgu3oxK{AA8#hW zF%an3bNgn7_Kl6{p?O{MEd9O~r{7qg(?WCDIQm9p(Yk|!3oj?#vcA8n23#H)*ld8KvH+08 zE>*a$#^3d<9ZvY&XfV`%9-4=AN*--Uu9jM=hrV6bBXu((;z3ATrHypeV6NV-} zS%Pfq^l{7go%)2sb+`hYf`@#5gX`hecIJ*Y2~z@_LKj!CDT-3M)jpWPTyRI5D>)4} zqNF+8&q!A;j+$7!gi$ENqPZw(HDC6gO?sWij9L>b^DwMpznoqhx|K}p8clMK#gz7K zaxQFX9;&1kor32w^|Gf*f_DUJ?G7HvL$ z@Q%D%fwi(Q?F-X^mE(%lD=W3Sl}R`C(jF~I-bBYmlC=9ps`y?@_m;8%+t-E z4KU)^iSuF}T5vCQ8!Bf97R!=Ot+Guf;u5g1U>|VyyPERlYxF4{=BCF0>Q6n;x&HiN z%6q$=zJ529#Fct*;Ak}$Y$c5YJ_8mgSqb}|+!Yc0mxtaL4i~#?B1-7Fa#?hKwKZj; zqi78dF%VHh8kX#{JE1@jy_;}Qoak7)9Fs@jx zgzP9yqEhd@Rb+k=?bBP)UKW7Qm-$l-snL^hJ8ye zocgS|+h$~n4{g#BS#gpMX0(Kc74*PB`o`PPQk~IH!Zgda{LJVp@(gA^Q!oO}p9Q(t zlbw?oY?CjV53@3EOYm7!rf143?YI*?=NvpCb*HTlSLL+C0)Heoh#4NmS$ri?dM_jR z7S{e87w5)%vqA+!ZuEc4!J2PODW`tkXPOR z?tsyK)a4CiQlkO7WCV&l#br|?gaco38*aPhNN>O2#Ae0h{+XCr)*xR@EAA=Yk87Us z%TdQ0hXInM?gc)-xZw9CEY8(yP$C`2LeyG@o zr|SNhJh$b>;NbaMDQH0$fUT%-}WEV0xV% zRj$_Y5Z{n(1ntV|q(gs(O?4iYQEfyyV;JysPhz`BqE!)?T zd^PJqe;*NLhMqFo7L)tnN|a9s(~N2nwjl=^fH-XejuXfsbe2pG7lkbp+8Q!oBUA=s zilCOJ4o+&q7Qv7KS+WUvk0>2FHbb{N@l;2tn6_3WJ)oqb90;W27C+iMG^fJ-4b#s#LRxLj>Z@*v(0O|BWGR8Bz+fM-Ws~?pUPSm zk71r_ab-S~Jh^9KyAP3a`&4-K5s#7e6G>H)UdXHDO{@=RII>fZ)gYhlO$T?1IGo!b7>Kl(CyKF_K60lFp7BVg$Gz3Tj5y1Rj+qrpouxBB>TS-|k=f94ZWS_D^- z5dsQ9OuSA!g6t-o?_kne)xl!{Gs{^mFdwJn%jf5ctTrzmw@?8jwH-38fO5z>pN>Z% z5tn38Prjyr71$@3^5zgiB@ZT-T6HCO{*FSz=v|nXAh)_coPbKfd@iQ(e5EVRv3}bc zGCiv?f$OAzn+W|fDNCh{`_8^JTcCq^cQ%*_;;?J&x*3nX&nTTghL&$yJ$zdR`GSF? 
z>(-`$KxNP%&uZO$q?X+9B&-v{*(L|sl)(Kb-0Kmt0rf}bMSW_4;l^+*xN46&#UOA_#Z~yMW_5ECeXCM*Xt(G&8Vr3aK?4i6ou`R2^%^4sy4w_ zrD4dQ1nu4`c#S1a0aGfQZ_BB)4stZ|ypd_Ul>0qLffdqFZUfcvvIeZ}>rnqCyCw}i zErS6)t+9oZGawDEsU4lPgpi1=lE|8-meZGkw}42ZCC5S~3-H1WW((~7@anH26+#xm z&9eb6X_kBs5pTF?+sMU;gV{_Z?y=y^qTWoY=Q^Q~1UIgQDsn_1vpz*u$--z+A z+}oC%Z_6Xb$&vjODWv}IYvuX&<3tG zF(xoZ_rxLpN_BojQ(d-m!T4ORNyLg0PAon_rMtU@DlJwTSc06BDNM8x%MF5$QwhNf zmQD;iP>Hh2cZdi1YyWo;k)dZzJ29!wAT6xK?!oarJw)iD($UkdjR3BJkc+&A!eIQ- zjBotMF$9QjGkVEPAj$6v&+)AY!O}we-p+z}Vu}sEuRCNRO(5KTU}!{yvIoEMblk*g z-^@q&unxa#f7(zwqX-+y%lA{vh%dvx2d{%T3M!O-MiKge7Uvx)P}&53S>juu;h?#w zRc69xCT&VY83rJ!Lm>%KZO8*~Q~%I+ZPf*v1~R6Jz5eyV)Tnh=M6{bgM+yx*-(X>Y zR|v;E2zi?iyui5l1Q}UO{Zh*93(9?gsCdZ(z#VFLAiH=bzI0`+OBrH=_Os38{NR-A zhraGQ2t6<;@2MElNrt)4pmoE<@UI0g298NlQ*&cGAVb?zNyS3@hpgwHmQY>w|pPncVB9tLgg|hH!md> z8|a#2R5wz(2jns3L)2?Sph+*wxm-HB=?uqjY^9+XOJ0M88ReOaswtMj>G4P-*j#yT z#^aj~!eCPSm<#OF6Re4K#czO;B@e5X84fBD0vLifuZt4sYVt78H-L*fVix z|5ybZsSE;T4(IO>pu02_vTbhXpdjlqd zXFdu+uC2WTNCI-c2?>=Vp__e>r4Ia-ypvckNq#=oVV3^2(XLSgaLSY}McZcD^ZkF(DC|$G1L%BH6;L45b{8pI@%pLMu?q+zD*ZSKLybj$VaArafpR;ww-vAi=DyO8>)QJw%UOa zVi`6xsb1x~1W2_PJ;|F}36|?ed|Qkb8g*cf1tXj+0-eAgzN=lDRYahc8{&|cH*g1I z1q;(`nd)DCv}Aw0LqpNvT|SrQAhR}rcw}&zst}%fSd4+Ty$h)9l1e$YhN zg36P{@&j+m1+gI>AwpeOKJ3n1X>sUBHe@mQX3{e{V*gSAE{a1JipeB2lxffK6zI3S zs6c9thOMraSiG2=g%i()Pr3@;YQ8m{M-%q z+oH{ZW6ijDi?oS=Az^$~_sv~^9nr0+VA#JD!R1Y${;a?-Bu_!Hkf} z+P{yO1jd;MRb9PRR19m6 z={tX%ek4Y$8$T4Pi#nS<{?KA}0>;Lf^aL9jU_-n&Uf% zI`pjNjW&pdMG{6aR=aG(F`$m7oNAkLX|eDJz71eAqv3Xe$eupms)^bOp&)=Bt5U{= z!CwRGF)WB-dyAQpjfkt*a$9>2x&486Febt;yqUoh95Y|rBy37BvD+w@gJb7g3^aPy z*E(gnC{-OIb3k<30D=yW*4irFhVCT6EgnP-TVciq2!&gNT&4~tX!>vh%4l3Cqe9MA z!lsah!pD?A*$ft{z_>mRJJNw5EJ6_c_tEm+;W|tV`!wDQpHZy|@)TwaxDw?KAL9}# z_&?~+vI)pwI!iamqNNHadZ~t9Q|W1dtqPFqayn@1(njW z0oJJni9S&MttYys9~iSkT?S#mrtE3b%oTx|;U50DA9p^&{g`X-*FY)w9B57jcbo!* z1J}W*mW}8at!;ucY++o1^2sZtyJ&hnh3Xwz{My@;yAP})l4+@_{k z$JVJysO-%ZsmfbJEe@Zgm9cir89#+*j3|)NI}A0>eY;5mI#oe|adtY-38Fk2vN%Fw zrDSR7BxYzO2>%h)%MdvhqOpu|JUp#pc-ORZP7qqMPMXF0%vO<7q>*omMo%4BV)fP* z-{WxvwuyXwUEXII!*+z;rg3iF0F!Wj6>_($MDkn&u~1+Sxf7`t1}`S?t&vn+aAx$I z=S`;foQzk+Be2_0?s)>`E0Uwvl7sD`oiH~j{00OB@CFP!(}{#%agrU)*aXep{c;8_ zNwkfI(DqGZr#I)MmLbv)?>}b8e>CuA2ep}E7nZbCs<^Y6d-@3p(*lyEFF}?hxWz`J zE0@QrZ?cI?f-}`h#EQ#a!jm!?Vh!#4V}4T_N?+>HgZ6V>-U1Yzjnz5Q4cXEzVmv`< z3x_?3R0dmvIu!U&oM>_j8^1dd}ZEe3M^&-=G zuVK43#E!4_`m_6wEnRQz)ioN`>(S%-rsmPgUAy)0{v)&V>)j*hgXlf%O|kUo7H4Fc zCPfpYnFIl5vP9q__^GdPCB=j_MTjs1ZF=jSowQv?Wl5v+t*yI9m5Y2jm8UT0vsY_h z@C8Qu5k{{^@3Sc0uhX2VEn4~d=x>><(w=r1;IGr2_ML>beo4DxQPb+6vHorfe0gS| zB-04*%+#y1#jM@sv~a2Vj;9D@5mDy`vR_Yd)mnFCR&?JVWy=9X+e&QcRJz0{Z7lor zOv3^zO|LGKwHC+AFQs>1Us3x~cQnfO;KDmRQ4Q$v5PT-cq)QDBj!Qu7psrRys%bb7 z2YDSLr#AzHj7r_Ua)Lq;k9)hPBrRFj_sVza5D`lyY1WDUgkQ_ev8-2fK#Pq zRN4;k_HB`oj})Gf$U?=Ud?#&~~jU#h<<J-1ZK9$vv~q zM>?3Sc3T+gx>d|xCHYimS=uiad(dqas_)5wT=H|iN%n^xc)%V_pYb~st-$NiNPsY~ zTA?_jo6g=)pq_tHgX9|WzAXit2)fKX2sx3<#82Ul@tw%cY`x)R7# zI|DOA>I*V5m?LgQ@{C6v>)NN=VFrCh=iR093&eW%3~W32OzV6Q+^ILig|TC2wdP~y zMsvX+H=ZuRE?28o_LukDKhf|so_@f!#1w-07ehSX2|gX+J&Bfn+tEROSo7No?CBI` z$RGvPv>~vfuGv=jj}eSAfCXnK&X62TdhIm)@E+C$yBF6?oSwWf+Zzp zU_fO020PyaBeH%v&G>I$QV+rjV=}7RwuYJh%)tmPn~##LET4k2nCjc=!6X+8vg+H! zT!=UvFY>zun}6Rmd(OMAMZ4NMa4wDID6Vz-eIPZ6P>1x%(Z5r#pDv&6z{7d;j*90r zD1XGW(BcO57A9x=4Sv{}wyy6-B0;`D{;u88kCPx#iy3x8P*_I7A`$Nt7wuS?Y2xE! 
zbs&v7-rGN~QWc#Av{o)G(g~PoUXZ{(Qk-sWa0oFhi=SM=k+m@o;k!o!Jb%38LbPN2 zqFE9SO*j<3z(3YC-Esm643PjdBIx#Dnw9m5xZ_lIlEv@*kL^A$FX~TgjVLw%4GUGs zzhpW1+l#u4h_ZmNfU-aZKu%|e6(Fah1{)*QsJ^^>s!N$I62&eu`#D2DgOxnOkg0G0 z`CYiN#7OwvF;{|dMW>_rbR;ao{`g>;M{Tx0Cb+ckyZyvz3U4M4vQO$t&>Sf>8u1ue zv}9-B70XjwL>CL~7eji?tH`CspjZdGSz6?~Bq?X&^d|xg3ac&KFSf~)lnj@df{8$n zXAhaxa?BS{ERGIitgop|;UL_MknByI?Jd_pcOJec%SV-B>G%#s4O5L|Sd@Qc=ocfA z@S0%T!Ga0gde*|=+}5S**9i`gs#LmhxgEXuX+ZIr@>16Z)!~lAri8J96j%Q(Ju}`6 z>blUI+7UGOO|Oc^5zMvQnpu#FYZ-{oY@9I@7IsPz+@r<#gNR5`P+wT9wEKVrJFA_ zI3|N~k`UB~OLu`2242?nHU#*{77hD)(G{sZ^ACdvgQj$eJmES@>?|vvj_D@_wLiQ; zLr1R~Oh)8Vpg?e!9bh*{8{7MkPo~qQ8>AanX%G;LgqQoH1QF_?dh?3KdO^|RYG2Ll ztL_4Ry7fca!6TC+!*~D02kg<>bvf^k51Ug0Xd{UsU>2DGrPkDnI5UUH{*KYK&!^ZY zAga`N?m)T0HXy+Yg}M*6Seb96s4y~}_hIS29>dMX1sHRFu-P%{b;L9aU-{gW0&96Mi7D3Yu-`H()`wulu=nM=MxNQu z2-2z=P^O>}Svq^eTSBtcRXCbQ7+ufO#i-Jn=bjt;NW)Bu%ZyLMNzqVFj7`=m z($6t%I?9bp(MVDc)72=7OAb@hhSNcmC`{2$F|p1v&3uI(nWUS(qF#U_r;!{Vmab7G zr=XPCg_e+RR3tBEUKk&pl$w_rEBpGF1=W`)2&*Ta5+cAPc)&pdILu7!tN{B1|7Mo6 zoxPs5iK~e|rUq0~ISRrH1=M`0a=7=j>?^SfUX57OpDofS)#$?-D!5>yX~?b1@QZ-eO(0Z*@8JxES)FdlYw1Q!+@#ided$JO)=wEY=oTY_sWkY3onZ z$3oI61dSiiDJb<>!U0@``$Eej+QHoMl+I-wlT5Drt@bbnE3zX+GyE7ov7sNTfLt!; zt&m&W?QNYj_h@;)X-Mt@B3mIoGSNpya9f;oH%kGO1HxF}d%pqk1EN#U7T-$GDAvSl zx?l5}F#DdXF?hCwQX}Bo#rJ37S?-K<{?WKe9 zDhxE)2cLprm=Ja>pc2LRQ_(rRk!2g#ba+6h+#)-|_{x5tKqFK!Xf%;JK7XD#5j<)} zPk4Wky-WYzK2VU)OXI$WVzCT7$=ESc(aD^XX~;}hC@ihqE)7u>FOqj?wN3igK1e?G zDHJB{X*Bo#*>xv$k1SlxMCzv|ZIySdn~+9BYScKF(EIvBPeOMvb&FNL(>JpuixMJg z7#0LLMeAK8*t+G8LrGc;@XkT@wsyTiu^FhG)%l`63pFOos)U&7?+Okvi4;KZ!@$P# zqIEe?S`|#EN4YMJv~3N;-^jUQD6H^Iyjf5%_dBguIEO|?BY^eM(u5qDDeFpR#UB&m zpZb~iQO?~PdWHd2gH;DYWbDujHzEw1iJC7Px}XO{Se`EGCyGrf??uQyec(~Ov0~fo z#HPU&7epmp@u5qQcP|1aS*$Lz*Bu@kQiV6h5F8Ob^1bA^rJwgn{Eu_Aj{|V+3!!yc z+-tn`0>+91j1~g;P3Gy}os{@ko~!5KqG#+X@o)3+Z9Ch;!g%u>Vcnq#EppVjkVzMV zcbwbh0aDewu&h=W*tvlO@h2v z#CYuJz+d-r*{~6vi6%=wY5T4@?4SkLGlsBvVDX@N!OsGH?@K&cCMJXty@@X)&sVy* zonsu-AGu{MW6et1vc7WB&ouuQznJ@?dRcP(tyrXR7BeIX=5`*MS@s9rx+1(BH6{G? zds+x9=3^y=MCf|scf8W2J1*`j$I6i#vmRww2_9OjM%b`)E9RRm>ATjdO#2X8kd~!U zghA?>TV2;YTd33CUq@Cm6u|_c(x1X)e6wx^ZZ&E9Q{obz9N#TGb&G;DLNioH>2fqr zMdt6UmqQW>45|yP6M*MES?n2H4pKjG;e?!=H>Z4=rJIB4>uTbi$ws%cwnu;;6k5Wq zy9>7ubK8!0PfpAYoW-l8p2jA#;zFPF-w=kH2ri^^k z(@|I;wT@&=C_=}Z?FC}CX%u*5=9+FJxO5kCyU)cvP>$1q)g~#+%{|% zI_-vNdh)m~WB5>}=}c3rs>51zvJ7Uvcci~syO&ZUk! zv$~7CHuBb}n2pKy&hownzwe83HJ*-hU z^frHZ@1FSk!TECJ&xobt56QR4@i+=|QNk=ge~{X0)xWD|bM$W5jBj2#f174JW(?mT ze6u>Qe0psajtsJ2@#!R5Q7ja9Gjp**iecL)Wy8@TU)cp%Fg_D=W5u!;x~dBz>7=Q4N$l8_!fk+rgfK)Rmoot zMeW{RD%~wA-inQSXokM(BpKI|Kk(HdtU0_Ww z(UW>`b3BT?GIFpRt})2yv(u+cLcYW7D|O;zm*v~x@HE6;Es=xqIvR*43Exnnl=Qpk z7*jV4h^x;#%xE|wa@RgyXR2aFa;9OC(`u4lV~6%|)XYN^Ou-fLHS7%2K%Ft@*+s^M zCDtdm68?UX^HKF9nRmrX9sH0Iy~jKhES7MNmX@E1ue7G34$Gjsjdtb|eiu`{f)WzN zE$pBGPEAo6)S^xqM5QjR5UpOvKjh$5Hg}U z5r*Ude+wueAZS?wCJ)qC`q(pyb|tZf_R~0Z57SQ0{S+oefzXF@+S|! 
z^%+>nn5~jxqTb`RmHmf&@gqt*Bca)nAHp=z0^$#-c%_4Av(uB64?=oq1yQ>ttUZ3Ln>5rNpXX{3nDj7~tH#s{u z08+1p)&!EbXMXH_+>j%_-T8yygSU{+SFHrNoljxgdv+>Y!LuSaJl5#Ml{-B=(vccc zsGl|CIuaxt%M$nLfyJ7Y7;V#uI-Y%fcfOu}tRhwamRY}~SL4T7>gsB<;AMW2HEX{# ztIG*{sL;C~&8`-G2muz_=#pg3#+-ih?%Pmc_7iUpXDuR<{W}s5jJiVfABO4i4xA`>WIiXaTUl058E~dUp0EFB_x(BrH;C zC=WHo;{V#A4VaMrx6}oU{y}gxadfh39vf$Dw6i1&HNh;QxDn_-{xhpo(8x z^Iw(G*BDvxXI3CU2t_d5Yg>8%;(M<#o`9%|f3O7(%yxgT<1;7BbmtTz6Ql76bNA=!cI|y*9JfM1Clv;n4x&S&rqxlQP zz|2g~#=`wyd+7hf*k`FL<78&_Dgid%p8~%6uZ`{SZ9?A%=D?w*N{@@R|Ys(7vDrxHlqz*7~9z`>WIi5CZ=E3&Y67`p*;xf0ACr z1aL+N-MazxfnWHV)Ij)}^rEY%=j7q!Y-023HoeHoJe4Km%K_R-Ak)8tdVbqf&Gk>D zUkZ(XTJ*vpJYa8B0<2Y$5d1rQ=QoF4;uYr)rJ6q}FW1$&ydIE0Bq;i_MEJ?Vq;1T+b9$mBS1L5YTk~ zYXr38E5xs@xql*FtoU-j)U*!hLYaWp_t*B1kp2qspD5)&31jK%%A^*;q^^Khe_BBF z*I&Cce&#E}9|699QeNzk7}8-c1=uhJ=t6((0i#8~DNfD?&K5>=Movz2PR<_ICbWRx zUXy&P#|xhTSL6zy0qW+zr7obd;%}svOt5qoMs~JNG)}H&fCwjR3tJP~mrq|~8Qt>Z zn*gvEk>|fYZ!Rl;WB-*j>Hiwsfaf`U4gimBb3ly&p6CB9bpbn7uQ3MB&W=ucCbmX) z0Qpn1SNH#=(w08z{mUj#fK4x}F271$0A&4Zn31)Clasw8V8nEEeu1^O^=lHr&z8F}5tn{06w*8vp{AZe_*Pz#ldVYfr zzP<*%O55`q^*W2eZxq_M*Qh^o8ob85PCW1%QwqTRdozNbo&{hphMwL(w4#@P2RyF- zRsQ+_5Ig&I`u~q9;cKDz{}6%d0{s7m`PUrxe@^IL&irN2Und^lzyC7m|7#ZYS`huW zHKgZ%Vf@RK|Hz~NS+V~l;QSlXdii(AU*w!$qh4#-{YKqC{~h%|<_j-Rq1Pf!zd=@D zz<=9$@&fw%l;NMmnSOIx|B3TYvxa|C&iIWnf&IHV|7);*y^rWOB?J5Klz$knUux#{ zE}Y+xe4M{S{*NL1b=3E7kPp!>(0`+}|BsIKI==fi=gZ$Y|Bd$kv*r9#{PJR~d zx!vDf9RG=Cehqu=WBm;~V*1~(Umn-jwAXH+-?T)w|4sXsQ|LA7wd>|Likkg@qyFN^ pd5wJSy7?~>*zf;9{@alwF9imey#fJo0Dc}2fPlgT0RunK{{u}6ZpZ)t diff --git a/geokube_packages/intake_geokube-0.1a0-py3-none-any.whl b/geokube_packages/intake_geokube-0.1a0-py3-none-any.whl deleted file mode 100644 index e24fdb35c01eca8427669f367b38d30bf808f369..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13930 zcmajGW0WS#)-7DNZQEv-ZQHihW!vhq)#WbRwyVpwZQR=L`+aBc)A!tQ@)?oO$RBIQ zip;fQMa-EgF9i&O0ssI20ibA2p*;2EM4$I{wfNPrzM6%tvw@X~o|%cAm5ZSXy`G+h zt%b9m9-X~AsVp~KAOp-gXE5rbpevoc%jq<9P`{suJ<%$G!>j1|%?9?y6z5}qZ4PrU zcMdfpjmXvjnk!D0lg{rxGvZ)-RLN+91mRHu0s|4*;p-5|#-L1tPCyMgLfxK5eb`^9 zBze##;)OLV4nj1L0 zZD~U?8a;{%s^z|w}O6VVY13f&Tw%2^d9 zWa1D@QnGTg3Cn!Nkygq+Bz&a1%m@l_2Nts2ghI(dmpnxV?w)xG*B%6~kO6L(U)Fq7P}qtnIo2g>r&mKa7|Euw<_kKe zbi*1DY$|&cs3LhKYoKuBgcs(y;``@e+BLmYpu*dHLtgQV>Ml%=7B8BC%19RD)|dcr z@$dq`C|j9MK(zN?t}z|-7SJELiYJMjEJf;eJc8DcD|7(f!xGf-w@~eQRUBcRqN5YO zT2`Zp9&otP*y5_dmcPaBy2e7zEt$h8DPB154t~7I60v7?nfrF$ete3 zOHB_DSGfoS0&zf7Nq(Ao$=q;_c@Nif~bQ+^^eh^;ny1MkaWhpi8tLi#}d&R==%1Odeb zdwS5&+2dsc--m{oQ&h8j1U!elKDr+&UV3DIaJ_-qOeEw@*cnkP`SKcV+O%L7kzTRt zhV+9xP1`5TL_%xnBF>XUIjMG8E74&8!x-*1KV;CvV&{(D7Nt^Bxn?{sBDkl!$!^oD z8v*=x z#9Ere-&ECPE9YN@M)eW>7XYgr4fGI05PG=S_IVeY{sr$p=C@1ry~$cTKQ7 z2?FqvXiljQh^;L6_5%+b^ZEOQilK~fTG#YqKMCDx9jaO-oX#?#G{vq?!1na}h>I7o zth>RSY_%D}xUA!n5+T#*S-=)*dPHiq5D}2q%yBRq62~`ewm7eAyCpvxiu9M>oQIRwu^FFwokU$R!Mvs{LBngB3yTzCHFN5v zL9IvOT1bSHXPvT$%=|d1BB+IR*jg3{5W1?b+7_aO?n(U3G2uh z(^y6(eV`c;!@O_60ZL196UyaKa~67eo=#RE{OzOz5p860Mocnur?(k#?Yb82NX_}M z43mEBfI9tMx?xjVY`Bc?gd|S!!#@NuEy<2bpF*J>J|N2%^U;wdDAG3CKut5Z&EbhF zMGElOh*5L+HqVX)I$UjlqWQ~6i%6vo9m%~9xsl5#EX`oFq#ku~%{3rJSh%5(-t|+$ zPH6l_T9o&@x6OXq+;S({ELVVEg-noCM^uB)?3ti!o&*uyP}r9x95^hJK#;15beLFv z%b=ow*^%Hdw@mzo3mLq#SrnsTa0!)KsW|kK&}SL6BvTrG4sACmy#aW#B1A++#|Ya9 zX+L)7kt>OnjU?3G<7it=U?%n~0%fDngGxte+D8j!Q588cl2uJTZFqez%YhCj%k z^9(}r9&lo16J}L|8xNxj_`$#mo6tErf&LSLn|wQ}&9I*5n?E+-QOq7q{HO>PsMsP2 zOp-3OZ77X%kab2*NgoiwawzizCC5-zUe<$5syzDW1|eXExX&7$0T*&0uEYC1Ih?pB zqP6}YF?NcIkK$?RL_d#54+`3jT){Tk(7kxZ+Z)_EXRH9!g2d|^0*|JO&yjXH1Q%BV zdXuS;m>f%U#wYJ(>2;@gmn>U`ddy$&@$E 
z)*fUJ;(^c!^SlANeL!hte(dYx#aqD_w2H(tVF51Bc_=8`BNUJ_rG+C#xgD8?)^Sjp%5kOJBzV1=eelAN-cG?VufXDSPN&@7iAc^)kNR9N zEUsa?zfR<_XT!N#o=QFNqwy6f|k|w1Gwaoyq#^2UsuBd;Qd^NOZa?$c3x|VydvR`pTFxsVKP2^5_H*HRnve z>o%zyqpH8Q7I#4vR~L+4$RUwoB0A2HTHKsZMsIJ=uzbDr17z{#$@=Ou`seo#!SPb- zjG|xKc!cR71gpt;PsK}pDU?iFBy3(l3oHnUjAj0$r!(nh$CWVB(k5wutW;zPMH9i& zvzjS5wl1h?%l@yLBOI1g$B0s=$Q&Z!0*L&fxv*&t+g}#R%aQZiqDR8+j$b@m10(;r##Css7$38jCjcNK+m@Vg&FNz1;{1-^~n8*^vy&{ z=ZHcOB4&p3osl=d=#)@WS~;0VF5n04)VassfQDBKRMLxlP4Iz%0lr_57G?Myv#02F z9sm+8iR=LcF7`V!um%(?u7mTD=9beWWb1iVci7xCmld|Bwp?rO3Nk< zpKinLt$ZCR%Zkb4O=@N2?{hGwFqeM6a0=4!zT~KxHrUi(s@sTFnZ%Q-Uruf^{jNWo zKKt2Fk?s?1vM>d~@U_%wO+vM~yUv3y1315(BkP)o{XQ}lF6JpmRL`0oYG$XpuFjno0RPg1bt$LvqbOuh6R&S8dVhTIdd`4 z-|-_HAIJ^LmHG46Ztv?iJVGaBuU@ms+6a}_1oFcABuS@uVYAr}5lE%+U6iI_17w6P zRWGcH*a2s*O)zM3mAbN%L^kt{L?KT&x2Ls2G&X5MspFEz)i5h`1yPAJfzT+)3l+`Z zOiiWiZTMvKm2Weg8cB8j2A2^OgfhdoJr?~<&MUA>u0Ko)`?|V1J>*Ipez!BS`xsyX z>d8oes$2$or~7^|nH!wL7XYKuWp5R{`3XynMW5DDk?ro`O3rkrWV_efDs=$ zB@i=UtQ{Fqbhc6pm+Hff|D=;A59jc+j z*(4X0BebbxoFg{UXOTrQ6F{_pd$X&guH~(@0$L_a#?%iqxBNJu-<)9I)czQ{&t$zd z6^{n!BX#us$u20JTT5W%$Qt;(|5WAt(6DfJYCAK91lih%viiFY;-Hj`DsV`lF&2(^ zy$;CXb&E2x{PU^VLzeqMV!FXoL_3tk$FR?i2N9oNxN*@`sg|#`e<2awMu*+K10TE6 zMnjD9OqcS6Tc&UTk~*NL_(7bTMGrPgD5aBL|j5FaocLhM@)?xWL=G^t1R8_ z!}?Yk*Yo#`ep)8sp3hD+E{69%$P>L*uTb;3W^cG^nm*;DfQF`1Ws)bn!eha_ZRHD* zCb=8V8Wgb2O-9@^Ycl#eG8ECDZ#+ERbm`K!-u5t%-3W27Mw{SlgOJ9zo9k(cN?xQA zq=Jh&;VAGO-G{agy#wi8=_Ot{bLG4xZGBGcy?A|0&3Jh`alONAJy5za0Wod8r*AUz zD))UT`5{GIVhC7gR&wM*wAJt6zz;rdw%Qu{QD1&n>MIk1e${5X4@iXLy}Se5LS~!E zDE8e854DwAv4iowf!V#6s54wWm7weqs`>Cx%c#$J#^>Rj5o2p__jCl!ccdZvLEWXy znZ--CRx&BW4D;Ni1gAN;2KXLzz-TJ>qU}iATsBhB&`)3sZvdaCdZSz_V*bL@DYyjp_R8D{u6bPT0R7k;?$$HL6(THI+iAun;X?vmhG zL9M+S$`LtTdb>|Rb-v{fCl#F2>*2iL9`X#fY`hH>kzAu835{gZzWWt+JqrwKe8p&7 zlWZZU*)N_Y?L3Wi5Z|Y)O79|^&))+iCyZoUU<)Rnm6~Wy7Omkac`LC_%_0{!>=ji^ zWg2ManJ7=fZOxGj@tr>*p3PcqW5H~QdS(3rr8~t6t?fSf2{}yoaw$)l0r1)TbIdYf z!U#989Wf5Rsq*;v<3y6R@W!CMBq7>}eat z8fZGr=#mwAP*~%Lkjngs+udUSHVmNjSf>8llWW-E)cPiuf=~Y8)7bpJ|LT?12Fo5k zV5hW|H~+GJ&KJMjvO;MJ;T_B5yYmFT#VVK8d3_dd_nQ(6+?7vlFHe~jSkKKT;afby zM@P?@z}}O=5{2u5t7pr+gHgFS7~ZDhOsNtkN~$I%F~biQ7F{tMkcyZN`} zOo-3Xz02;`$=Jkau^|C`=qN_!D+GvE9A4It8^m9Q2?rQ3trSnj2oO~=IYESkMPSCB#5+RvSnLFKV|df8sq zGR*iF9#)r+Sdh{Y0Ik8_w23#?QF7?0k<=F&{;r71+5%Se}gkh}xPWSy&>V-wj~##|#4r~6wgy&gZ)Wk9WI zoMQbDx`e;*xaaRJN#VAQQ_zQOs~@xX?0YeM?cT2+;O>o@mGLnjD%txYyuSB5 zpB6zIn*u+j-I1g$5BHlodph#al&+_)dQ;erDyMpM`+R-~ujEucR4Z|a$N zDFcHpk1E*;PcBT7TPfNe4Zj&`e@i{PU(6R8zK(7)&x@6XR8Z=;J&>~m5aN8gy0ml3 z@b;WtA2J|2qgpu)x_Ga>ZrVThUak9h1%>Z`ScvD%ez1MxJMkK-GFUfK7HvO4#NQ5UPA*XlP?Z* zT~)?zl>w&XPz~WCk1@`Dy@4N2s1BC90(5aBq(|PGYsOk4grsRA;^R@_=5eRF<%~%O zDI<1o7n$@0h^Z1pxLc&OKA6Kt*rqwbq5TStj&_WZscaTfRLk^Nux+Zz#*?DaY1A06`}Yn2O#y^83c{oy z+KXjk@(h1z3yMg~p`tv&?u!<{=pv%4UQgD6UaWxfBj7fuw^d<<8SQ-6OYE42BI6au z(>();fPPC@_|{%7^$5&_r|80<6)4MBXymql5+1n+Kf}mU|17~2 z7^WCd9;o{9k=bjPNV6>>u^KT&ktRvQoWaVKD#;{=f)gejK)S7MTPHVI{JHx5-Fi!w zi+cQ4c}-h!@iCI)O=fi$PplY;<^?o?@caETUsLMYoazvb9B zvpt#`V<4I4^k@f0Gwa`^o(Bgv&O~Os)LL;AMRQ7-$+;Ji--4t}XM{QN#+fo9Up!`a z9$4MhH_0t}Wm?445@mjsm2)FvZ zKIc;Mz+mM&rsrC+Jf3^CY>C!{TD{6QwB_s73&G>i{Q)kaN9I9ycF7&xIP0iv;k#*R zeB3Yc+VNnJfc2ThpLdss%XUGBTdqd&!_^vWD1W`TI-f>9w|*<`f?@%&1hW;#$j70! 
z+BnAPTb_OwcgTb}N6wMfbDPXHjB57~32#9Wq;MN(HpS2JM<@p9#{AjR9h4k@cu0%E zW2r6GWmz0)K8s4{oE0i8UNSzN`(|ydpr43%X$RI?x&9_RSR3#cJqQ>A+R85l%RNyP zu*w)CcrhHw;EpPCW^6Kn*-Ht`DB@5i_Czya(Dlmf6K3gmB)>i}%9!ykTYsCMm%Qoy zxLERB#s51W(xo(%B`7nAu)p%*0~`PV zfJC;kvBVahr;2FI-@if7+E{vZ!Sd5{C^dG8 z#X!25=aO0wYB`E>>3qs^4>);TNmU0SPU+SS$%wH0pVsc1eZc9}n-@Bh{pG)=h;zkm zm<0iyLdTQC3T8A6V51-@VT?x1Yvf{Wvgn#(GD>hf3V0F)XvL`aedLkUN`5Y7d_ZY^ zjI-QCEX%&H)Pv>n>``g0d-H|nU~+=QFH65`bEyRZj)7O>dGmBtg5X@$>P%lo!0 z-uaa9X|p#mIE@f7a3k(TLg0~Wd|Ns8cgxG^+ukzChRDDiUiv8>dd1Ty=D5@q-3Pbv zd^X}%J&$16@st~f9}q&uM?ew*Bshl+7*oSAsPX>Gj}HoxMgW!FHe)%2AFnckh?eX_M!AwH>qb&%`kv*&*Xs>*a9~ zyITX_n$8rxaAO#47hzoY)>JG_K0Xgr=_;0{Duj|2Yu9&dw%I&QAXn&VT{_-9V(QJeB|u03iFz3g=%2f0=!W zZU0o?zM{}Yal*2n0HO00Wrq^Fa@vY;`cWM%t$$>Z&C;IC=H_8RhQ_(+I-hxSli}zM z1$j)o*QVOSgrq7N8YV}*9QBPN==&;%hduNZWblAMemHI#9>(BzR^%&BCl6GnoP}+3 zCqFuwcSId?<@iL81S-M-&in7wl1-XN2}QD;mCc(7@N;{;3+*hi2A+!>3Fo}cF~}j3 zZ*s0m8lwRn-l`Z7-t0TfGLSuGk#uTx3ZbdSnQ zZqoWb`EADeRUhm~F0{@8Qyd=9x3e+(=BL?nFSF#U)FK0CI?Fe@2)ZUy$f#XYDPU8J zmOvS(*vYW|cyiem*{(}=4P3}T>_VRnhiXzzUc%|uOs3hRjnY{xqZ)y`r^)9IZNY>B8gAu^KA=te)M`#f4V2V7-6>TDvz)T`Wvhd zw<{_QwajYzq|^<3L1?zuzZz3RfKrn+p`qVcs{vxpTHtXN`_N>5KKbw<22pacaMbC_ro zmK!)9mlC`u480h3fD+X&pJ5)Py@4xWk>O8GJ29!QKrO7~o}q~YJp`zdvayTqO@Hpe zkeh9AYwnj7j|R zWK@8W!1?Gk7Q#~lT=R_TN6QWX+^4;X`fLa^8DzPZ%f_~xVfl@%G!)}0YOye*J@QdB#nQOk z-)RL~s;|v>d@_Lpb4Ulu>Vbrq2K_o(Kgvz%bOOoho3o#JxVXfCe zmRxtie)R>4ZQ-Km6mpgxD*3Y!;qc|5_KYcCWLKhbDhMbSpEGW-Lq%x>Qf8CU>#3d( z&}3U2m^gIA{(^~A28J|;^>gsoT^bJAF}HKj5!*95Yrh9I6|i+qh6#4X(w0Q4?V`@6 z6+vvf;o0gcL?+0$wRZqcLMk*NrB)<#vk$b?f!mgM5(_3REW|p_(SJ1BH)=#`QMJ7z z3XGRPF6HF%e=r8&v14Zz+ie%$0r*-{wO61lvmaL`N7{^2bGX=tee~f`T;d z9hn9>EBIA@_6oKtGv_}yG8(3%+fW;ibv7|&8VXFE5VzKf$j&{idsw)Tl+^%w2ZHw2g{aqgNnEXk0D`vUw`&pfCIs;xpO7pMf#?A^qKbIC_?`9z)+*$(};c# zpp>DwKnbkF1a(h!X9~oz)1gZol-kkCD$`Q}tj{RPRQRJmr{`9Y#qm;Dky^R;%eIF76y77y#U5iw*gkN63D> zR2*BcDX|~U03s9ClP@y$O})(l(g#QaYAGsFVBxf7vLXHNV*l*0Q_t=y_Pj~mVJY5Y zi1wb8pFlmt-Y zzm##I@z+6n4U6K~Q8CkU5pWe-pX;t6cA|KP;v#&*TNpjSFblO!!lnh2dyMiqId{+E zpwM&n>Xqf9RdtBWMLu5#;dOYl*4OAab*BiP@xW`@i?cR?Dcu_7vUM;)GDnh-#}c}j z6!PwqwuCekV$%X-vskGE68bsq$OZ$k2!Zi~V&uKTb(k3sX}uP!P^<|G6lM*$ljV-z z6OtfRYNbG7;-=~vVYG3CQZBaV$!6oDg(;xV{2tgh8QR(DIxmpwWZ2>a zk&TMAv5SPUjUfCsx{on(JVav|<8)+3#qgzh_nIKIbb~C1_mjOctwf{H6peuPj|=P1k$7q@4k71 zccj`z!)S-5@iSZVQp@0($FH%uiLnMg93VE+9Kw>8N|i4*^B)fo(5=8Z`V!Z?sQf41&fgMHdfb&PvlGc2#Ew` zt(^A2Qd#Vc>X2Z=k@M|^lOU9p}A$Odf#p%eBi|F`f>jR>L}&_drK@crqvlq zrdiR%Xf{cJg*+Ls1a5jSq4XzVZ3zPOV7uN%R~KFPNqNfHLR;JZN%e0&o$3qd>$&^& zReXW50r;`UvDX~Rpbgqfwcl1g-ul}nYjhXg2KXEF7yaj>T7Z!;;c-lo8~ZitKh0Z3R8f!tFLT%+Pp)8fDFgfAjS2+7e}Hy z>YDSLXSVona8xyVnZ>AlTeNIv<+XJlNjK_GjqjStax)Dnf9~JfS?0dZMa64l#QCLV zRMug8v%F1CFZu(pMIp zSbFH34qIrN`ZdfxCHZt`S-Mq=1E}^&)gW>Jm%_X=$$`)#cbJozOMZuv6*xUw31CJx zD`aPM)43-~lc+%(D1=N3T6D#QRLQ%d+)}{>D127%{h3{m$EwIAc%&jh!`U2ql2L`=hX{i|~ z0J*-w?o~iU&V$pe-_9M)5UemJld5f7nCU|vMrip$v}AR~G_1vR|8_4Xg;=0f{}$$A z#PLLl?=$E^Q1{$5?}irLTHD~YG?t^d*2UFedLW?=*@vTFm)-zA3okFEKc%HQgYx-Ac;BSVD9AEOvpRQzn!uicT}DJ%Kv+OoKu2lJc9j94{gxW-s39TqKoP-QLC95+INVy;!l}5lL0H`@3BKgx zwd$>GKjTMcR+dJLE<=Qy5*8-~@&H;L+BsWgybeO*o2;~YRy~@} z)b{mv_-HlmyI(bUXrYst@AX*BDM=%ZYJm81c!$2=t2^c=&v?f(Vp2edeNQ__Ga)^=_ z=1iLSLrATAq6F88ZMA!4+D&TJ2O^LL1#v^u=DrQCXfi=u5othrsJE2$T z83hj!g`P8gbqqH7p_nfR#pfy*u^L==^{#V*9q@?v4tU2l)36bCUKQ;&9n&7>&BeOs z$1dXrD#ULYE|R+KqFl+KBl2J~@Hl}PKW*%B7jD`SA0m7cZZK+z=|e9!FK(-?Lj*kb zLz0Y(a8`Tf`Owq|mMo&r6O2|qm{ZjFx2n&rk73i;g zQ%V*$`yUsg<^H$l zeJTlBI?97T$JyxMsV((IQ=Wb0C+UChKv6_UPEj~`ZlpGy9|dfyV+v@))Y2dxlqdj- zh$WyluYDSZiG}Is(_KsHs0?Nz6}#EO{kz%E$&*)~*w_+_O6cIWX=|$1a+l><=>>7}G<|Y6=+_aebmaGm=4bU= 
zu71`tRM+v*v#eRrfRO52x*GR0&KDQ9_AkR#nrVi@x=4BxQ%a#Z?L5bsfI8K;TTEOT zHa;mvoEuc0`o#)8xLAFNl`Vjv&- z>6#YxBysv%WV-QW{f2yIP>pC6Imt09$OTC$SA6p^?7+rUmk#E}A)`(q1LF9ylF%$T;^9gx4P> zPvo`QSzTUsZDR#9@1CY*V5ruxfXCz0?cXSHNMi@*c8wT*Pe9vnV>z1C-&x?W-ej~C zS8cXl-}Sgj*_#~;tHp_)xy!CkSFXMAOYsvmW^KS$)`r2v6Y zfdAiLt9*sz{~Y!HfBpSC-TO}p{+~Gi>En?9MF9Y)_h0@GIDdaC@+Z!p;_BZx+g~~P zhd=+H)cQ}1|0GELjdA^@PyK=MFL~;pDE~=?_!~tK`#)$Bf13ZPN&Ic@^e^-OtWf+D z=|6Gseqwoc|Xi|0m|3%Y?r%eZHo>|AP7dFB<*?{jtPy{L|2fzH4c_tJ!T)XM{}cJoN&a8RfOri5)$RYC^5vyK!TvIX P`TG6*sudMXe_j0_@U%^_ From e4365d0d467022f570f8e8ab21553aa4efb09a32 Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Thu, 11 Jan 2024 09:20:05 +0100 Subject: [PATCH 05/31] Removed old geoquery --- geoquery/__init__.py | 0 geoquery/geoquery.py | 17 ----------------- 2 files changed, 17 deletions(-) delete mode 100644 geoquery/__init__.py delete mode 100644 geoquery/geoquery.py diff --git a/geoquery/__init__.py b/geoquery/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/geoquery/geoquery.py b/geoquery/geoquery.py deleted file mode 100644 index dc42414..0000000 --- a/geoquery/geoquery.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Optional, List, Dict, Union - -from pydantic import BaseModel, root_validator - -class GeoQuery(BaseModel): - variable: List[str] - time: Optional[Union[Dict[str, str], Dict[str, List[str]]]] - area: Optional[Dict[str, float]] - locations: Optional[Dict[str, List[float]]] - vertical: Optional[Union[float, List[float]]] - filters: Optional[Dict] - - @root_validator - def area_locations_mutually_exclusive_validator(cls, query): - if query["area"] is not None and query["locations"] is not None: - raise KeyError("area and locations couldn't be processed together, please use one of them") - return query \ No newline at end of file From 948425509f60678c7cb18787bbf7a779ad879f9e Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Thu, 11 Jan 2024 09:21:19 +0100 Subject: [PATCH 06/31] Merge intake drivers --- drivers/Dockerfile | 8 + drivers/LICENSE | 201 +++++++ drivers/Makefile | 21 + drivers/README.md | 2 + drivers/intake_geokube/__init__.py | 6 + drivers/intake_geokube/base.py | 132 ++++ drivers/intake_geokube/builders/__init__.py | 1 + drivers/intake_geokube/iot/__init__.py | 1 + drivers/intake_geokube/iot/driver.py | 164 +++++ drivers/intake_geokube/netcdf/__init__.py | 1 + drivers/intake_geokube/netcdf/driver.py | 64 ++ drivers/intake_geokube/queries/__init__.py | 1 + drivers/intake_geokube/queries/geoquery.py | 94 +++ drivers/intake_geokube/queries/types.py | 10 + drivers/intake_geokube/queries/utils.py | 106 ++++ drivers/intake_geokube/queries/workflow.py | 72 +++ drivers/intake_geokube/sentinel/__init__.py | 1 + drivers/intake_geokube/sentinel/auth.py | 45 ++ drivers/intake_geokube/sentinel/driver.py | 342 +++++++++++ .../intake_geokube/sentinel/odata_builder.py | 564 ++++++++++++++++++ drivers/intake_geokube/utils.py | 51 ++ drivers/intake_geokube/version.py | 3 + drivers/intake_geokube/wrf/__init__.py | 1 + drivers/intake_geokube/wrf/driver.py | 178 ++++++ drivers/pyproject.toml | 85 +++ drivers/setup.py | 3 + drivers/tests/__init__.py | 0 drivers/tests/queries/__init__.py | 0 drivers/tests/queries/test_utils.py | 50 ++ drivers/tests/queries/test_workflow.py | 61 ++ drivers/tests/sentinel/__init__.py | 0 drivers/tests/sentinel/fixture.py | 11 + drivers/tests/sentinel/test_builder.py | 376 ++++++++++++ drivers/tests/sentinel/test_driver.py | 177 ++++++ drivers/tests/test_geoquery.py | 41 ++ 35 files changed, 2873 insertions(+) 
 create mode 100644 drivers/Dockerfile
 create mode 100644 drivers/LICENSE
 create mode 100644 drivers/Makefile
 create mode 100644 drivers/README.md
 create mode 100644 drivers/intake_geokube/__init__.py
 create mode 100644 drivers/intake_geokube/base.py
 create mode 100644 drivers/intake_geokube/builders/__init__.py
 create mode 100644 drivers/intake_geokube/iot/__init__.py
 create mode 100644 drivers/intake_geokube/iot/driver.py
 create mode 100644 drivers/intake_geokube/netcdf/__init__.py
 create mode 100644 drivers/intake_geokube/netcdf/driver.py
 create mode 100644 drivers/intake_geokube/queries/__init__.py
 create mode 100644 drivers/intake_geokube/queries/geoquery.py
 create mode 100644 drivers/intake_geokube/queries/types.py
 create mode 100644 drivers/intake_geokube/queries/utils.py
 create mode 100644 drivers/intake_geokube/queries/workflow.py
 create mode 100644 drivers/intake_geokube/sentinel/__init__.py
 create mode 100644 drivers/intake_geokube/sentinel/auth.py
 create mode 100644 drivers/intake_geokube/sentinel/driver.py
 create mode 100644 drivers/intake_geokube/sentinel/odata_builder.py
 create mode 100644 drivers/intake_geokube/utils.py
 create mode 100644 drivers/intake_geokube/version.py
 create mode 100644 drivers/intake_geokube/wrf/__init__.py
 create mode 100644 drivers/intake_geokube/wrf/driver.py
 create mode 100644 drivers/pyproject.toml
 create mode 100644 drivers/setup.py
 create mode 100644 drivers/tests/__init__.py
 create mode 100644 drivers/tests/queries/__init__.py
 create mode 100644 drivers/tests/queries/test_utils.py
 create mode 100644 drivers/tests/queries/test_workflow.py
 create mode 100644 drivers/tests/sentinel/__init__.py
 create mode 100644 drivers/tests/sentinel/fixture.py
 create mode 100644 drivers/tests/sentinel/test_builder.py
 create mode 100644 drivers/tests/sentinel/test_driver.py
 create mode 100644 drivers/tests/test_geoquery.py

diff --git a/drivers/Dockerfile b/drivers/Dockerfile
new file mode 100644
index 0000000..d4f9e76
--- /dev/null
+++ b/drivers/Dockerfile
@@ -0,0 +1,8 @@
+ARG REGISTRY=rg.nl-ams.scw.cloud/geokube-production
+ARG TAG=latest
+FROM $REGISTRY/geokube:$TAG
+RUN conda install -c conda-forge --yes --freeze-installed intake=0.6.6
+RUN conda clean -afy
+COPY dist/intake_geokube-1.0b0-py3-none-any.whl /
+RUN pip install /intake_geokube-1.0b0-py3-none-any.whl
+RUN rm /intake_geokube-1.0b0-py3-none-any.whl
diff --git a/drivers/LICENSE b/drivers/LICENSE
new file mode 100644
index 0000000..2b65938
--- /dev/null
+++ b/drivers/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/drivers/Makefile b/drivers/Makefile
new file mode 100644
index 0000000..12a2661
--- /dev/null
+++ b/drivers/Makefile
@@ -0,0 +1,21 @@
+.PHONY: typehint
+typehint:
+	mypy --ignore-missing-imports --check-untyped-defs intake_geokube
+	pylint intake_geokube
+
+.PHONY: test
+test:
+	pytest tests/
+
+.PHONY: format
+format:
+	isort intake_geokube
+	black intake_geokube
+	black tests/
+	isort tests/
+
+.PHONY: docs
+docs:
+	pydocstyle -e --convention=numpy intake_geokube
+
+prepublish: format typehint docs test
diff --git a/drivers/README.md b/drivers/README.md
new file mode 100644
index 0000000..f08349c
--- /dev/null
+++ b/drivers/README.md
@@ -0,0 +1,2 @@
+# intake-geokube
+GeoKube plugin for Intake
\ No newline at end of file
diff --git a/drivers/intake_geokube/__init__.py b/drivers/intake_geokube/__init__.py
new file mode 100644
index 0000000..95b5503
--- /dev/null
+++ b/drivers/intake_geokube/__init__.py
@@ -0,0 +1,6 @@
+"""Geokube Plugin for Intake."""
+
+# This avoids a circular dependency pitfall by ensuring that the
+# driver-discovery code runs first, see:
+# https://intake.readthedocs.io/en/latest/making-plugins.html#entrypoints
+from .queries.geoquery import GeoQuery
diff --git a/drivers/intake_geokube/base.py b/drivers/intake_geokube/base.py
new file mode 100644
index 0000000..e070427
--- /dev/null
+++ b/drivers/intake_geokube/base.py
@@ -0,0 +1,132 @@
+"""Module with AbstractBaseDriver definition."""
+
+import logging
+import os
+from abc import ABC, abstractmethod
+from typing import Any
+
+from dask.delayed import Delayed
+from geokube.core.datacube import DataCube
+from geokube.core.dataset import Dataset
+from intake.source.base import DataSourceBase
+
+from .queries.geoquery import GeoQuery
+
+_NOT_SET: str = ""
+
+
+class AbstractBaseDriver(ABC, DataSourceBase):
+    """Abstract base class for all DDS-related drivers."""
+
+    name: str = _NOT_SET
+    version: str = _NOT_SET
+    container: str = "python"
+    log: logging.Logger
+
+    def __new__(cls, *arr, **kw):  # pylint: disable=unused-argument
+        """Create a new instance of driver and configure logger."""
+        obj = super().__new__(cls)
+        assert (
+            obj.name != _NOT_SET
+        ), f"'name' class attribute was not set for the driver '{cls}'"
+        assert (
+            obj.version != _NOT_SET
+        ), f"'version' class attribute was not set for the driver '{cls}'"
+        obj.log = cls.__configure_logger()
+        return obj
+
+    def __init__(self, *, metadata: dict) -> None:
+        super().__init__(metadata=metadata)
+
+    @classmethod
+    def __configure_logger(cls) -> logging.Logger:
+        log = logging.getLogger(f"dds.intake.{cls.__name__}")
+        level = os.environ.get("DDS_LOG_LEVEL", "INFO")
+        logformat = os.environ.get(
+            "DDS_LOG_FORMAT",
+            "%(asctime)s %(name)s %(funcName)s %(levelname)s %(message)s",
+        )
+        log.setLevel(level)  # type: ignore[arg-type]
+        for handler in log.handlers:
+            if isinstance(handler, logging.StreamHandler):
+                break
+        else:
+            log.addHandler(logging.StreamHandler())
+        if logformat:
+            formatter = logging.Formatter(logformat)
+            for handler in log.handlers:
+                handler.setFormatter(formatter)
+        for handler in log.handlers:
+            handler.setLevel(level)  # type: ignore[arg-type]
+        return log
+
+    @abstractmethod
+    def read(self) -> Any:
+        """Read metadata."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def load(self) -> Any:
+        """Read metadata and load data into memory."""
+        raise NotImplementedError
+
+    def process(self, query: GeoQuery) -> Any:
+        """
+        Process data with the query.
+
+        Parameters
+        ----------
+        query: GeoQuery
+            A query to use for data processing
+
+        Returns
+        -------
+        res: Any
+            Result of `query` processing
+        """
+        data_ = self.read()
+        return self._process_geokube_dataset(data_, query=query, compute=True)
+
+    def _process_geokube_dataset(
+        self,
+        dataset: Dataset | DataCube,
+        query: GeoQuery,
+        compute: bool = False,
+    ) -> Dataset | DataCube:
+        self.log.info("processing geokube structure with GeoQuery: %s", query)
+        if not query:
+            self.log.info("query is empty!")
+            return dataset.compute() if compute else dataset
+        if isinstance(dataset, Dataset):
+            self.log.info("filtering with: %s", query.filters)
+            dataset = dataset.filter(**query.filters)
+        if isinstance(dataset, Delayed) and compute:
+            dataset = dataset.compute()
+        if query.variable:
+            self.log.info("selecting variable: %s", query.variable)
+            dataset = dataset[query.variable]
+        if query.area:
+            self.log.info("subsetting by bounding box: %s", query.area)
+            dataset = dataset.geobbox(**query.area)
+        if query.location:
+            self.log.info("subsetting by location: %s", query.location)
+            dataset = dataset.locations(**query.location)
+        if query.time:
+            self.log.info("subsetting by time: %s", query.time)
+            dataset = dataset.sel(time=query.time)
+        if query.vertical:
+            self.log.info("subsetting by vertical: %s", query.vertical)
+            method = None if isinstance(query.vertical, slice) else "nearest"
+            dataset = dataset.sel(vertical=query.vertical, method=method)
+        if isinstance(dataset, Dataset) and compute:
+            self.log.info(
+                "computing delayed datacubes in the dataset with %d"
+                " records...",
+                len(dataset),
+            )
+            dataset = dataset.apply(
+                lambda dc: dc.compute() if isinstance(dc, Delayed) else dc
+            )
+        return dataset
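Note: a concrete driver only has to set `name` and `version` (both asserted in `__new__`) and implement `read`/`load`; `process` then comes for free. Below is a minimal sketch, assuming an already-opened geokube `DataCube`; the class name and constructor are illustrative, not part of this patch:

    from geokube.core.datacube import DataCube

    from intake_geokube.base import AbstractBaseDriver
    from intake_geokube.queries.geoquery import GeoQuery


    class InMemoryDriver(AbstractBaseDriver):
        """Toy driver serving a datacube that is already in memory."""

        name = "in_memory_driver"  # required: asserted in AbstractBaseDriver.__new__
        version = "0.0.1"          # required: asserted in AbstractBaseDriver.__new__

        def __init__(self, datacube: DataCube, metadata: dict | None = None):
            super().__init__(metadata=metadata or {})
            self.datacube = datacube

        def read(self) -> DataCube:
            # metadata-only view; nothing is computed here
            return self.datacube

        def load(self) -> DataCube:
            # eager variant: pull the data into memory
            return self.datacube.compute()


    # driver = InMemoryDriver(datacube=cube)
    # subset = driver.process(GeoQuery(variable=["tas"]))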
payload["uplink_message"]["decoded_payload"]["data_packet"][ + "measures" + ] + date_time = pd.to_datetime( + datetime.now().strftime("%d-%m-%Y %H:%M:%S"), + format="%d-%m-%Y %H:%M:%S", + ) + data["device_id"] = payload["end_device_ids"]["device_id"] + data["string_type"] = 9 + data["cycle_duration"] = payload["uplink_message"]["decoded_payload"][ + "data_packet" + ]["timestamp"] + data["sensor_time"] = pd.to_datetime( + payload["received_at"], format="%Y-%m-%dT%H:%M:%S.%fZ" + ) + data["latitude"] = data["latitude"] / 10**7 + data["longitude"] = data["longitude"] / 10**7 + data["AirT"] = data["AirT"] / 100 + data["AirH"] = data["AirH"] / 100 + data["surfaceTemp"] = 2840 / 100 + row = pd.Series(data, name=date_time) + df = df._append(row) # pylint: disable=protected-access + return df + + +class IotDriver(AbstractBaseDriver): + """Driver class for IoT data.""" + + name: str = "iot_driver" + version: str = "0.1b0" + + def __init__( + self, + mqtt_kwargs, + time_window, + data_model, + start=False, + metadata=None, + **kwargs, + ): + super().__init__(metadata=metadata) + self.mqtt_kwargs = mqtt_kwargs + self.kwargs = kwargs + self.stream = None + self.time_window = time_window + self.start = start + self.df_model = _build(data_model) + + def _get_schema(self): + if not self.stream: + self.log.debug("creating stream...") + stream = streamz.Stream.from_mqtt(**self.mqtt_kwargs) + self.stream = stream.accumulate( + _mqtt_preprocess, returns_state=False, start=pd.DataFrame() + ).to_dataframe(example=self.df_model) + self.stream.stream.sink(d.append) + if self.start: + self.log.info("streaming started...") + self.stream.start() + return {"stream": str(self.stream)} + + def read(self) -> streamz.dataframe.core.DataFrame: + """Read IoT data.""" + self.log.info("reading stream...") + self._get_schema() + return self.stream + + def load(self) -> NoReturn: + """Load IoT data.""" + self.log.error("loading entire product is not supported for IoT data") + raise NotImplementedError( + "loading entire product is not supported for IoT data" + ) + + def process(self, query: GeoQuery) -> streamz.dataframe.core.DataFrame: + """Process IoT data with the passed query. + + Parameters + ---------- + query : intake_geokube.GeoQuery + A query to use + + Returns + ------- + stream : streamz.dataframe.core.DataFrame + A DataFrame object with streamed content + """ + df = d[0] + if not query: + self.log.info( + "method 'process' called without query. processing skipped." 
diff --git a/drivers/intake_geokube/netcdf/__init__.py b/drivers/intake_geokube/netcdf/__init__.py
new file mode 100644
index 0000000..315792c
--- /dev/null
+++ b/drivers/intake_geokube/netcdf/__init__.py
@@ -0,0 +1 @@
+"""Domain-specific subpackage for netcdf data."""
diff --git a/drivers/intake_geokube/netcdf/driver.py b/drivers/intake_geokube/netcdf/driver.py
new file mode 100644
index 0000000..e29cbfa
--- /dev/null
+++ b/drivers/intake_geokube/netcdf/driver.py
@@ -0,0 +1,64 @@
+"""NetCDF driver for DDS."""
+
+from geokube import open_datacube, open_dataset
+from geokube.core.datacube import DataCube
+from geokube.core.dataset import Dataset
+
+from ..base import AbstractBaseDriver
+
+
+class NetCdfDriver(AbstractBaseDriver):
+    """Driver class for netCDF files."""
+
+    name = "netcdf_driver"
+    version = "0.1a0"
+
+    def __init__(
+        self,
+        path: str,
+        metadata: dict,
+        pattern: str | None = None,
+        field_id: str | None = None,
+        metadata_caching: bool = False,
+        metadata_cache_path: str | None = None,
+        storage_options: dict | None = None,
+        xarray_kwargs: dict | None = None,
+        mapping: dict[str, dict[str, str]] | None = None,
+        load_files_on_persistance: bool = True,
+    ) -> None:
+        super().__init__(metadata=metadata)
+        self.path = path
+        self.pattern = pattern
+        self.field_id = field_id
+        self.metadata_caching = metadata_caching
+        self.metadata_cache_path = metadata_cache_path
+        self.storage_options = storage_options
+        self.mapping = mapping
+        self.xarray_kwargs = xarray_kwargs or {}
+        self.load_files_on_persistance = load_files_on_persistance
+
+    @property
+    def _arguments(self) -> dict:
+        return {
+            "path": self.path,
+            "id_pattern": self.field_id,
+            "metadata_caching": self.metadata_caching,
+            "metadata_cache_path": self.metadata_cache_path,
+            "mapping": self.mapping,
+        } | self.xarray_kwargs
+
+    def read(self) -> Dataset | DataCube:
+        """Read netCDF."""
+        if self.pattern:
+            return open_dataset(
+                pattern=self.pattern, delay_read_cubes=True, **self._arguments
+            )
+        return open_datacube(**self._arguments)
+
+    def load(self) -> Dataset | DataCube:
+        """Load netCDF."""
+        if self.pattern:
+            return open_dataset(
+                pattern=self.pattern, delay_read_cubes=False, **self._arguments
+            )
+        return open_datacube(**self._arguments)
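Note: whether the driver opens a single datacube or a multi-file dataset is decided solely by `pattern`. A direct-instantiation sketch with made-up paths (in production these values come from the catalog entry):

    from intake_geokube.netcdf.driver import NetCdfDriver

    driver = NetCdfDriver(
        path="/data/era5/*.nc",          # hypothetical glob
        metadata={},
        pattern="/data/era5/{year}.nc",  # hypothetical; enables open_dataset
        metadata_caching=True,
        metadata_cache_path="/tmp/era5.cache",
    )
    dataset = driver.read()  # delayed geokube Dataset; .load() would compute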
diff --git a/drivers/intake_geokube/queries/__init__.py b/drivers/intake_geokube/queries/__init__.py
new file mode 100644
index 0000000..e6847fb
--- /dev/null
+++ b/drivers/intake_geokube/queries/__init__.py
@@ -0,0 +1 @@
+"""Subpackage with queries."""
diff --git a/drivers/intake_geokube/queries/geoquery.py b/drivers/intake_geokube/queries/geoquery.py
new file mode 100644
index 0000000..9ab408a
--- /dev/null
+++ b/drivers/intake_geokube/queries/geoquery.py
@@ -0,0 +1,94 @@
+"""Module with GeoQuery definition."""
+
+from __future__ import annotations
+
+import json
+from typing import Any
+
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    field_serializer,
+    model_validator,
+)
+
+from .types import BoundingBoxDict, SliceQuery, TimeComboDict
+from .utils import maybe_dict_to_slice, slice_to_dict
+
+
+class GeoQuery(BaseModel, extra="allow"):
+    """GeoQuery definition class."""
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    variable: list[str] | None = None
+    time: SliceQuery | TimeComboDict | None = None
+    area: BoundingBoxDict | None = None
+    location: dict[str, float | list[float]] | None = None
+    vertical: SliceQuery | float | list[float] | None = None
+    filters: dict[str, Any] = Field(default_factory=dict)
+    format: str | None = None
+    format_args: dict[str, Any] | None = None
+
+    @field_serializer("time")
+    def serialize_time(self, time: SliceQuery | TimeComboDict | None, _info):
+        """Serialize time."""
+        if isinstance(time, slice):
+            return slice_to_dict(time)
+        return time
+
+    @model_validator(mode="after")
+    @classmethod
+    def area_locations_mutually_exclusive_validator(cls, query):
+        """Assert 'location' and 'area' are not passed at once."""
+        if query.area is not None and query.location is not None:
+            raise KeyError(
+                "area and location couldn't be processed together, please use"
+                " one of them"
+            )
+        return query
+
+    @model_validator(mode="before")
+    @classmethod
+    def build_filters(cls, values: dict[str, Any]) -> dict[str, Any]:
+        """Build filters based on extra arguments."""
+        if "filters" in values:
+            return values
+        filters = {}
+        fields = {}
+        for k in values.keys():
+            if k in cls.model_fields:
+                fields[k] = values[k]
+                continue
+            if isinstance(values[k], dict):
+                values[k] = maybe_dict_to_slice(values[k])
+            filters[k] = values[k]
+        fields["filters"] = filters
+        return fields
+
+    def model_dump_original(self, skip_empty: bool = True) -> dict:
+        """Return the JSON representation of the original query."""
+        res = super().model_dump()
+        res = {**res.pop("filters", {}), **res}
+        if skip_empty:
+            res = dict(filter(lambda item: item[1] is not None, res.items()))
+        return res
+
+    @classmethod
+    def parse(
+        cls, load: "GeoQuery" | dict | str | bytes | bytearray
+    ) -> "GeoQuery":
+        """Parse load to GeoQuery instance."""
+        if isinstance(load, cls):
+            return load
+        if isinstance(load, (str, bytes, bytearray)):
+            load = json.loads(load)
+        if isinstance(load, dict):
+            load = GeoQuery(**load)
+        else:
+            raise TypeError(
+                f"type of the `load` argument ({type(load).__name__}) is not"
+                " supported!"
+            )
+        return load
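Note: the `mode="before"` validator means any key that is not a declared field is folded into `filters`, and `model_dump_original` later flattens those filters back to the top level. A small round-trip sketch (all values invented):

    from intake_geokube.queries.geoquery import GeoQuery

    q = GeoQuery.parse(
        '{"variable": ["tas"],'
        ' "area": {"north": 60, "south": 30, "west": -10, "east": 40},'
        ' "model": "ensemble-mean"}'
    )
    # "model" is not a declared field, so it lands in q.filters
    assert q.filters == {"model": "ensemble-mean"}
    # ...and is merged back to the top level on dump
    assert q.model_dump_original()["model"] == "ensemble-mean"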
import utils as ut
+
+SliceQuery = Annotated[slice, BeforeValidator(ut.dict_to_slice)]
+TimeComboDict = Annotated[dict, BeforeValidator(ut.assert_time_combo_dict)]
+BoundingBoxDict = Annotated[dict, BeforeValidator(ut.assert_bounding_box_dict)]
diff --git a/drivers/intake_geokube/queries/utils.py b/drivers/intake_geokube/queries/utils.py
new file mode 100644
index 0000000..c2fb2dd
--- /dev/null
+++ b/drivers/intake_geokube/queries/utils.py
@@ -0,0 +1,106 @@
+"""Module with util functions."""
+
+from typing import Any, Collection, Hashable, Iterable
+
+import dateparser
+from pydantic.fields import FieldInfo
+
+_TIME_COMBO_SUPPORTED_KEYS: tuple[str, ...] = (
+    "year",
+    "month",
+    "day",
+    "hour",
+)
+
+_BBOX_SUPPORTED_KEYS: tuple[str, ...] = (
+    "north",
+    "south",
+    "west",
+    "east",
+)
+
+
+def _validate_dict_keys(
+    provided_keys: Iterable, supported_keys: Collection
+) -> None:
+    for provided_k in provided_keys:
+        assert (
+            provided_k in supported_keys
+        ), f"key '{provided_k}' is not among supported ones: {supported_keys}"
+
+
+def dict_to_slice(mapping: dict) -> slice:
+    """Convert dictionary to slice."""
+    mapping = mapping or {}
+    assert "start" in mapping or "stop" in mapping, (
+        "missing at least one of the keys ['start', 'stop'] required to"
+        " construct a slice object based on the dictionary"
+    )
+    if "start" in mapping and "NOW" in mapping["start"]:
+        mapping["start"] = dateparser.parse(mapping["start"])
+    if "stop" in mapping and "NOW" in mapping["stop"]:
+        mapping["stop"] = dateparser.parse(mapping["stop"])
+    return slice(
+        mapping.get("start"),
+        mapping.get("stop"),
+        mapping.get("step"),
+    )
+
+
+def maybe_dict_to_slice(mapping: Any) -> Any:
+    """Convert valid dictionary to slice or return the original one."""
+    if "start" in mapping or "stop" in mapping:
+        return dict_to_slice(mapping)
+    return mapping
+
+
+def slice_to_dict(slice_: slice) -> dict:
+    """Convert slice to dictionary."""
+    return {"start": slice_.start, "stop": slice_.stop, "step": slice_.step}
+
+
+def assert_time_combo_dict(mapping: dict) -> dict:
+    """Check if dictionary contains time-combo related keys."""
+    _validate_dict_keys(mapping.keys(), _TIME_COMBO_SUPPORTED_KEYS)
+    return mapping
+
+
+def assert_bounding_box_dict(mapping: dict) -> dict:
+    """Check if dictionary contains bounding-box related keys."""
+    _validate_dict_keys(mapping.keys(), _BBOX_SUPPORTED_KEYS)
+    return mapping
+
+
+def split_extra_arguments(
+    values: dict, fields: dict[str, FieldInfo]
+) -> tuple[dict, dict]:
+    """Split arguments to field-related and auxiliary."""
+    extra_args: dict = {}
+    field_args: dict = {}
+    extra_args = {k: v for k, v in values.items() if k not in fields}
+    field_args = {k: v for k, v in values.items() if k in fields}
+    return (field_args, extra_args)
+
+
+def find_value(
+    content: dict | list, key: Hashable, *, recursive: bool = False
+) -> Any:
+    """Return value for a 'key' (recursive search)."""
+    result = None
+    if isinstance(content, dict):
+        if key in content:
+            return content[key]
+        if not recursive:
+            return result
+        for value in content.values():
+            if isinstance(value, (dict, list)):
+                result = result or find_value(value, key, recursive=True)
+    elif isinstance(content, list):
+        for el in content:
+            result = result or find_value(el, key, recursive=True)
+    else:
+        raise TypeError(
+            "'content' argument needs to be a dictionary or a list but"
+            f" found '{type(content)}'"
+        )
+    return result
diff --git a/drivers/intake_geokube/queries/workflow.py b/drivers/intake_geokube/queries/workflow.py new
file mode 100644
index 0000000..a93cd91
--- /dev/null
+++ b/drivers/intake_geokube/queries/workflow.py
@@ -0,0 +1,72 @@
+"""Module with workflow definition."""
+
+from __future__ import annotations
+
+import json
+from collections import Counter
+from typing import Any
+
+from pydantic import BaseModel, Field, field_validator, model_validator
+
+from .utils import find_value
+
+
+class Task(BaseModel):
+    """Single task model definition."""
+
+    id: str | int
+    op: str
+    use: list[str | int] = Field(default_factory=list)
+    args: dict[str, Any] = Field(default_factory=dict)
+
+
+class Workflow(BaseModel):
+    """Workflow model definition."""
+
+    tasks: list[Task]
+    dataset_id: str = ""
+    product_id: str = ""
+
+    @model_validator(mode="before")
+    @classmethod
+    def obtain_dataset_id(cls, values):
+        """Get dataset_id and product_id from included tasks."""
+        dataset_id = find_value(values, key="dataset_id", recursive=True)
+        if not dataset_id:
+            raise KeyError(
+                "'dataset_id' key is missing. Did you define it in 'args'?"
+            )
+        product_id = find_value(values, key="product_id", recursive=True)
+        if not product_id:
+            raise KeyError(
+                "'product_id' key is missing. Did you define it in 'args'?"
+            )
+        return values | {"dataset_id": dataset_id, "product_id": product_id}
+
+    @field_validator("tasks", mode="after")
+    @classmethod
+    def match_unique_ids(cls, items):
+        """Verify the IDs are unique."""
+        for id_value, id_count in Counter([item.id for item in items]).items():
+            if id_count != 1:
+                raise ValueError(f"duplicated key found: `{id_value}`")
+        return items
+
+    @classmethod
+    def parse(
+        cls,
+        workflow: Workflow | dict | list[dict] | str | bytes | bytearray,
+    ) -> Workflow:
+        """Parse to Workflow model."""
+        if isinstance(workflow, cls):
+            return workflow
+        if isinstance(workflow, (str, bytes, bytearray)):
+            workflow = json.loads(workflow)
+        if isinstance(workflow, list):
+            return cls(tasks=workflow)  # type: ignore[arg-type]
+        if isinstance(workflow, dict):
+            return cls(**workflow)
+        raise TypeError(
+            f"`workflow` argument of type `{type(workflow).__name__}`"
+            " cannot be safely parsed to the `Workflow`"
+        )
diff --git a/drivers/intake_geokube/sentinel/__init__.py b/drivers/intake_geokube/sentinel/__init__.py
new file mode 100644
index 0000000..4957128
--- /dev/null
+++ b/drivers/intake_geokube/sentinel/__init__.py
@@ -0,0 +1 @@
+"""Domain-specific subpackage for sentinel data."""
diff --git a/drivers/intake_geokube/sentinel/auth.py b/drivers/intake_geokube/sentinel/auth.py
new file mode 100644
index 0000000..680bfb2
--- /dev/null
+++ b/drivers/intake_geokube/sentinel/auth.py
@@ -0,0 +1,45 @@
+"""Module with auth utils for accessing sentinel data."""
+
+import os
+
+import requests
+from requests.auth import AuthBase
+
+
+class SentinelAuth(AuthBase):  # pylint: disable=too-few-public-methods
+    """Class with authentication for accessing sentinel data."""
+
+    _SENTINEL_AUTH_URL: str = os.environ.get(
+        "SENTINEL_AUTH_URL",
+        "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
+    )
+
+    def __init__(self, username: str, password: str) -> None:
+        self.username = username
+        self.password = password
+
+    @classmethod
+    def _get_access_token(cls, username: str, password: str) -> str:
+        data = {
+            "client_id": "cdse-public",
+            "username": username,
+            "password": password,
+            "grant_type": "password",
+        }
+        try:
+            response = requests.post(
+                cls._SENTINEL_AUTH_URL, data=data, timeout=10
+            )
+            response.raise_for_status()
+        except requests.HTTPError as e:
+            raise
RuntimeError(
+                "Access token creation failed. Response from the server was:"
+                f" {response.json()}"
+            ) from e
+        return response.json()["access_token"]
+
+    def __call__(self, request):
+        """Add authorization header."""
+        token: str = self._get_access_token(self.username, self.password)
+        request.headers["Authorization"] = f"Bearer {token}"
+        return request
diff --git a/drivers/intake_geokube/sentinel/driver.py b/drivers/intake_geokube/sentinel/driver.py
new file mode 100644
index 0000000..4895103
--- /dev/null
+++ b/drivers/intake_geokube/sentinel/driver.py
@@ -0,0 +1,342 @@
+"""Geokube driver for sentinel data."""
+
+import glob
+import os
+import string
+import zipfile
+from multiprocessing.util import get_temp_dir
+from typing import Collection, NoReturn
+
+import dask
+import numpy as np
+import pandas as pd
+import xarray as xr
+from geokube.backend.netcdf import open_datacube
+from geokube.core.dataset import Dataset
+from intake.source.utils import reverse_format
+from pyproj import Transformer
+from pyproj.crs import CRS, GeographicCRS
+
+from ..base import AbstractBaseDriver
+from ..queries.geoquery import GeoQuery
+from ..queries.types import BoundingBoxDict, TimeComboDict
+from .auth import SentinelAuth
+from .odata_builder import ODataRequest, ODataRequestBuilder
+
+
+def _get_items_nbr(mapping, key) -> int:
+    if isinstance(mapping[key], str):
+        return 1
+    return len(mapping[key]) if isinstance(mapping[key], Collection) else 1
+
+
+def _validate_geoquery_for_sentinel(query: GeoQuery) -> None:
+    if query.time:
+        if isinstance(query.time, dict) and any([
+            _get_items_nbr(query.time, "year") != 1,
+            _get_items_nbr(query.time, "month") != 1,
+            _get_items_nbr(query.time, "day") != 1,
+        ]):
+            raise ValueError(
+                "valid time combo for sentinel data should contain exactly one"
+                " value for 'year', one for 'month', and one for 'day'"
+            )
+    if query.location and (
+        "latitude" not in query.location or "longitude" not in query.location
+    ):
+        raise ValueError(
+            "both 'latitude' and 'longitude' must be defined for location"
+        )
+
+
+def _bounding_box_to_polygon(
+    bbox: BoundingBoxDict,
+) -> list[tuple[float, float]]:
+    return [
+        (bbox["north"], bbox["west"]),
+        (bbox["north"], bbox["east"]),
+        (bbox["south"], bbox["east"]),
+        (bbox["south"], bbox["west"]),
+        (bbox["north"], bbox["west"]),
+    ]
+
+
+def _timecombo_to_day_range(combo: TimeComboDict) -> tuple[str, str]:
+    return (f"{combo['year']}-{combo['month']}-{combo['day']}T00:00:00",
+            f"{combo['year']}-{combo['month']}-{combo['day']}T23:59:59")
+
+
+def _location_to_valid_point(
+    location: dict[str, float | list[float]]
+) -> tuple[float, float]:
+    if isinstance(location["latitude"], list):
+        if len(location["latitude"]) > 1:
+            raise ValueError(
+                "location can have just a single point (single value for"
+                " 'latitude' and 'longitude')"
+            )
+        lat = location["latitude"][0]
+    else:
+        lat = location["latitude"]
+    if isinstance(location["longitude"], list):
+        if len(location["longitude"]) > 1:
+            raise ValueError(
+                "location can have just a single point (single value for"
+                " 'latitude' and 'longitude')"
+            )
+        lon = location["longitude"][0]
+    else:
+        lon = location["longitude"]
+    return (lat, lon)
+
+
+def _validate_path_and_pattern(path: str, pattern: str):
+    if path.startswith(os.sep) or pattern.startswith(os.sep):
+        raise ValueError(f"path and pattern cannot start with {os.sep}")
+
+
+def _get_attrs_keys_from_pattern(pattern: str) -> list[str]:
+    return list(
+        map(
+            lambda x: str(x[1]),
+            filter(lambda x: x[1],
string.Formatter().parse(pattern)),
+        )
+    )
+
+
+def unzip_and_clear(target: str) -> None:
+    """Unzip ZIP archives in 'target' dir and remove archive."""
+    assert os.path.exists(target), f"directory '{target}' does not exist"
+    for file in os.listdir(target):
+        if not file.endswith(".zip"):
+            continue
+        prod_id = os.path.splitext(os.path.basename(file))[0]
+        target_prod = os.path.join(target, prod_id)
+        os.makedirs(target_prod, exist_ok=True)
+        try:
+            with zipfile.ZipFile(os.path.join(target, file)) as archive:
+                archive.extractall(path=target_prod)
+        except zipfile.BadZipFile as err:
+            raise RuntimeError("downloaded ZIP archive is invalid") from err
+        os.remove(os.path.join(target, file))
+
+
+def _get_field_name_from_path(path: str):
+    res, file = path.split(os.sep)[-2:]
+    band = file.split("_")[-2]
+    return f"{res}_{band}"
+
+
+def preprocess_sentinel(dset: xr.Dataset) -> xr.Dataset:
+    """Preprocessing function for sentinel data.
+
+    Parameters
+    ----------
+    dset : xarray.Dataset
+        xarray.Dataset to be preprocessed
+
+    Returns
+    -------
+    ds : xarray.Dataset
+        Preprocessed xarray.Dataset
+    """
+    crs = CRS.from_cf(dset["spatial_ref"].attrs)
+    transformer = Transformer.from_crs(
+        crs_from=crs, crs_to=GeographicCRS(), always_xy=True
+    )
+    x_vals, y_vals = dset["x"].to_numpy(), dset["y"].to_numpy()
+    lon_vals, lat_vals = transformer.transform(*np.meshgrid(x_vals, y_vals))  # type: ignore[call-overload] # pylint: disable=unpacking-non-sequence
+    source_path = dset.encoding["source"]
+    sensing_time = os.path.splitext(source_path.split(os.sep)[-6])[0].split(
+        "_"
+    )[-1]
+    time = pd.to_datetime([sensing_time]).to_numpy()
+    dset = dset.assign_coords({
+        "time": time,
+        "latitude": (("x", "y"), lat_vals),
+        "longitude": (("x", "y"), lon_vals),
+    }).rename({"band_data": _get_field_name_from_path(source_path)})
+    expanded_timedim_dataarrays = {var_name: dset[var_name].expand_dims('time') for var_name in dset.data_vars}
+    dset = dset.update(expanded_timedim_dataarrays)
+    return dset
+
+
+class _SentinelKeys:  # pylint: disable=too-few-public-methods
+    UUID: str = "Id"
+    SENSING_TIME: str = "ContentDate/Start"
+    TYPE: str = "Name"
+
+
+class SentinelDriver(AbstractBaseDriver):
+    """Driver class for sentinel data."""
+
+    name: str = "sentinel_driver"
+    version: str = "0.1b0"
+
+    def __init__(
+        self,
+        metadata: dict,
+        url: str,
+        zippattern: str,
+        zippath: str,
+        type: str,
+        username: str | None = None,
+        password: str | None = None,
+        sentinel_timeout: int | None = None,
+        mapping: dict | None = None,
+        xarray_kwargs: dict | None = None,
+    ) -> None:
+        super().__init__(metadata=metadata)
+        self.url: str = url
+        self.zippattern: str = zippattern
+        self.zippath: str = zippath
+        self.type_ = type
+        _validate_path_and_pattern(path=self.zippath, pattern=self.zippattern)
+        self.auth: SentinelAuth = self._get_credentials(username, password)
+        self.target_dir: str = get_temp_dir()
+        self.sentinel_timeout: int | None = sentinel_timeout
+        self.mapping: dict = mapping or {}
+        self.xarray_kwargs: dict = xarray_kwargs or {}
+
+    def _get_credentials(
+        self, username: str | None, password: str | None
+    ) -> SentinelAuth:
+        if username and password:
+            return SentinelAuth(
+                username=username,
+                password=password,
+            )
+        self.log.debug("getting credentials from environmental variables...")
+        if (
+            "SENTINEL_USERNAME" not in os.environ
+            or "SENTINEL_PASSWORD" not in os.environ
+        ):
+            self.log.error(
+                "missing at least one of the mandatory environmental"
+                " variables: ['SENTINEL_USERNAME', 'SENTINEL_PASSWORD']"
+            )
+            raise KeyError(
+                "missing at least one of the mandatory environmental"
+                " variables: ['SENTINEL_USERNAME', 'SENTINEL_PASSWORD']"
+            )
+        return SentinelAuth(
+            username=os.environ["SENTINEL_USERNAME"],
+            password=os.environ["SENTINEL_PASSWORD"],
+        )
+
+    def _force_sentinel_type(self, builder):
+        self.log.info("forcing sentinel type: %s...", self.type_)
+        return builder.filter(_SentinelKeys.TYPE, containing=self.type_)
+
+    def _filter_by_sentinel_attrs(self, builder, query: GeoQuery):
+        self.log.info("filtering by sentinel attributes...")
+        path_filter_names: set[str] = {
+            parsed[1]
+            for parsed in string.Formatter().parse(self.zippattern)
+            if parsed[1]
+        }
+        if not query.filters:
+            return builder
+        sentinel_filter_names: set[str] = (
+            query.filters.keys() - path_filter_names
+        )
+        for sf in sentinel_filter_names:
+            builder = builder.filter_attr(sf, query.filters[sf])
+        return builder
+
+    def _build_odata_from_geoquery(self, query: GeoQuery) -> ODataRequest:
+        self.log.debug("validating geoquery...")
+        _validate_geoquery_for_sentinel(query)
+        self.log.debug("constructing odata request...")
+        builder = ODataRequestBuilder.new(url=self.url)
+        if "product_id" in query.filters:
+            builder = builder.filter(
+                name=_SentinelKeys.UUID, eq=query.filters.get("product_id")
+            )
+        builder = self._filter_by_sentinel_attrs(builder, query=query)
+        builder = self._force_sentinel_type(builder)
+        if query.time:
+            if isinstance(query.time, dict):
+                timecombo_start, timecombo_end = _timecombo_to_day_range(query.time)
+                self.log.debug("filtering by timecombo: [%s, %s] ", timecombo_start, timecombo_end)
+                builder = builder.filter_date(
+                    _SentinelKeys.SENSING_TIME, ge=timecombo_start, le=timecombo_end
+                )
+            elif isinstance(query.time, slice):
+                self.log.debug("filtering by slice: %s", query.time)
+                builder = builder.filter_date(
+                    _SentinelKeys.SENSING_TIME,
+                    ge=query.time.start,
+                    le=query.time.stop,
+                )
+        if query.area:
+            self.log.debug("filtering by polygon")
+            polygon = _bounding_box_to_polygon(query.area)
+            builder = builder.intersect_polygon(polygon=polygon)
+        if query.location:
+            self.log.debug("filtering by location")
+            point = _location_to_valid_point(query.location)
+            builder = builder.intersect_point(point=point)
+        return builder.build()
+
+    def _prepare_dataset(self) -> Dataset:
+        data: list = []
+        attrs_keys: list[str] = _get_attrs_keys_from_pattern(self.zippattern)
+        for f in glob.glob(os.path.join(self.target_dir, self.zippath)):
+            self.log.debug("processing file %s", f)
+            file_no_tmp_dir = f.removeprefix(self.target_dir).strip(os.sep)
+            attr = reverse_format(self.zippattern, file_no_tmp_dir)
+            attr[Dataset.FILES_COL] = [f]
+            data.append(attr)
+        # NOTE: eventually, join files if there are several for the same attrs
+        # combination
+        df = (
+            pd.DataFrame(data)
+            .groupby(attrs_keys)
+            .agg({Dataset.FILES_COL: sum})
+        )
+        datacubes = []
+        for ind, files in df.iterrows():
+            load = dict(zip(df.index.names, ind))
+            load[Dataset.FILES_COL] = files
+            load[Dataset.DATACUBE_COL] = dask.delayed(open_datacube)(
+                path=files.item(),
+                id_pattern=None,
+                mapping=self.mapping,
+                metadata_caching=False,
+                **self.xarray_kwargs,
+                preprocess=preprocess_sentinel,
+            )
+            datacubes.append(load)
+        return Dataset(pd.DataFrame(datacubes))
+
+    def read(self) -> NoReturn:
+        """Read sentinel data."""
+        raise NotImplementedError(
+            "reading metadata is not supported for sentinel data"
+        )
+
+    def load(self) -> NoReturn:
+        """Load sentinel data."""
+        raise NotImplementedError(
+            "loading entire
 product is not supported for sentinel data"
+        )
+
+    def process(self, query: GeoQuery) -> Dataset:
+        """Process query for sentinel data."""
+        self.log.info("building odata request based on the passed geoquery...")
+        req = self._build_odata_from_geoquery(query)
+        self.log.info("downloading data...")
+        req.download(
+            target_dir=self.target_dir,
+            auth=self.auth,
+            timeout=self.sentinel_timeout,
+        )
+        self.log.info("unzipping and removing archives...")
+        unzip_and_clear(self.target_dir)
+        self.log.info("preparing geokube.Dataset...")
+        dataset = self._prepare_dataset()
+        dataset = super()._process_geokube_dataset(
+            dataset, query=query, compute=True
+        )
+        return dataset
diff --git a/drivers/intake_geokube/sentinel/odata_builder.py b/drivers/intake_geokube/sentinel/odata_builder.py
new file mode 100644
index 0000000..4036810
--- /dev/null
+++ b/drivers/intake_geokube/sentinel/odata_builder.py
@@ -0,0 +1,564 @@
+"""Module with OData API class definitions."""
+
+from __future__ import annotations
+
+__all__ = (
+    "datetime_to_isoformat",
+    "HttpMethod",
+    "ODataRequestBuilder",
+    "ODataRequest",
+)
+
+import math
+import os
+import warnings
+from collections import defaultdict
+from datetime import datetime
+from enum import Enum, auto
+from typing import Any, Callable
+
+import pandas as pd
+import requests
+from tqdm import tqdm
+
+from ..utils import create_zip_from_response
+from .auth import SentinelAuth
+
+
+def datetime_to_isoformat(date: str | datetime) -> str:
+    """Convert a string or a datetime object to an ISO datetime string."""
+    if isinstance(date, str):
+        try:
+            value = pd.to_datetime([date]).item().isoformat()
+        except ValueError as exc:
+            raise ValueError(f"cannot parse '{date}' to datetime") from exc
+    elif isinstance(date, datetime):
+        value = date.isoformat()
+    else:
+        raise TypeError(f"type '{type(date)}' is not supported")
+    return f"{value}Z"
+
+
+class HttpMethod(Enum):
+    """Enum with HTTP methods."""
+
+    GET = auto()
+    POST = auto()
+
+    @property
+    def method_name(self) -> str:
+        """Get name of the HTTP method."""
+        return self.name.lower()
+
+
+class _ODataEntity:  # pylint: disable=too-few-public-methods
+    def __init__(
+        self,
+        url: str,
+        params: dict | None = None,
+        method: HttpMethod = HttpMethod.GET,
+        body: dict | None = None,
+    ) -> None:
+        self.params: dict[str, list] = defaultdict(list, params or {})
+        self.conj: list = []
+        self.body: dict = body or {}
+        self.url = url
+        self.method = method
+        self.callbacks: dict = {}
+
+
+class _ODataBuildableMixin:  # pylint: disable=too-few-public-methods
+    odata: _ODataEntity
+
+    def build(self) -> ODataRequest:
+        """Build ODataRequest object."""
+        return ODataRequest(self.odata)
+
+
+class _ODataOrderMixing:  # pylint: disable=too-few-public-methods
+    odata: _ODataEntity
+
+    def order(self, by: str, desc: bool = False) -> _ODataOperation:
+        """Add ordering option.
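+
+        A minimal illustrative sketch (hypothetical endpoint URL; the
+        resulting entry in ``odata.params`` follows from the code below):
+
+        >>> op = ODataRequestBuilder.new("https://example.com/odata/v1")
+        >>> op = op.order(by="ProductionDate", desc=True)
+        >>> op.odata.params["orderby"]
+        ['ProductionDate desc']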
+ + Parameters + ---------- + by : str + A key by which ordering should be done + desc : bool + If descending order should be used + """ + order = "desc" if desc else "asc" + if "orderby" in self.odata.params: + raise ValueError( + f"ordering was already defined: {self.odata.params['orderby']}" + ) + self.odata.params["orderby"] = [f"{by} {order}"] + match self: + case _ODataOperation(): + return _ODataOperation(self.odata) + case _: + raise TypeError(f"unexpected type: {type(self)}") + + +class ODataRequest: + """OData request object.""" + + _ALL_HTTP_CODES: int = -1 + _DOWNLOAD_PATTERN: str = ( + "https://zipper.dataspace.copernicus.eu" + "/odata/v1/Products({pid})/$value" + ) + + def __init__(self, odata: _ODataEntity) -> None: + self.request_params: dict = {} + self.odata = odata + self._convert_filter_param() + self._convert_order_param() + + def _convert_order_param(self) -> None: + if self.odata.params["orderby"]: + self.request_params["orderby"] = self.odata.params["orderby"] + + def _convert_filter_param(self) -> None: + param: str = "" + for i in range(len(self.odata.params["filter"])): + if not param: + param = self.odata.params["filter"][i] + else: + param = f"{param} {self.odata.params['filter'][i]}" + if i < len(self.odata.params["filter"]) - 1: + param = f"{param} {self.odata.conj[i]}" + self.request_params["filter"] = param + + def _query( + self, + headers: dict | None = None, + auth: Any | None = None, + timeout: int | None = None, + ) -> requests.Response: + if self.odata.params and not self.odata.url.endswith("?"): + self.odata.url = f"{self.odata.url}?" + params = {} + if self.request_params: + params = { + f"${key}": value for key, value in self.request_params.items() + } + match self.odata.method: + case HttpMethod.GET: + return requests.get( + self.odata.url, + params=params, + headers=headers, + timeout=timeout, + ) + case HttpMethod.POST: + return requests.post( + self.odata.url, + data=self.odata.body, + auth=auth, + timeout=timeout, + ) + case _: + raise NotImplementedError( + f"method {self.odata.method} is not supported" + ) + + def with_callback( + self, + callback: Callable[[requests.Response], Any], + http_code: int | None = None, + ) -> "ODataRequest": + """ + Add callbacks for request response. + + Parameters + ---------- + callback : callable + A callback function taking just a single argument, + i.e `requests.Response` object + http_code : int + HTTP code for which callback should be used. + If not passed, callback will be executed for all codes. + """ + if http_code: + if http_code in self.odata.callbacks: + warnings.warn( + f"callback for HTTP code {http_code} will be overwritten" + ) + self.odata.callbacks[http_code] = callback + else: + self.odata.callbacks[self._ALL_HTTP_CODES] = callback + return self + + def query( + self, + headers: dict | None = None, + auth: Any | None = None, + timeout: int | None = None, + ) -> Any: + """Query data based on the built request. 
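+
+        An illustrative sketch (hypothetical URL; this performs a real
+        HTTP call, so a reachable OData endpoint is assumed):
+
+        >>> req = ODataRequestBuilder.new("https://example.com/odata/v1").build()
+        >>> resp = req.query(timeout=10)  # requests.Response if no callback is set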
+
+        Parameters
+        ----------
+        headers : dict, optional
+            Headers passed to HTTP request
+        auth : Any, optional
+            Authorization object or a (user, password) tuple for basic
+            authentication
+
+        Returns
+        -------
+        res : Any
+            Value returned from the appropriate callback or the
+            `requests.Response` object otherwise
+        """
+        response = self._query(headers=headers, auth=auth, timeout=timeout)
+        if response.status_code in self.odata.callbacks:
+            return self.odata.callbacks[response.status_code](response)
+        if self._ALL_HTTP_CODES in self.odata.callbacks:
+            return self.odata.callbacks[self._ALL_HTTP_CODES](response)
+        return response
+
+    def download(
+        self,
+        target_dir: str,
+        headers: dict | None = None,
+        auth: Any | None = None,
+        timeout: int | None = None,
+    ) -> Any:
+        """Download requested data to `target_dir`.
+
+        Parameters
+        ----------
+        target_dir : str
+            Path to the directory where files should be downloaded
+        headers : dict, optional
+            Headers passed to HTTP request
+        auth : Any, optional
+            Authorization object or a (user, password) tuple for basic
+            authentication
+        """
+        os.makedirs(target_dir, exist_ok=True)
+        response = self._query(headers=headers, auth=auth, timeout=timeout)
+        response.raise_for_status()
+        if response.status_code in self.odata.callbacks:
+            self.odata.callbacks[response.status_code](response)
+        if self._ALL_HTTP_CODES in self.odata.callbacks:
+            self.odata.callbacks[self._ALL_HTTP_CODES](response)
+        df = pd.DataFrame(response.json()["value"])
+        if len(df) == 0:
+            raise ValueError("no product found for the request")
+        if not isinstance(auth, SentinelAuth):
+            raise TypeError(
+                f"expected authentication of the type '{SentinelAuth}' but"
+                f" passed '{type(auth)}'"
+            )
+        for pid in tqdm(df["Id"]):
+            response = requests.get(
+                self._DOWNLOAD_PATTERN.format(pid=pid),
+                stream=True,
+                auth=auth,
+                timeout=timeout,
+            )
+            response.raise_for_status()
+            create_zip_from_response(
+                response, os.path.join(target_dir, f"{pid}.zip")
+            )
+
+
+class _ODataOperation(_ODataBuildableMixin, _ODataOrderMixing):
+    def __init__(self, odata: _ODataEntity) -> None:
+        self.odata = odata
+
+    def _append_query_param(self, param: str | None) -> None:
+        if not param:
+            return
+        self.odata.params["filter"].append(param)
+        self.odata.conj.append("and")
+
+    def _validate_args(self, lt, le, eq, ge, gt) -> None:
+        if eq:
+            if any(map(lambda x: x is not None, [lt, le, ge, gt])):
+                raise ValueError(
+                    "cannot define extra operations for a single option if"
+                    " `eq` is defined"
+                )
+        if lt and le:
+            raise ValueError(
+                "cannot define both operations `lt` and `le` for a single"
+                " option"
+            )
+        if gt and ge:
+            raise ValueError(
+                "cannot define both operations `gt` and `ge` for a single"
+                " option"
+            )
+
+    def and_(self) -> _ODataOperation:
+        """Put conjunctive conditions."""
+        self.odata.conj[-1] = "and"
+        return self
+
+    def or_(self) -> _ODataOperation:
+        """Put alternative conditions."""
+        self.odata.conj[-1] = "or"
+        return self
+
+    def filter_attr(self, name: str, value: str) -> _ODataOperation:
+        """Filter by attribute value.
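+
+        An illustrative sketch (hypothetical attribute name and value;
+        the string shown is what this method appends to
+        ``odata.params["filter"]``):
+
+        >>> op = ODataRequestBuilder.new("https://example.com/odata/v1")
+        >>> op = op.filter_attr("cloudCover", "10")
+        >>> op.odata.params["filter"][0]
+        "Attributes/OData.CSC.ValueTypeAttribute/any(att:att/Name eq 'cloudCover' and att/OData.CSC.ValueTypeAttribute/Value eq '10')"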
+
+        Parameters
+        ----------
+        name : str
+            Name of an attribute
+        value : str
+            Value of the attribute
+        """
+        param: str = (
+            "Attributes/OData.CSC.ValueTypeAttribute/any(att:att/Name eq"
+            f" '{name}'"
+            + f" and att/OData.CSC.ValueTypeAttribute/Value eq '{value}')"
+        )
+        self._append_query_param(param)
+        return self
+
+    def filter(
+        self,
+        name: str,
+        *,
+        lt: str | None = None,
+        le: str | None = None,
+        eq: str | None = None,
+        ge: str | None = None,
+        gt: str | None = None,
+        containing: str | None = None,
+        not_containing: str | None = None,
+    ) -> _ODataOperation:
+        """Filter option by values.
+
+        Add filter option to the request. The value of the option
+        indicated by the `name` argument will be checked against the
+        given argument values.
+        You cannot specify both `lt` and `le`, or `ge` and `gt`.
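+
+        An illustrative sketch (hypothetical option names; the entries
+        shown are what ends up in ``odata.params["filter"]``):
+
+        >>> op = ODataRequestBuilder.new("https://example.com/odata/v1")
+        >>> op = op.filter("Name", containing="S2A").filter("a", lt="10")
+        >>> op.odata.params["filter"]
+        ["contains(Name,'S2A')", 'a lt 10']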
+
+        Parameters
+        ----------
+        lt : str, optional
+            value for `less than` comparison
+        le : str, optional
+            value for `less or equal` comparison
+        eq : str, optional
+            value for `equal` comparison
+        ge : str, optional
+            value for `greater or equal` comparison
+        gt : str, optional
+            value for `greater than` comparison
+        containing : str, optional
+            value to be contained
+        not_containing : str, optional
+            value not to be contained
+        """
+        if not any([le, lt, eq, ge, gt, containing, not_containing]):
+            return self
+        self._validate_args(le=le, lt=lt, eq=eq, ge=ge, gt=gt)
+        build_: _ODataOperation = self
+        assert isinstance(build_, _ODataOperation), "unexpected type"
+        if lt:
+            build_ = build_.with_option_lt(name, lt).and_()
+        if le:
+            build_ = build_.with_option_le(name, le).and_()
+        if eq:
+            build_ = build_.with_option_equal(name, eq).and_()
+        if ge:
+            build_ = build_.with_option_ge(name, ge).and_()
+        if gt:
+            build_ = build_.with_option_gt(name, gt).and_()
+        if containing:
+            build_ = build_.with_option_containing(name, containing).and_()
+        if not_containing:
+            build_ = build_.with_option_not_containing(
+                name, not_containing
+            ).and_()
+
+        return build_
+
+    def filter_date(
+        self,
+        name: str,
+        *,
+        lt: str | None = None,
+        le: str | None = None,
+        eq: str | None = None,
+        ge: str | None = None,
+        gt: str | None = None,
+    ) -> _ODataOperation:
+        """
+        Filter datetime option by values.
+
+        Add filter option to the request. Datetime values of the option
+        indicated by the `name` argument will be checked against the
+        given argument values.
+        Values of arguments will be automatically converted to ISO datetime
+        string format.
+        You cannot specify both `lt` and `le`, or `ge` and `gt`.
+
+        Parameters
+        ----------
+        lt : str, optional
+            value for `less than` comparison
+        le : str, optional
+            value for `less or equal` comparison
+        eq : str, optional
+            value for `equal` comparison
+        ge : str, optional
+            value for `greater or equal` comparison
+        gt : str, optional
+            value for `greater than` comparison
+        """
+        if lt:
+            lt = datetime_to_isoformat(lt)
+        if le:
+            le = datetime_to_isoformat(le)
+        if eq:
+            eq = datetime_to_isoformat(eq)
+        if ge:
+            ge = datetime_to_isoformat(ge)
+        if gt:
+            gt = datetime_to_isoformat(gt)
+        return self.filter(name, lt=lt, le=le, eq=eq, ge=ge, gt=gt)
+
+    def with_option_equal(self, name: str, value: str) -> "_ODataOperation":
+        """Add filtering by option `is equal`."""
+        param: str = f"{name} eq '{value}'"
+        self._append_query_param(param)
+        return self
+
+    def with_option_containing(
+        self, name: str, value: str
+    ) -> "_ODataOperation":
+        """Add filtering by option `containing`."""
+        param: str = f"contains({name},'{value}')"
+        self._append_query_param(param)
+        return self
+
+    def with_option_not_containing(
+        self, name: str, value: str
+    ) -> "_ODataOperation":
+        """Add filtering by option `not containing`."""
+        param: str = f"not contains({name},'{value}')"
+        self._append_query_param(param)
+        return self
+
+    def with_option_equal_list(
+        self, name: str, value: list[str]
+    ) -> "_ODataOperation":
+        """Add filtering by equality."""
+        self.odata.body.update({"FilterProducts": [{name: v} for v in value]})
+        self.odata.method = HttpMethod.POST
+        return self
+
+    def with_option_lt(self, name: str, value: str) -> "_ODataOperation":
+        """Add filtering with `less than` option."""
+        param: str = f"{name} lt {value}"
+        self._append_query_param(param)
+        return self
+
+    def with_option_le(self, name: str, value: str) -> "_ODataOperation":
+        """Add filtering with `less or equal` option."""
+        param: str = f"{name} le {value}"
+        self._append_query_param(param)
+        return self
+
+    def with_option_gt(self, name: str, value: str) -> "_ODataOperation":
+        """Add filtering with `greater than` option."""
+        param: str = f"{name} gt {value}"
+        self._append_query_param(param)
+        return self
+
+    def with_option_ge(self, name: str, value: str) -> "_ODataOperation":
+        """Add filtering with `greater or equal` option."""
+        param: str = f"{name} ge {value}"
+        self._append_query_param(param)
+        return self
+
+    def intersect_polygon(
+        self,
+        polygon: list[tuple[float, float]] | list[list[float]],
+        srid: str | None = "4326",
+    ) -> "_ODataOperation":
+        """
+        Add filtering by polygon intersection.
+
+        Parameters
+        ----------
+        polygon: list of 2-element tuples or 2-element lists of floats
+            Points belonging to the polygon [latitude, longitude].
+            The first and the last point need to be the same (the polygon
+            needs to be closed)
+        srid : str, optional
+            SRID name, currently supported is only `4326`
+        """
+        if srid != "4326":
+            raise NotImplementedError(
+                "currently supported SRID is only ['4326' (EPSG 4326)]"
+            )
+        if not polygon:
+            return self
+        if any(map(lambda x: len(x) != 2, polygon)):
+            raise ValueError(
+                "polygon should be defined as a 2-element list or tuple"
+                " (containing latitude and longitude values)"
+            )
+        if not math.isclose(polygon[0][0], polygon[-1][0]) or not math.isclose(
+            polygon[0][1], polygon[-1][1]
+        ):
+            raise ValueError(
+                "polygon needs to end at the same point it starts!"
)
+        polygon_repr = ",".join([f"{p[1]} {p[0]}" for p in polygon])
+        param = f"OData.CSC.Intersects(area=geography'SRID={srid};POLYGON(({polygon_repr}))')"
+        self._append_query_param(param)
+        return self
+
+    def intersect_point(
+        self,
+        point: list[float] | tuple[float, float],
+        srid: str | None = "4326",
+    ) -> "_ODataOperation":
+        """Add filtering by intersection with a point.
+
+        Parameters
+        ----------
+        point: 2-element tuple or list of floats
+            Point definition [latitude, longitude]
+        srid : str, optional
+            SRID name, currently supported is only `4326`
+        """
+        if srid != "4326":
+            raise NotImplementedError(
+                "currently supported SRID is only ['4326' (EPSG 4326)]"
+            )
+        if len(point) != 2:
+            # NOTE: to ensure the order is [latitude, longitude] and not vice versa!
+            raise ValueError(
+                "point needs to have just two elements [latitude, longitude]"
+            )
+        param = (
+            f"OData.CSC.Intersects(area=geography'SRID={srid};POINT({point[0]} {point[1]})')"
+        )
+        self._append_query_param(param)
+        return self
+
+
+class ODataRequestBuilder(
+    _ODataOperation
+):  # pylint: disable=too-few-public-methods
+    """OData API request builder."""
+
+    _BASE_PATTERN: str = "{url}/Products"
+
+    @classmethod
+    def new(cls, url: str) -> _ODataOperation:
+        """Start building OData request."""
+        url = cls._BASE_PATTERN.format(url=url.strip("/"))
+        return _ODataOperation(_ODataEntity(url=url))
diff --git a/drivers/intake_geokube/utils.py b/drivers/intake_geokube/utils.py
new file mode 100644
index 0000000..a3a97e2
--- /dev/null
+++ b/drivers/intake_geokube/utils.py
@@ -0,0 +1,51 @@
+"""Utils module."""
+
+import os
+
+import requests
+
+
+def create_zip_from_response(response: requests.Response, target: str) -> None:
+    """Create ZIP archive based on the content in streamable response.
+
+    Parameters
+    ----------
+    response : requests.Response
+        Response whose content is streamable (`stream=True`)
+    target : str
+        Target path containing name and .zip extension
+
+    Raises
+    ------
+    ValueError
+        if `Content-Type` header is missing
+    TypeError
+        if type supplied by `Content-Type` is other than `zip`
+    RuntimeError
+        if size provided by `Content-Length` header differs from the size
+        of the downloaded file
+    """
+    content_type = response.headers.get("Content-Type")
+    if not content_type:
+        raise ValueError("`Content-Type` mandatory header is missing")
+    format_ = content_type.split("/")[-1]
+    _, ext = os.path.splitext(target)
+    if format_ != "zip":
+        raise TypeError(
+            f"provided content type {format_} is not allowed.
expected 'zip'" + " format" + ) + assert ext[1:] == "zip", "expected target with '.zip' extension" + + expected_length = int(response.headers["Content-Length"]) + total_bytes = 0 + with open(target, "wb") as f: + for chunk in response.iter_content(chunk_size=1024): + if chunk: + f.write(chunk) + total_bytes += len(chunk) + if expected_length != total_bytes: + raise RuntimeError( + "downloaded file is not complete in spite of download finished" + " successfully" + ) diff --git a/drivers/intake_geokube/version.py b/drivers/intake_geokube/version.py new file mode 100644 index 0000000..656021a --- /dev/null +++ b/drivers/intake_geokube/version.py @@ -0,0 +1,3 @@ +"""Module with the current version number definition.""" + +__version__ = "1.0b0" diff --git a/drivers/intake_geokube/wrf/__init__.py b/drivers/intake_geokube/wrf/__init__.py new file mode 100644 index 0000000..c528597 --- /dev/null +++ b/drivers/intake_geokube/wrf/__init__.py @@ -0,0 +1 @@ +"""Domain subpackage for WRF datasets.""" diff --git a/drivers/intake_geokube/wrf/driver.py b/drivers/intake_geokube/wrf/driver.py new file mode 100644 index 0000000..d819760 --- /dev/null +++ b/drivers/intake_geokube/wrf/driver.py @@ -0,0 +1,178 @@ +"""WRF driver for DDS.""" + +from functools import partial +from typing import Any + +import numpy as np +import xarray as xr +from geokube import open_datacube, open_dataset +from geokube.core.datacube import DataCube +from geokube.core.dataset import Dataset + +from ..base import AbstractBaseDriver + +_DIM_RENAME_MAP: dict = { + "Time": "time", + "south_north": "latitude", + "west_east": "longitude", +} +_COORD_RENAME_MAP: dict = { + "XTIME": "time", + "XLAT": "latitude", + "XLONG": "longitude", +} +_COORD_SQUEEZE_NAMES: tuple = ("latitude", "longitude") +_PROJECTION: dict = {"grid_mapping_name": "latitude_longitude"} + + +def _cast_to_set(item: Any) -> set: + if item is None: + return set() + if isinstance(item, set): + return item + if isinstance(item, str): + return {item} + if isinstance(item, list): + return set(item) + raise TypeError(f"type '{type(item)}' is not supported!") + + +def rename_coords(dset: xr.Dataset) -> xr.Dataset: + """Rename coordinates.""" + dset_ = dset.rename_vars(_COORD_RENAME_MAP) + # Removing `Time` dimension from latitude and longitude. + coords = dset_.coords + for name in _COORD_SQUEEZE_NAMES: + coord = dset_[name] + if "Time" in coord.dims: + coords[name] = coord.squeeze(dim="Time", drop=True) + return dset_ + + +def change_dims(dset: xr.Dataset) -> xr.Dataset: + """Change dimensions to time, latitude, and longitude.""" + # Preparing new horizontal coordinates. + lat = (["south_north"], dset["latitude"].to_numpy().mean(axis=1)) + lon = (["west_east"], dset["longitude"].to_numpy().mean(axis=0)) + # Removing old horizontal coordinates. + dset_ = dset.drop_vars(["latitude", "longitude"]) + # Adding new horizontal coordinates and setting their units. + coords = dset_.coords + coords["latitude"] = lat + coords["longitude"] = lon + dset_["latitude"].attrs["units"] = "degree_north" + dset_["longitude"].attrs["units"] = "degree_east" + # Making `time`, `latitude`, and `longitude` new dimensions, instead of + # `Time`, `south_north`, and `west_east`. 
+    dset_ = dset_.swap_dims(_DIM_RENAME_MAP)
+    return dset_
+
+
+def add_projection(dset: xr.Dataset) -> xr.Dataset:
+    """Add projection information to the dataset."""
+    coords = dset.coords
+    coords["crs"] = xr.DataArray(data=np.array(1), attrs=_PROJECTION)
+    for var in dset.data_vars.values():
+        enc = var.encoding
+        enc["grid_mapping"] = "crs"
+        if coord_names := enc.get("coordinates"):
+            for old_name, new_name in _COORD_RENAME_MAP.items():
+                coord_names = coord_names.replace(old_name, new_name)
+            enc["coordinates"] = coord_names
+    return dset
+
+
+def choose_variables(
+    dset: xr.Dataset,
+    variables_to_keep: str | list[str] | None = None,
+    variables_to_skip: str | list[str] | None = None,
+) -> xr.Dataset:
+    """Choose only some variables by keeping or skipping some of them."""
+    variables_to_keep_ = _cast_to_set(variables_to_keep)
+    variables_to_skip_ = _cast_to_set(variables_to_skip)
+    selected_variables = set(dset.data_vars.keys())
+    if len(variables_to_keep_) > 0:
+        selected_variables = set(dset.data_vars.keys()) & variables_to_keep_
+    selected_variables = selected_variables - variables_to_skip_
+    if len(set(dset.data_vars.keys())) != len(selected_variables):
+        return dset[selected_variables]
+    return dset
+
+
+def preprocess_wrf(
+    dset: xr.Dataset, variables_to_keep, variables_to_skip
+) -> xr.Dataset:
+    """Preprocess WRF dataset."""
+    dset = rename_coords(dset)
+    dset = change_dims(dset)
+    dset = add_projection(dset)
+    dset = choose_variables(dset, variables_to_keep, variables_to_skip)
+    return dset
+
+
+class WrfDriver(AbstractBaseDriver):
+    """Driver class for WRF files."""
+
+    name = "wrf_driver"
+    version = "0.1a0"
+
+    def __init__(
+        self,
+        path: str,
+        metadata: dict,
+        pattern: str | None = None,
+        field_id: str | None = None,
+        metadata_caching: bool = False,
+        metadata_cache_path: str | None = None,
+        storage_options: dict | None = None,
+        xarray_kwargs: dict | None = None,
+        mapping: dict[str, dict[str, str]] | None = None,
+        load_files_on_persistance: bool = True,
+        variables_to_keep: str | list[str] | None = None,
+        variables_to_skip: str | list[str] | None = None,
+    ) -> None:
+        super().__init__(metadata=metadata)
+        self.path = path
+        self.pattern = pattern
+        self.field_id = field_id
+        self.metadata_caching = metadata_caching
+        self.metadata_cache_path = metadata_cache_path
+        self.storage_options = storage_options
+        self.mapping = mapping
+        self.xarray_kwargs = xarray_kwargs or {}
+        self.load_files_on_persistance = load_files_on_persistance
+        self.preprocess = partial(
+            preprocess_wrf,
+            variables_to_keep=variables_to_keep,
+            variables_to_skip=variables_to_skip,
+        )
+
+    @property
+    def _arguments(self) -> dict:
+        return {
+            "path": self.path,
+            "id_pattern": self.field_id,
+            "metadata_caching": self.metadata_caching,
+            "metadata_cache_path": self.metadata_cache_path,
+            "mapping": self.mapping,
+        } | self.xarray_kwargs
+
+    def read(self) -> Dataset | DataCube:
+        """Read WRF data."""
+        if self.pattern:
+            return open_dataset(
+                pattern=self.pattern,
+                preprocess=self.preprocess,
+                **self._arguments,
+            )
+        return open_datacube(
+            delay_read_cubes=True,
+            preprocess=self.preprocess,
+            **self._arguments,
+        )
+
+    def load(self) -> Dataset | DataCube:
+        """Load WRF data."""
+        if self.pattern:
+            return open_dataset(pattern=self.pattern, **self._arguments)
+        return open_datacube(delay_read_cubes=False, **self._arguments)
diff --git a/drivers/pyproject.toml b/drivers/pyproject.toml
new file mode 100644
index 0000000..ae138ac
--- /dev/null
+++ b/drivers/pyproject.toml
@@ -0,0
+1,84 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "intake-geokube"
+description = "opengeokube DDS driver."
+requires-python = ">=3.10"
+readme = "README.md"
+license = {file = "LICENSE"}
+dynamic = ["version"]
+authors = [
+    {name = "Jakub Walczak"},
+    {name = "Marco Mancini"},
+    {name = "Mirko Stojiljkovic"},
+    {name = "Valentina Scardigno"},
+]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Environment :: Web Environment",
+    "Intended Audience :: Science/Research",
+    "License :: OSI Approved :: Apache Software License",
+    "Natural Language :: English",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Topic :: Scientific/Engineering :: Atmospheric Science",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+    "Topic :: Software Development :: Libraries :: Application Frameworks",
+    "Topic :: Software Development :: Libraries",
+]
+dependencies = [
+    "dateparser",
+    "intake",
+    "pydantic",
+    "tqdm",
+    "streamz@git+https://github.com/python-streamz/streamz.git",
+    "paho-mqtt"
+]
+[project.entry-points."intake.drivers"]
+netcdf_driver = "intake_geokube.netcdf.driver:NetCdfDriver"
+sentinel_driver = "intake_geokube.sentinel.driver:SentinelDriver"
+iot_driver = "intake_geokube.iot.driver:IotDriver"
+wrf_driver = "intake_geokube.wrf.driver:WrfDriver"
+
+[tool.setuptools.dynamic]
+version = {attr = "intake_geokube.version.__version__"}
+
+[tool.setuptools]
+include-package-data = true
+
+[tool.setuptools.packages.find]
+where = ["."]
+exclude = ["examples*"]
+
+[tool.pydocstyle]
+
+[tool.pylint.'MESSAGES CONTROL']
+disable = "too-many-arguments,too-many-instance-attributes,too-few-public-methods,duplicate-code"
+
+
+[tool.isort]
+profile = "black"
+include_trailing_comma = true
+line_length = 79
+overwrite_in_place = true
+use_parentheses = true
+
+[tool.black]
+line_length = 79
+preview = true
+
+[tool.mypy]
+files = [
+    "intake_geokube", "."
+]
+exclude = ["tests/"]
+
+[tool.pytest.ini_options]
+filterwarnings = [
+    "ignore::DeprecationWarning"
+]
diff --git a/drivers/setup.py b/drivers/setup.py
new file mode 100644
index 0000000..b908cbe
--- /dev/null
+++ b/drivers/setup.py
@@ -0,0 +1,3 @@
+import setuptools
+
+setuptools.setup()
diff --git a/drivers/tests/__init__.py b/drivers/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/drivers/tests/queries/__init__.py b/drivers/tests/queries/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/drivers/tests/queries/test_utils.py b/drivers/tests/queries/test_utils.py
new file mode 100644
index 0000000..0fbefbc
--- /dev/null
+++ b/drivers/tests/queries/test_utils.py
@@ -0,0 +1,50 @@
+from intake_geokube.queries import utils as ut
+
+
+class TestUtils:
+    def test_find_key_root_level_recursive_switched_off(self):
+        assert ut.find_value({"a": 0, "b": 10}, "b", recursive=False) == 10
+
+    def test_find_key_root_level_recursive_switched_on(self):
+        assert ut.find_value({"a": 0, "b": 10}, "b", recursive=True) == 10
+
+    def test_return_none_on_missing_key_root_level(self):
+        assert ut.find_value({"a": 0, "b": 10}, "c", recursive=True) is None
+
+    def test_return_none_on_missing_key_another_level(self):
+        assert (
+            ut.find_value({"a": 0, "b": {"c": 10}}, "d", recursive=True)
+            is None
+        )
+
+    def test_find_key_another_level_recursive_switched_off(self):
+        assert (
+            ut.find_value({"a": 0, "b": {"c": "ccc"}}, "c", recursive=False)
+            is None
+        )
+
+    def test_find_key_another_level_recursive_switched_on(self):
+        assert (
+            ut.find_value({"a": 0, "b": {"c": "ccc"}}, "c", recursive=True)
+            == "ccc"
+        )
+
+    def test_find_list_first(self):
+        assert (
+            ut.find_value(
+                {"a": 0, "b": [{"c": "ccc"}, {"d": "ddd"}]},
+                "c",
+                recursive=True,
+            )
+            == "ccc"
+        )
+
+    def test_find_list_not_first(self):
+        assert (
+            ut.find_value(
+                {"a": 0, "b": [{"d": "ddd"}, {"c": "ccc"}]},
+                "c",
+                recursive=True,
+            )
+            == "ccc"
+        )
diff --git a/drivers/tests/queries/test_workflow.py b/drivers/tests/queries/test_workflow.py
new file mode 100644
index 0000000..1b8f8c3
--- /dev/null
+++ b/drivers/tests/queries/test_workflow.py
@@ -0,0 +1,61 @@
+import pytest
+
+from intake_geokube.queries.workflow import Workflow
+
+
+class TestWorkflow:
+    def test_fail_on_missing_dataset_id(self):
+        with pytest.raises(
+            KeyError,
+            match=r"'dataset_id' key is missing",
+        ):
+            Workflow.parse({
+                "tasks": [{
+                    "id": 0,
+                    "op": "subset",
+                    "args": {
+                        "product_id": "reanalysis",
+                    },
+                }]
+            })
+
+    def test_fail_on_missing_product_id(self):
+        with pytest.raises(
+            KeyError,
+            match=r"'product_id' key is missing",
):
+            Workflow.parse({
+                "tasks": [{
+                    "id": 0,
+                    "op": "subset",
+                    "args": {
+                        "dataset_id": "era5",
+                    },
+                }]
+            })
+
+    def test_fail_on_nonunique_id(self):
+        with pytest.raises(
+            ValueError,
+            match=r"duplicated key found*",
+        ):
+            Workflow.parse({
+                "tasks": [
+                    {
+                        "id": 0,
+                        "op": "subset",
+                        "args": {
+                            "dataset_id": "era5",
+                            "product_id": "reanalysis",
+                        },
+                    },
+                    {
+                        "id": 0,
+                        "op": "subset",
+                        "args": {
+                            "dataset_id": "era5",
+                            "product_id": "reanalysis",
+                        },
+                    },
+                ]
+            })
diff --git a/drivers/tests/sentinel/__init__.py b/drivers/tests/sentinel/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/drivers/tests/sentinel/fixture.py b/drivers/tests/sentinel/fixture.py
new file mode 100644
index 0000000..cfbb8bd
--- /dev/null
+++ b/drivers/tests/sentinel/fixture.py
@@ -0,0 +1,11 @@
+import pytest
+
+
+@pytest.fixture
+def sentinel_files():
+    return [
+        "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R20m/T32TQM_20231007T100031_B01_20m.jp2",
+        "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R20m/T32TQM_20231007T100031_B10_20m.jp2",
+        "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R30m/T32TQM_20231007T100031_B04_30m.jp2",
+        "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R10m/T32TQM_20231007T100031_B12_40m.jp2",
+    ]
diff --git a/drivers/tests/sentinel/test_builder.py b/drivers/tests/sentinel/test_builder.py
new file mode 100644
index 0000000..f2e5cc1
--- /dev/null
+++ b/drivers/tests/sentinel/test_builder.py
@@ -0,0 +1,376 @@
+from unittest import mock
+
+import pytest
+from requests import Response, Session
+
+from intake_geokube.sentinel.odata_builder import (
+    HttpMethod,
+    ODataRequest,
+    ODataRequestBuilder,
+    _ODataEntity,
+    _ODataOperation,
+    _ODataOrderMixing,
+    datetime_to_isoformat,
+)
+
+
+@pytest.fixture
+def odata() -> _ODataEntity:
+    return _ODataEntity(url="http://url.com/v1")
+
+
+@pytest.fixture
+def odata_op(odata) -> _ODataOperation:
+    return _ODataOperation(odata=odata)
+
+
+class TestHttpMethod:
+    @pytest.mark.parametrize(
+        "method,res", [(HttpMethod.GET, "get"), (HttpMethod.POST, "post")]
+    )
+    def test_get_proper_name(self, method, res):
+        assert method.method_name == res
+
+
+class TestODataRequestBuildable:
+    def test_build_from_operation(self, odata):
+        res = _ODataOperation(odata).build()
+        assert isinstance(res, ODataRequest)
+        assert res.odata == odata
+
+
+class TestOrderMixin:
+    @pytest.mark.parametrize("type_", [_ODataOperation])
+    def test_proper_class_when_order(self, type_, odata):
+        res = type_(odata).order(by="ProductionDate")
+        assert isinstance(res, type_)
+
+    def test_fail_order_on_wrong_superclass(self, odata):
+        class A(_ODataOrderMixing):
+            def __init__(self, odata):
+                self.odata = odata
+
+        with pytest.raises(TypeError, match=r"unexpected type:*"):
+            A(odata).order(by="a")
+
+
+class TestODataRequest:
+    def test_convert_filter_param(self, odata_op):
+        odata_op.filter("a", eq=10).or_().filter("b", lt=100, ge=10).order(
+            by="a", desc=True
+        )
+        req =
ODataRequest(odata_op.odata) + assert req.odata.params["filter"] == [ + "a eq '10'", + "b lt 100", + "b ge 10", + ] + assert ( + req.request_params["filter"] == "a eq '10' or b lt 100 and b ge 10" + ) + assert req.odata.params["orderby"] == ["a desc"] + + +class TestODataRequestBuilder: + def test_create_odata_operation_from_builder(self): + res = ODataRequestBuilder.new(url="http:/url.com") + assert isinstance(res, _ODataOperation) + assert res.odata.url == "http:/url.com/Products" + + +class TestODataOperation: + @pytest.fixture + def odata_request(self) -> ODataRequest: + return ODataRequestBuilder.new("http://aaaa.com").build() + + @pytest.mark.parametrize( + "datestring,result", + [ + ("2002-02-01", "2002-02-01T00:00:00Z"), + ("2001-02-02 12:45", "2001-02-02T12:45:00Z"), + ("1977-12-23 11:00:05", "1977-12-23T11:00:05Z"), + ("1977-12-23T11:00:05", "1977-12-23T11:00:05Z"), + ], + ) + def test_convert_to_isoformat(self, datestring, result): + assert datetime_to_isoformat(datestring) == result + + def testwith_option_equal(self, odata_op): + odata_op.with_option_equal("Name", "some_name") + assert len(odata_op.odata.params) == 1 + assert odata_op.odata.method is HttpMethod.GET + assert odata_op.odata.params["filter"] == ["Name eq 'some_name'"] + + def test_option_containing(self, odata_op): + odata_op.with_option_containing("some_option", "aaa") + assert len(odata_op.odata.params) == 1 + assert odata_op.odata.method is HttpMethod.GET + assert odata_op.odata.params["filter"] == [ + "contains(some_option,'aaa')" + ] + + def test_option_not_containing(self, odata_op): + odata_op.with_option_not_containing("some_option", "aaa") + assert len(odata_op.odata.params) == 1 + assert odata_op.odata.method is HttpMethod.GET + assert odata_op.odata.params["filter"] == [ + "not contains(some_option,'aaa')" + ] + + def testwith_option_equal_list(self, odata_op): + odata_op.with_option_equal_list("Name", ["some_name", "aaa"]) + assert len(odata_op.odata.params) == 0 + assert odata_op.odata.method is HttpMethod.POST + assert odata_op.odata.body == { + "FilterProducts": [{"Name": "some_name"}, {"Name": "aaa"}] + } + + def test_several_options(self, odata_op): + odata_op.with_option_equal("aa", "bb").and_().with_option_lt( + "aaa", "1000" + ) + assert odata_op.odata.method is HttpMethod.GET + assert len(odata_op.odata.params) == 1 + assert odata_op.odata.params["filter"] == ["aa eq 'bb'", "aaa lt 1000"] + + @pytest.mark.parametrize( + "comb", + [ + {"lt": 1, "eq": 10}, + {"le": 1, "eq": 10}, + {"lt": 1, "le": 10}, + {"gt": 1, "ge": 10}, + {"ge": 1, "eq": 10}, + {"gt": 1, "eq": 10}, + {"lt": 1, "eq": 1, "ge": 1}, + ], + ) + def test_filter_fail_on_wrong_arguments_passed(self, comb, odata_op): + with pytest.raises(ValueError, match=r"cannot define *"): + odata_op.filter(name="a", **comb) + + def test_filter_single(self, odata_op): + res = odata_op.filter(name="a", lt=100) + assert res.odata.params["filter"] == ["a lt 100"] + + def test_filter_multiple(self, odata_op): + res = odata_op.filter(name="a", lt=100, gt=10) + assert res.odata.params["filter"] == ["a lt 100", "a gt 10"] + assert res.odata.conj[-1] == "and" + + def test_filter_multiple2(self, odata_op): + res = odata_op.filter(name="a", ge=10, le=100) + assert res.odata.params["filter"] == ["a le 100", "a ge 10"] + assert res.odata.conj[-1] == "and" + + def test_filter_multiple3(self, odata_op): + res = odata_op.filter(name="a", eq=10) + assert res.odata.params["filter"] == ["a eq '10'"] + assert res.odata.conj[-1] == "and" + + 
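+    def test_filter_date_and_order_combined_example(self, odata_op):
+        # Illustrative test added alongside the originals: it combines
+        # behaviour already asserted above (ISO conversion in
+        # `filter_date` plus `order`) in a single converted request.
+        req = ODataRequest(
+            odata_op.filter_date("ContentDate/Start", ge="2000-01-01")
+            .order(by="ContentDate/Start")
+            .odata
+        )
+        assert req.request_params["filter"] == (
+            "ContentDate/Start ge 2000-01-01T00:00:00Z"
+        )
+        assert req.request_params["orderby"] == ["ContentDate/Start asc"]
+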
@pytest.mark.parametrize("arr", ["111", "111", "02-20", "56:45", "aaa"]) + def test_filter_date_fail_arg_nondateparsable(self, arr, odata_op): + with pytest.raises(ValueError, match=r"cannot parse*"): + odata_op.filter_date("ProductionDate", lt=arr) + + @pytest.mark.parametrize("arr", [(1,), 1, 1.2, [1, 2], {1, 2}]) + def test_filter_date_fail_arg_wrong_type(self, arr, odata_op): + with pytest.raises(TypeError, match=r"type .* is not supported"): + odata_op.filter_date("ProductionDate", lt=arr) + + def test_filter_and_order_ascending(self, odata_op): + odata_op.with_option_gt("aaa", "-50").order( + by="ProductionDate", desc=False + ) + assert odata_op.odata.method is HttpMethod.GET + assert len(odata_op.odata.params) == 2 + assert odata_op.odata.body == {} + assert odata_op.odata.params["filter"] == ["aaa gt -50"] + assert odata_op.odata.params["orderby"] == ["ProductionDate asc"] + + def test_filter_and_order_descending(self, odata_op): + odata_op.with_option_gt("aaa", "-50").order( + by="ProductionDate", desc=True + ) + assert odata_op.odata.method is HttpMethod.GET + assert len(odata_op.odata.params) == 2 + assert odata_op.odata.body == {} + assert odata_op.odata.params["filter"] == ["aaa gt -50"] + assert odata_op.odata.params["orderby"] == ["ProductionDate desc"] + + @mock.patch.object(Session, "send") + def test_request_data(self, send_mock, odata_op): + send_mock.json.return_value = "{'response': 'some response'}" + _ = ( + odata_op.with_option_gt("aaa", "-50") + .order(by="ProductionDate", desc=True) + .build() + .query() + ) + send_mock.assert_called_once() + assert ( + send_mock.call_args_list[0].args[0].url + == "http://url.com/v1?%24filter=aaa+gt+-50&%24orderby=ProductionDate+desc" + ) + + @mock.patch.object(Session, "send") + def test_url_passed_with_extra_slashes(self, send_mock): + builder = ODataRequestBuilder.new( + "https://some_url.com/odata/v1" + ).build() + assert builder.odata.url == "https://some_url.com/odata/v1/Products" + + def test_polygon_fail_on_other_srid_passed(self, odata_op): + with pytest.raises( + NotImplementedError, match=r"currently supported SRID is only*" + ): + odata_op.intersect_polygon( + polygon=[[0, 1], [1, 2], [0, 1]], srid="123" + ) + + def test_polygon_fail_on_polygon_with_more_than_two_coords(self, odata_op): + with pytest.raises( + ValueError, + match=r"polygon should be defined as a 2-element list or tuple*", + ): + odata_op.intersect_polygon(polygon=[[0, 1], [1, 2, 3], [0, 1]]) + + def test_polygon_fail_on_polygon_ending_not_on_start_point(self, odata_op): + with pytest.raises( + ValueError, + match=r"polygon needs to end at the same point it starts!", + ): + odata_op.intersect_polygon(polygon=[[0, 1], [1, 3], [1, 1]]) + + def test_location_fail_on_other_srid_passed(self, odata_op): + with pytest.raises( + NotImplementedError, match=r"currently supported SRID is only*" + ): + odata_op.intersect_point(point=(0.1, 2.0), srid="123") + + def test_location_fail_on_more_than_two_coords(self, odata_op): + with pytest.raises( + ValueError, match=r"point need to have just two elemens*" + ): + odata_op.intersect_point(point=[0, 1, 4]) + + @mock.patch.object(Session, "send") + @pytest.mark.parametrize( + "code,callback", [(200, lambda r: "ok"), (400, lambda r: "bad")] + ) + def test_callback_call_on_defined( + self, send_mock, code, callback, odata_request + ): + response = Response() + response.status_code = code + send_mock.return_value = response + res = odata_request.with_callback(callback, code).query() + assert res == callback(None) + + 
@mock.patch.object(Session, "send") + def test_return_response_on_missing_callback( + self, send_mock, odata_request + ): + response = Response() + response.status_code = 200 + send_mock.return_value = response + res = odata_request.query() + assert isinstance(res, Response) + + @mock.patch.object(Session, "send") + @pytest.mark.parametrize("code", [200, 300, 305, 400, 500]) + def test_callback_without_http_code(self, send_mock, code, odata_request): + response = Response() + response.status_code = code + send_mock.return_value = response + callback = mock.MagicMock() + _ = odata_request.with_callback(callback).query() + callback.assert_called_with(response) + + def test_operations_with_auto_conjunction(self, odata_op): + res = odata_op.filter("a", lt=10).filter("b", ge="aaa") + assert res.odata.params["filter"] == ["a lt 10", "b ge aaa"] + assert len(res.odata.conj) == 2 + assert res.odata.conj == ["and", "and"] + + def test_operations_with_auto_conjunction_with_several_operations( + self, odata_op + ): + res = ( + odata_op.filter("a", lt=10) + .filter("b", ge="aaa") + .filter_date("ProductioNDate", lt="2000-01-01") + ) + assert res.odata.params["filter"] == [ + "a lt 10", + "b ge aaa", + "ProductioNDate lt 2000-01-01T00:00:00Z", + ] + assert len(res.odata.conj) == 3 + assert res.odata.conj == ["and", "and", "and"] + + def test_operations_with_auto_and_explicit_conjunction_with_several_operations( + self, odata_op + ): + res = ( + odata_op.filter("a", lt=10) + .filter("b", ge="aaa") + .or_() + .filter_date("ProductioNDate", lt="2000-01-01") + ) + assert res.odata.params["filter"] == [ + "a lt 10", + "b ge aaa", + "ProductioNDate lt 2000-01-01T00:00:00Z", + ] + assert len(res.odata.conj) == 3 + assert res.odata.conj == ["and", "or", "and"] + + def test_con_conj_on_single_operation(self, odata_op): + res = odata_op.filter("a", lt=10) + assert res.odata.params["filter"] == ["a lt 10"] + assert len(res.odata.conj) == 1 + + def test_operations_with_explicit_conjunction_and(self, odata_op): + res = odata_op.filter("a", lt=10).and_().filter("b", ge="aaa") + assert res.odata.params["filter"] == ["a lt 10", "b ge aaa"] + assert len(res.odata.conj) == 2 + assert res.odata.conj == ["and", "and"] + + def test_operations_with_explicit_conjunction_or(self, odata_op): + res = odata_op.filter("a", lt=10).or_().filter("b", ge="aaa") + assert res.odata.params["filter"] == ["a lt 10", "b ge aaa"] + assert len(res.odata.conj) == 2 + assert res.odata.conj == ["or", "and"] + + def test_operation_with_idempotent_same_conjunction(self, odata_op): + res = odata_op.filter("a", lt=10).or_().or_().filter("b", ge="aaa") + assert res.odata.params["filter"] == ["a lt 10", "b ge aaa"] + assert len(res.odata.conj) == 2 + assert res.odata.conj == ["or", "and"] + + def test_operation_with_idempotent_other_conjunction(self, odata_op): + res = ( + odata_op.filter("a", lt=10) + .or_() + .or_() + .and_() + .filter("b", ge="aaa") + ) + assert res.odata.params["filter"] == ["a lt 10", "b ge aaa"] + assert len(res.odata.conj) == 2 + assert res.odata.conj == ["and", "and"] + + def test_filter_skip_if_all_arg_nones(self, odata_op): + odata_op = odata_op.filter("a").filter("b") + assert len(odata_op.odata.params) == 0 + assert len(odata_op.odata.conj) == 0 + + def test_filter_containing(self, odata_op): + odata_op = odata_op.filter("a", containing="ggg", not_containing="bbb") + assert odata_op.odata.params["filter"] == [ + "contains(a,'ggg')", + "not contains(a,'bbb')", + ] + assert odata_op.odata.conj == ["and", "and"] diff --git 
a/drivers/tests/sentinel/test_driver.py b/drivers/tests/sentinel/test_driver.py
new file mode 100644
index 0000000..326bab4
--- /dev/null
+++ b/drivers/tests/sentinel/test_driver.py
@@ -0,0 +1,177 @@
+import os
+from unittest import mock
+
+import pytest
+from intake.source.utils import reverse_format
+
+import intake_geokube.sentinel.driver as drv
+from intake_geokube.queries.geoquery import GeoQuery
+
+from . import fixture as fxt
+
+
+class TestSentinelDriver:
+    @pytest.mark.parametrize(
+        "item,res",
+        [
+            ("aaa", 1),
+            (["aa", "bb"], 2),
+            (10, 1),
+            ([10, 100], 2),
+            (("a", "b"), 2),
+            ((-1, -5), 2),
+        ],
+    )
+    def test_get_items_nbr(self, item, res):
+        assert drv._get_items_nbr({"key": item}, "key") == res
+
+    @pytest.mark.skip(reason="product_id is not mandatory anymore")
+    def test_validate_query_fail_on_missing_product_id(self):
+        query = GeoQuery()
+        with pytest.raises(
+            ValueError, match=r"\'product_id\' is mandatory filter"
+        ):
+            drv._validate_geoquery_for_sentinel(query)
+
+    @pytest.mark.parametrize(
+        "time",
+        [
+            {"year": [2000, 2014], "month": 10, "day": 14},
+            {"year": 2014, "month": [10, 11], "day": 14},
+            {"year": 2000, "month": 10, "day": [14, 15, 16]},
+        ],
+    )
+    def test_validate_query_fail_on_multiple_year_month_day(self, time):
+        query = GeoQuery(product_id="aaa", time=time)
+        with pytest.raises(
+            ValueError,
+            match=(
+                r"valid time combo for sentinel data should contain exactly"
+                r" one*"
+            ),
+        ):
+            drv._validate_geoquery_for_sentinel(query)
+
+    @pytest.mark.parametrize(
+        "time",
+        [
+            {"year": 1999, "month": 10, "day": 14},
+            {"year": 2014, "month": 10, "day": 14},
+            {"year": 2000, "month": 10, "day": 14},
+        ],
+    )
+    def test_validate_query_if_time_passed_as_int(self, time):
+        query = GeoQuery(product_id="aaa", time=time)
+        drv._validate_geoquery_for_sentinel(query)
+
+    @pytest.mark.parametrize(
+        "time",
+        [
+            {"year": "1999", "month": "10", "day": "14"},
+            {"year": 2014, "month": "10", "day": 14},
+            {"year": "2000", "month": 10, "day": 14},
+        ],
+    )
+    def test_validate_query_if_time_passed_as_str(self, time):
+        query = GeoQuery(product_id="aaa", time=time)
+        drv._validate_geoquery_for_sentinel(query)
+
+    @pytest.mark.parametrize(
+        "locs",
+        [{"latitude": 10}, {"longitude": -10}, {"latitude": 5, "aaa": 10}],
+    )
+    def test_validate_query_fail_on_missing_key(self, locs):
+        query = GeoQuery(product_id="aa", location=locs)
+        with pytest.raises(
+            ValueError,
+            match=(
+                r"both \'latitude\' and \'longitude\' must be defined for"
+                r" locatio"
+            ),
+        ):
+            drv._validate_geoquery_for_sentinel(query)
+
+    @pytest.mark.parametrize(
+        "locs",
+        [
+            {"latitude": [10, -5], "longitude": [-1, -2]},
+            {"latitude": 10, "longitude": [-1, -2]},
+            {"latitude": [10, -5], "longitude": -1},
+        ],
+    )
+    def test_location_to_valid_point_fail_on_multielement_list_passed(
+        self, locs
+    ):
+        query = GeoQuery(product_id="aa", location=locs)
+        with pytest.raises(
+            ValueError,
+            match=r"location can have just a single point \(single value for*",
+        ):
+            drv._location_to_valid_point(query.location)
+
+    @pytest.mark.parametrize(
+        "path,res",
+        [
+            (
+                "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R20m/T32TQM_20231007T100031_B01_20m.jp2",
+                {
+                    "product_id": "162f8f7e-c954-4f69-bb53-ed820aa6432a",
+                    "resolution": "R20m",
+                    "band": "B01",
+                },
+            ),
+            (
"/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R30m/T32TQM_20231007T100031_B04_30m.jp2", + { + "product_id": "162f8f7e-c954-4f69-bb53-ed820aa6432a", + "resolution": "R30m", + "band": "B04", + }, + ), + ], + ) + def test_zippatern(self, path, res): + zippattern = "/{product_id}/{}.SAFE/GRANULE/{}/IMG_DATA/{resolution}/{}_{}_{band}_{}.jp2" + target_dir = "/tmp/pymp-2b5gr07m" + assert reverse_format(zippattern, path.removeprefix(target_dir)) == res + + @pytest.mark.parametrize( + "path,exp", + [ + ( + "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R20m/T32TQM_20231007T100031_B01_20m.jp2", + "R20m_B01", + ), + ( + "/tmp/pymp-2b5gr07m/162f8f7e-c954-4f69-bb53-ed820aa6432a/S2A_MSIL2A_20231007T100031_N0509_R122_T32TQM_20231007T142901.SAFE/GRANULE/L2A_T32TQM_A043305_20231007T100026/IMG_DATA/R30m/T32TQM_20231007T100031_B04_30m.jp2", + "R30m_B04", + ), + ], + ) + def test_get_field_name_from_path(self, path, exp): + assert drv._get_field_name_from_path(path) == exp + + @mock.patch.dict(os.environ, {}, clear=True) + def test_fail_if_no_username_passed(self): + with pytest.raises( + KeyError, + match=( + r"missing at least of of the mandatory environmental" + r" variables:" + ), + ): + drv.SentinelDriver({}, "", "", "") + + def test_raise_notimplemented_for_read(self): + with pytest.raises( + NotImplementedError, + match=r"reading metadata is not supported for sentinel data*", + ): + drv.SentinelDriver({}, "", "", "").read() + + def test_raise_notimplemented_for_load(self): + with pytest.raises( + NotImplementedError, + match=r"loading entire product is not supported for sentinel data", + ): + drv.SentinelDriver({}, "", "", "").load() diff --git a/drivers/tests/test_geoquery.py b/drivers/tests/test_geoquery.py new file mode 100644 index 0000000..4cb9daa --- /dev/null +++ b/drivers/tests/test_geoquery.py @@ -0,0 +1,41 @@ +from unittest import mock + +import pytest + +from intake_geokube.queries.geoquery import GeoQuery + + +class TestGeoQuery: + def test_pass_time_as_combo(self): + query = GeoQuery( + time={ + "year": ["2002"], + "month": ["6"], + "day": ["21"], + "hour": ["8", "10"], + } + ) + assert isinstance(query.time, dict) + + def test_pass_time_as_slice(self): + query = GeoQuery(time={"start": "2000-01-01", "stop": "2001-12-21"}) + assert isinstance(query.time, slice) + assert query.time.start == "2000-01-01" + assert query.time.stop == "2001-12-21" + + def test_dump_original_from_time_as_combo(self): + query = GeoQuery( + time={ + "year": ["2002"], + "month": ["6"], + "day": ["21"], + "hour": ["8", "10"], + } + ) + res = query.model_dump_original() + assert isinstance(res["time"], dict) + + def test_dump_original_from_time_as_slice(self): + query = GeoQuery(time={"start": "2000-01-01", "stop": "2001-12-21"}) + res = query.model_dump_original() + assert isinstance(res["time"], dict) From 3a25e5480ecbe77f6124acc4555960dbb401c634 Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Fri, 12 Jan 2024 10:13:39 +0100 Subject: [PATCH 07/31] Add workflows --- .github/workflows/build-push-docker-prod.yml | 26 ++++++++++++++++ .github/workflows/build-push-docker.yml | 32 ++++++++++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 .github/workflows/build-push-docker-prod.yml create mode 100644 .github/workflows/build-push-docker.yml diff --git 
a/.github/workflows/build-push-docker-prod.yml b/.github/workflows/build-push-docker-prod.yml new file mode 100644 index 0000000..3cc1e88 --- /dev/null +++ b/.github/workflows/build-push-docker-prod.yml @@ -0,0 +1,26 @@ +name: Build Docker image of the geodds-api component and push to the production repository + +on: + push: + tags: + - 'v*' +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Get release tag + run: echo "RELEASE_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV + - name: Login to Docker registry + run: echo ${{ secrets.DOCKER_PASSWORD }} | docker login ${{ secrets.DOCKER_PROD_REPO_URL }} -u nologin --password-stdin + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: . + file: ./Dockerfile + push: true + tags: | + ${{ secrets.DOCKER_PROD_REPO_URL }}/geodds-api:${{ env.RELEASE_TAG }} + ${{ secrets.DOCKER_PROD_REPO_URL }}/geodds-api:latest diff --git a/.github/workflows/build-push-docker.yml b/.github/workflows/build-push-docker.yml new file mode 100644 index 0000000..6365e65 --- /dev/null +++ b/.github/workflows/build-push-docker.yml @@ -0,0 +1,32 @@ +name: Build Docker image of the geodds-api component and push to the dev repository + +on: + pull_request: + types: [opened, synchronize] + workflow_dispatch: +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set Docker image tag name + run: echo "TAG=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_ENV + - name: Login to Scaleway Container Registry + uses: docker/login-action@v2 + with: + username: nologin + password: ${{ secrets.DOCKER_PASSWORD }} + registry: ${{ secrets.DOCKER_DEV_REPO_URL }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: . 
+ file: ./Dockerfile + push: true + build-args: | + REGISTRY=${{ secrets.DOCKER_DEV_REPO_URL }} + tags: | + ${{ secrets.DOCKER_DEV_REPO_URL }}/geodds-api:${{ env.TAG }} + ${{ secrets.DOCKER_DEV_REPO_URL }}/geodds-api:latest \ No newline at end of file From 291a1e603558052448c57c670ebc87bc32f338d4 Mon Sep 17 00:00:00 2001 From: Marco Mancini Date: Fri, 12 Jan 2024 10:23:53 +0100 Subject: [PATCH 08/31] Remove db folder --- db/Dockerfile | 2 - db/dbmanager/__init__.py | 0 db/dbmanager/dbmanager.py | 183 -------------------------------------- db/scripts/1-init.sql | 66 -------------- db/scripts/2-populate.sql | 2 - 5 files changed, 253 deletions(-) delete mode 100644 db/Dockerfile delete mode 100644 db/dbmanager/__init__.py delete mode 100644 db/dbmanager/dbmanager.py delete mode 100644 db/scripts/1-init.sql delete mode 100644 db/scripts/2-populate.sql diff --git a/db/Dockerfile b/db/Dockerfile deleted file mode 100644 index 8bcf754..0000000 --- a/db/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM postgres:14.1 -ADD ./scripts/init.sql /docker-entrypoint-initdb.d/ \ No newline at end of file diff --git a/db/dbmanager/__init__.py b/db/dbmanager/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/db/dbmanager/dbmanager.py b/db/dbmanager/dbmanager.py deleted file mode 100644 index 16b956b..0000000 --- a/db/dbmanager/dbmanager.py +++ /dev/null @@ -1,183 +0,0 @@ -from __future__ import annotations - -from datetime import datetime -from enum import auto, Enum as Enum_, unique - -from sqlalchemy import ( - Column, - create_engine, - DateTime, - Enum, - ForeignKey, - Integer, - JSON, - Sequence, - String -) -from sqlalchemy.orm import declarative_base, relationship, sessionmaker - - -@unique -class RequestStatus(Enum_): - PENDING = auto() - RUNNING = auto() - DONE = auto() - FAILED = auto() - - -class _Repr: - def __repr__(self): - cols = self.__table__.columns.keys() # pylint: disable=no-member - kwa = ', '.join(f'{col}={getattr(self, col)}' for col in cols) - return f'{type(self).__name__}({kwa})' - - -Base = declarative_base(cls=_Repr, name='Base') - - -class Role(Base): - __tablename__ = 'roles' - role_id = Column(Integer, Sequence('role_id_seq'), primary_key=True) - role_name = Column(String(255), nullable=False, unique=True) - - -class User(Base): - __tablename__ = 'users' - user_id = Column(Integer, primary_key=True) - keycloak_id = Column(Integer, nullable=False, unique=True) - api_key = Column(String(255), nullable=False, unique=True) - contact_name = Column(String(255)) - role_id = Column(Integer, ForeignKey('roles.role_id')) - - -class Worker(Base): - __tablename__ = 'workers' - worker_id = Column(Integer, primary_key=True) - status = Column(String(255), nullable=False) - host = Column(String(255)) - dask_scheduler_port = Column(Integer) - dask_dashboard_address = Column(String(10)) - created_on = Column(DateTime, nullable=False) - - -class Request(Base): - __tablename__ = 'requests' - request_id = Column(Integer, primary_key=True) - status = Column(Enum(RequestStatus), nullable=False) - priority = Column(Integer) - user_id = Column(Integer, ForeignKey('users.user_id'), nullable=False) - worker_id = Column(Integer, ForeignKey('workers.worker_id')) - dataset = Column(String(255)) - product = Column(String(255)) - query = Column(JSON()) - estimate_bytes_size = Column(Integer) - download_id = Column(Integer, unique=True) - created_on = Column(DateTime, nullable=False) - last_update = Column(DateTime) - - -class Download(Base): - __tablename__ = 'downloads' - download_id = 
Column( - Integer, primary_key=True - ) - download_uri = Column(String(255)) - storage_id = Column(Integer) - location_path = Column(String(255)) - bytes_size = Column(Integer) - created_on = Column(DateTime, nullable=False) - - -class Storage(Base): - __tablename__ = 'storages' - storage_id = Column(Integer, primary_key=True) - name = Column(String(255)) - host = Column(String(20)) - protocol = Column(String(10)) - port = Column(Integer) - - -class DBManager: - def __init__( - self, - database: str = 'dds', - host: str = 'db', - port: int = 5432, - user: str = 'dds', - password: str = 'dds' - ) -> None: - url = f'postgresql://{user}:{password}@{host}:{port}/{database}' - self.__engine = engine = create_engine(url, echo=True) - self.__session_maker = sessionmaker(bind=engine) - Base.metadata.create_all(engine) - - def create_request( - self, - user_id: int = 1, - dataset: str | None = None, - product: str | None = None, - query: str | None = None, - worker_id: int | None = None, - priority: str | None = None, - estimate_bytes_size: int | None = None, - download_id: int | None = None, - status: RequestStatus = RequestStatus.PENDING, - ) -> int: - # TODO: Add more request-related parameters to this method. - with self.__session_maker() as session: - request = Request( - status=status, - priority=priority, - user_id=user_id, - worker_id=worker_id, - dataset=dataset, - product=product, - query=query, - estimate_bytes_size=estimate_bytes_size, - download_id=download_id, - created_on=datetime.utcnow() - ) - session.add(request) - session.commit() - return request.request_id - - def update_request( - self, - request_id: int, - worker_id: int, - status: RequestStatus - ) -> int: - with self.__session_maker() as session: - request = session.query(Request).get(request_id) - request.status = status - request.worker_id = worker_id - request.last_update = datetime.utcnow() - session.commit() - return request.request_id - - def get_request_status( - self, - request_id - ) -> RequestStatus: - with self.__session_maker() as session: - request = session.query(Request).get(request_id) - return request.status - - def create_worker( - self, - status: str, - dask_scheduler_port: int, - dask_dashboard_address: int, - host: str = 'localhost' - ) -> int: - with self.__session_maker() as session: - worker = Worker( - status=status, - host=host, - dask_scheduler_port=dask_scheduler_port, - dask_dashboard_address=dask_dashboard_address, - created_on=datetime.utcnow() - ) - session.add(worker) - session.commit() - return worker.worker_id diff --git a/db/scripts/1-init.sql b/db/scripts/1-init.sql deleted file mode 100644 index fafd908..0000000 --- a/db/scripts/1-init.sql +++ /dev/null @@ -1,66 +0,0 @@ --- CREATE USER dds WITH PASSWORD 'dds'; --- CREATE DATABASE dds; --- GRANT ALL PRIVILEGES ON DATABASE dds TO dds; - -CREATE TABLE IF NOT EXISTS roles ( - role_id SERIAL PRIMARY KEY, - role_name VARCHAR (255) UNIQUE NOT NULL -); - -CREATE TABLE IF NOT EXISTS users ( - user_id SERIAL PRIMARY KEY, - keycloak_id INT UNIQUE NOT NULL, - api_key VARCHAR(255) UNIQUE NOT NULL, - contact_name VARCHAR(255), - role_id INT, - CONSTRAINT fk_role - FOREIGN KEY(role_id) - REFERENCES roles(role_id) -); - -CREATE TABLE IF NOT EXISTS workers ( - worker_id SERIAL PRIMARY KEY, - status VARCHAR(255) NOT NULL, - host VARCHAR(255), - dask_scheduler_port INT, - dask_dashboard_address CHAR(10), - created_on TIMESTAMP NOT NULL -); - -CREATE TABLE IF NOT EXISTS requests ( - request_id SERIAL PRIMARY KEY, - status VARCHAR(255) NOT NULL, - priority 
INT, - user_id INT NOT NULL, - worker_id INT, - dataset VARCHAR(255), - product VARCHAR(255), - query json, - estimate_bytes_size INT, - download_id INT UNIQUE, - created_on TIMESTAMP NOT NULL, - last_update TIMESTAMP, - CONSTRAINT fk_user - FOREIGN KEY(user_id) - REFERENCES users(user_id), - CONSTRAINT fk_worker - FOREIGN KEY(worker_id) - REFERENCES workers(worker_id) -); - -CREATE TABLE IF NOT EXISTS downloads ( - download_id SERIAL PRIMARY KEY, - download_uri VARCHAR(255), - storage_id INT, - location_path VARCHAR(255), - bytes_size INT, - created_on TIMESTAMP NOT NULL -); - -CREATE TABLE IF NOT EXISTS storages ( - storage_id SERIAL PRIMARY KEY, - name VARCHAR(255), - host VARCHAR(20), - protocol VARCHAR(10), - port INT -); \ No newline at end of file diff --git a/db/scripts/2-populate.sql b/db/scripts/2-populate.sql deleted file mode 100644 index 1406ff9..0000000 --- a/db/scripts/2-populate.sql +++ /dev/null @@ -1,2 +0,0 @@ -INSERT INTO roles VALUES (1, 'internal'); -INSERT INTO users VALUES (1, '1234', '1234:1234', 'Mario Rossi', 1); \ No newline at end of file From 1bd8355460c25d950e2bee1e8781d6734b1340d8 Mon Sep 17 00:00:00 2001 From: Jakub Walczak Date: Mon, 15 Jan 2024 09:00:37 +0100 Subject: [PATCH 09/31] Prepare single workflow for docker images of all components --- .github/workflows/build-push-docker-prod.yml | 26 -------- .github/workflows/build-push-docker.yml | 32 ---------- .github/workflows/deploy-staging.yml | 65 ++++++++++++++++++++ api/Dockerfile | 2 +- datastore/Dockerfile | 2 +- drivers/README.md | 2 +- drivers/pyproject.toml | 2 +- executor/Dockerfile | 2 +- 8 files changed, 70 insertions(+), 63 deletions(-) delete mode 100644 .github/workflows/build-push-docker-prod.yml delete mode 100644 .github/workflows/build-push-docker.yml create mode 100644 .github/workflows/deploy-staging.yml diff --git a/.github/workflows/build-push-docker-prod.yml b/.github/workflows/build-push-docker-prod.yml deleted file mode 100644 index 3cc1e88..0000000 --- a/.github/workflows/build-push-docker-prod.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Build Docker image of the geodds-api component and push to the production repository - -on: - push: - tags: - - 'v*' -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Get release tag - run: echo "RELEASE_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - name: Login to Docker registry - run: echo ${{ secrets.DOCKER_PASSWORD }} | docker login ${{ secrets.DOCKER_PROD_REPO_URL }} -u nologin --password-stdin - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Build and push - uses: docker/build-push-action@v4 - with: - context: . 
- file: ./Dockerfile - push: true - tags: | - ${{ secrets.DOCKER_PROD_REPO_URL }}/geodds-api:${{ env.RELEASE_TAG }} - ${{ secrets.DOCKER_PROD_REPO_URL }}/geodds-api:latest diff --git a/.github/workflows/build-push-docker.yml b/.github/workflows/build-push-docker.yml deleted file mode 100644 index 6365e65..0000000 --- a/.github/workflows/build-push-docker.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Build Docker image of the geodds-api component and push to the dev repository - -on: - pull_request: - types: [opened, synchronize] - workflow_dispatch: -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set Docker image tag name - run: echo "TAG=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_ENV - - name: Login to Scaleway Container Registry - uses: docker/login-action@v2 - with: - username: nologin - password: ${{ secrets.DOCKER_PASSWORD }} - registry: ${{ secrets.DOCKER_DEV_REPO_URL }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Build and push - uses: docker/build-push-action@v4 - with: - context: . - file: ./Dockerfile - push: true - build-args: | - REGISTRY=${{ secrets.DOCKER_DEV_REPO_URL }} - tags: | - ${{ secrets.DOCKER_DEV_REPO_URL }}/geodds-api:${{ env.TAG }} - ${{ secrets.DOCKER_DEV_REPO_URL }}/geodds-api:latest \ No newline at end of file diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml new file mode 100644 index 0000000..b45bf4e --- /dev/null +++ b/.github/workflows/deploy-staging.yml @@ -0,0 +1,65 @@ +name: Build Docker images for geolake components and push to the repository + +on: + pull_request: + types: [opened, synchronize] + workflow_dispatch: +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set Docker image tag name + run: echo "TAG=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_ENV + - name: Login to Scaleway Container Registry + uses: docker/login-action@v2 + with: + username: nologin + password: ${{ secrets.DOCKER_PASSWORD }} + registry: ${{ vars.DOCKER_REGISTRY }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Build and push drivers + uses: docker/build-push-action@v4 + with: + context: . + file: ./drivers/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:latest + - name: Build and push datastore component + uses: docker/build-push-action@v4 + with: + context: . + file: ./datastore/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:latest + - name: Build and push api component + uses: docker/build-push-action@v4 + with: + context: . + file: ./api/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-api:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-api:latest + - name: Build and push executor component + uses: docker/build-push-action@v4 + with: + context: . 
+ file: ./executor/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file diff --git a/api/Dockerfile b/api/Dockerfile index 9ee0633..a2cfea0 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,6 +1,6 @@ ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production ARG TAG=latest -FROM $REGISTRY/geodds-datastore:$TAG +FROM $REGISTRY/geolake-datastore:$TAG WORKDIR /app COPY requirements.txt /code/requirements.txt RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt diff --git a/datastore/Dockerfile b/datastore/Dockerfile index 9ca2496..018ad5e 100644 --- a/datastore/Dockerfile +++ b/datastore/Dockerfile @@ -1,6 +1,6 @@ ARG REGISTRY=rg.nl-ams.scw.cloud/geokube-production ARG TAG=latest -FROM $REGISTRY/intake-geokube:$TAG +FROM $REGISTRY/geolake-drivers:$TAG RUN conda install -c conda-forge --yes --freeze-installed psycopg2 \ && conda clean -afy COPY requirements.txt /app/requirements.txt diff --git a/drivers/README.md b/drivers/README.md index f08349c..ed98e22 100644 --- a/drivers/README.md +++ b/drivers/README.md @@ -1,2 +1,2 @@ -# intake-geokube +# geolake-drivers GeoKube plugin for Intake \ No newline at end of file diff --git a/drivers/pyproject.toml b/drivers/pyproject.toml index ae138ac..2f0a6d5 100644 --- a/drivers/pyproject.toml +++ b/drivers/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [project] -name = "intake-geokube" +name = "geolake-drivers" description = "opengeokube DDS driver." requires-python = ">=3.10" readme = "README.md" diff --git a/executor/Dockerfile b/executor/Dockerfile index 6a946fd..db3cebb 100644 --- a/executor/Dockerfile +++ b/executor/Dockerfile @@ -2,7 +2,7 @@ ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production ARG TAG=latest ARG SENTINEL_USERNAME=... ARG SENTINEL_PASSWORD=... 
-FROM $REGISTRY/geodds-datastore:$TAG +FROM $REGISTRY/geolake-datastore:$TAG WORKDIR /app ENV SENTINEL_USERNAME=$SENTINEL_USERNAME ENV SENTINEL_PASSWORD=$SENTINEL_PASSWORD From 29d93e71ee7a93885cf4ae481e661687b83ca2d4 Mon Sep 17 00:00:00 2001 From: Jakub Walczak Date: Mon, 15 Jan 2024 09:04:53 +0100 Subject: [PATCH 10/31] Build wheel for driver --- .github/workflows/deploy-staging.yml | 12 ++++++++++++ drivers/Dockerfile | 6 +++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml index b45bf4e..039f13c 100644 --- a/.github/workflows/deploy-staging.yml +++ b/.github/workflows/deploy-staging.yml @@ -9,6 +9,18 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.x" + - name: Install build + run: >- + python3 -m + pip install + build + --user + - name: Build a binary wheel and a source for drivers + run: python3 -m build ./drivers - name: Set Docker image tag name run: echo "TAG=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_ENV - name: Login to Scaleway Container Registry diff --git a/drivers/Dockerfile b/drivers/Dockerfile index d4f9e76..4980d28 100644 --- a/drivers/Dockerfile +++ b/drivers/Dockerfile @@ -3,6 +3,6 @@ ARG TAG=latest FROM $REGISTRY/geokube:$TAG RUN conda install -c conda-forge --yes --freeze-installed intake=0.6.6 RUN conda clean -afy -COPY dist/intake_geokube-1.0b0-py3-none-any.whl / -RUN pip install /intake_geokube-1.0b0-py3-none-any.whl -RUN rm /intake_geokube-1.0b0-py3-none-any.whl +COPY dist/geolake_drivers-1.0b0-py3-none-any.whl / +RUN pip install /geolake_drivers-1.0b0-py3-none-any.whl +RUN rm /geolake_drivers-1.0b0-py3-none-any.whl From 97b1ef331b8cc06588271fa25b61e8d9eeaa6d67 Mon Sep 17 00:00:00 2001 From: Jakub Walczak Date: Mon, 15 Jan 2024 09:15:46 +0100 Subject: [PATCH 11/31] Update path for intake wheel in Docker use --- .github/workflows/deploy-staging.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml index 039f13c..8bf91bf 100644 --- a/.github/workflows/deploy-staging.yml +++ b/.github/workflows/deploy-staging.yml @@ -34,7 +34,7 @@ jobs: - name: Build and push drivers uses: docker/build-push-action@v4 with: - context: . 
+ context: ./drivers file: ./drivers/Dockerfile push: true build-args: | From bdb77195972a3f280d78418174b79cfac247fb2d Mon Sep 17 00:00:00 2001 From: Jakub Walczak Date: Mon, 15 Jan 2024 09:27:00 +0100 Subject: [PATCH 12/31] Add action for production --- .github/workflows/build-production.yml | 79 +++++++++++++++++++ .../{deploy-staging.yml => build-staging.yml} | 0 2 files changed, 79 insertions(+) create mode 100644 .github/workflows/build-production.yml rename .github/workflows/{deploy-staging.yml => build-staging.yml} (100%) diff --git a/.github/workflows/build-production.yml b/.github/workflows/build-production.yml new file mode 100644 index 0000000..2b04b9a --- /dev/null +++ b/.github/workflows/build-production.yml @@ -0,0 +1,79 @@ +name: Build Docker images for geolake components and push to the repository + +on: + push: + tags: + - 'v*' +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.x" + - name: Install build + run: >- + python3 -m + pip install + build + --user + - name: Build a binary wheel and a source for drivers + run: python3 -m build ./drivers + - name: Set Docker image tag name + run: echo "TAG=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_ENV + - name: Login to Scaleway Container Registry + uses: docker/login-action@v2 + with: + username: nologin + password: ${{ secrets.DOCKER_PASSWORD }} + registry: ${{ vars.DOCKER_REGISTRY }} + - name: Get release tag + run: echo "RELEASE_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Build and push drivers + uses: docker/build-push-action@v4 + with: + context: ./drivers + file: ./drivers/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:${{ env.RELEASE_TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:latest + - name: Build and push datastore component + uses: docker/build-push-action@v4 + with: + context: . + file: ./datastore/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:${{ env.RELEASE_TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:latest + - name: Build and push api component + uses: docker/build-push-action@v4 + with: + context: . + file: ./api/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-api:${{ env.RELEASE_TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-api:latest + - name: Build and push executor component + uses: docker/build-push-action@v4 + with: + context: . 
+ file: ./executor/Dockerfile + push: true + build-args: | + REGISTRY=${{ vars.DOCKER_REGISTRY }} + tags: | + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:${{ env.RELEASE_TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/build-staging.yml similarity index 100% rename from .github/workflows/deploy-staging.yml rename to .github/workflows/build-staging.yml From 5f42f3d3dcb2997bd91d0a5de89db35a490f55ae Mon Sep 17 00:00:00 2001 From: Jakub Walczak Date: Mon, 15 Jan 2024 09:33:43 +0100 Subject: [PATCH 13/31] Update docker context --- .github/workflows/build-production.yml | 6 +++--- .github/workflows/build-staging.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-production.yml b/.github/workflows/build-production.yml index 2b04b9a..608c92b 100644 --- a/.github/workflows/build-production.yml +++ b/.github/workflows/build-production.yml @@ -47,7 +47,7 @@ jobs: - name: Build and push datastore component uses: docker/build-push-action@v4 with: - context: . + context: ./datastore file: ./datastore/Dockerfile push: true build-args: | @@ -58,7 +58,7 @@ jobs: - name: Build and push api component uses: docker/build-push-action@v4 with: - context: . + context: ./api file: ./api/Dockerfile push: true build-args: | @@ -69,7 +69,7 @@ jobs: - name: Build and push executor component uses: docker/build-push-action@v4 with: - context: . + context: ./executor file: ./executor/Dockerfile push: true build-args: | diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml index 8bf91bf..7c16ff2 100644 --- a/.github/workflows/build-staging.yml +++ b/.github/workflows/build-staging.yml @@ -45,7 +45,7 @@ jobs: - name: Build and push datastore component uses: docker/build-push-action@v4 with: - context: . + context: ./datastore file: ./datastore/Dockerfile push: true build-args: | @@ -56,7 +56,7 @@ jobs: - name: Build and push api component uses: docker/build-push-action@v4 with: - context: . + context: ./api file: ./api/Dockerfile push: true build-args: | @@ -67,7 +67,7 @@ jobs: - name: Build and push executor component uses: docker/build-push-action@v4 with: - context: . 
+ context: ./executor file: ./executor/Dockerfile push: true build-args: | From f1771da5b2c6bd463cbfe3a6f0b012d17e811635 Mon Sep 17 00:00:00 2001 From: Valentina Scardigno Date: Tue, 16 Jan 2024 10:34:05 +0000 Subject: [PATCH 14/31] Fix variable name in staging --- .github/workflows/build-staging.yml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml index 7c16ff2..288f6e2 100644 --- a/.github/workflows/build-staging.yml +++ b/.github/workflows/build-staging.yml @@ -28,7 +28,7 @@ jobs: with: username: nologin password: ${{ secrets.DOCKER_PASSWORD }} - registry: ${{ vars.DOCKER_REGISTRY }} + registry: ${{ vars.STAGING_DOCKER_REGISTRY }} - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - name: Build and push drivers @@ -38,10 +38,10 @@ jobs: file: ./drivers/Dockerfile push: true build-args: | - REGISTRY=${{ vars.DOCKER_REGISTRY }} + REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }} tags: | - ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:${{ env.TAG }} - ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:latest + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-drivers:${{ env.TAG }} + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-drivers:latest - name: Build and push datastore component uses: docker/build-push-action@v4 with: @@ -49,10 +49,10 @@ jobs: file: ./datastore/Dockerfile push: true build-args: | - REGISTRY=${{ vars.DOCKER_REGISTRY }} + REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }} tags: | - ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:${{ env.TAG }} - ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:latest + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-datastore:${{ env.TAG }} + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-datastore:latest - name: Build and push api component uses: docker/build-push-action@v4 with: @@ -60,10 +60,10 @@ jobs: file: ./api/Dockerfile push: true build-args: | - REGISTRY=${{ vars.DOCKER_REGISTRY }} + REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }} tags: | - ${{ vars.DOCKER_REGISTRY }}/geolake-api:${{ env.TAG }} - ${{ vars.DOCKER_REGISTRY }}/geolake-api:latest + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-api:${{ env.TAG }} + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-api:latest - name: Build and push executor component uses: docker/build-push-action@v4 with: @@ -71,7 +71,7 @@ jobs: file: ./executor/Dockerfile push: true build-args: | - REGISTRY=${{ vars.DOCKER_REGISTRY }} + REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }} tags: | - ${{ vars.DOCKER_REGISTRY }}/geolake-executor:${{ env.TAG }} - ${{ vars.DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-executor:${{ env.TAG }} + ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file From bf18bfb6b3d026ac451a7414a3a10163a5769bca Mon Sep 17 00:00:00 2001 From: jamesWalczak Date: Mon, 22 Jan 2024 10:55:43 +0100 Subject: [PATCH 15/31] Add docs, part 1 --- docs/docs/about.md | 6 ++++++ docs/docs/api.md | 5 +++++ docs/docs/broker.md | 5 +++++ docs/docs/drivers.md | 5 +++++ docs/docs/executor.md | 5 +++++ docs/docs/img/favicon.svg | 3 +++ docs/docs/img/logo.svg | 3 +++ docs/docs/index.md | 9 ++++++++ docs/docs/installation.md | 1 + docs/mkdocs.yaml | 43 +++++++++++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 11 files changed, 86 insertions(+) create mode 100644 docs/docs/about.md create mode 100644 docs/docs/api.md create mode 100644 docs/docs/broker.md create mode 100644 docs/docs/drivers.md create mode 100644 
docs/docs/executor.md
 create mode 100644 docs/docs/img/favicon.svg
 create mode 100644 docs/docs/img/logo.svg
 create mode 100644 docs/docs/index.md
 create mode 100644 docs/docs/installation.md
 create mode 100644 docs/mkdocs.yaml
 create mode 100644 mkdocs.yml

diff --git a/docs/docs/about.md b/docs/docs/about.md
new file mode 100644
index 0000000..2265138
--- /dev/null
+++ b/docs/docs/about.md
@@ -0,0 +1,6 @@
+# About
+
+## Authors
+
+
+## Acknowledgement
\ No newline at end of file
diff --git a/docs/docs/api.md b/docs/docs/api.md
new file mode 100644
index 0000000..836418a
--- /dev/null
+++ b/docs/docs/api.md
@@ -0,0 +1,5 @@
+# REST API
+
+## Description
+
+## Docstring
\ No newline at end of file
diff --git a/docs/docs/broker.md b/docs/docs/broker.md
new file mode 100644
index 0000000..c3d9608
--- /dev/null
+++ b/docs/docs/broker.md
@@ -0,0 +1,5 @@
+# Broker
+
+## Description
+
+## Docstring
\ No newline at end of file
diff --git a/docs/docs/drivers.md b/docs/docs/drivers.md
new file mode 100644
index 0000000..a8a0052
--- /dev/null
+++ b/docs/docs/drivers.md
@@ -0,0 +1,5 @@
+# Drivers
+
+## Description
+
+## Docstring
\ No newline at end of file
diff --git a/docs/docs/executor.md b/docs/docs/executor.md
new file mode 100644
index 0000000..a003890
--- /dev/null
+++ b/docs/docs/executor.md
@@ -0,0 +1,5 @@
+# Executor
+
+## Description
+
+## Docstring
\ No newline at end of file
diff --git a/docs/docs/img/favicon.svg b/docs/docs/img/favicon.svg
new file mode 100644
index 0000000..ce7dac3
--- /dev/null
+++ b/docs/docs/img/favicon.svg
@@ -0,0 +1,3 @@
+[three lines of SVG markup, lost in extraction]
\ No newline at end of file
diff --git a/docs/docs/img/logo.svg b/docs/docs/img/logo.svg
new file mode 100644
index 0000000..24aa2af
--- /dev/null
+++ b/docs/docs/img/logo.svg
@@ -0,0 +1,3 @@
+[three lines of SVG markup, lost in extraction; the surviving text nodes spell out the "GeoLake" wordmark]
\ No newline at end of file
diff --git a/docs/docs/index.md b/docs/docs/index.md
new file mode 100644
index 0000000..0792f5e
--- /dev/null
+++ b/docs/docs/index.md
@@ -0,0 +1,9 @@
+
+[centered logo image markup, lost in extraction]
+
+# Welcome to GeoLake
+GeoLake is a framework for managing and processing geospatial data.
+
+some more description...
\ No newline at end of file
diff --git a/docs/docs/installation.md b/docs/docs/installation.md
new file mode 100644
index 0000000..f34d65b
--- /dev/null
+++ b/docs/docs/installation.md
@@ -0,0 +1 @@
+# Installation
\ No newline at end of file
diff --git a/docs/mkdocs.yaml b/docs/mkdocs.yaml
new file mode 100644
index 0000000..b2a8154
--- /dev/null
+++ b/docs/mkdocs.yaml
@@ -0,0 +1,43 @@
+site_name: GeoLake
+site_description: GeoLake - an environment for managing and processing geospatial data
+theme:
+  name: material
+  palette:
+  - media: '(prefers-color-scheme: light)'
+    scheme: default
+    primary: blue grey
+    accent: amber
+    toggle:
+      icon: material/lightbulb
+      name: Switch to dark mode
+  - media: '(prefers-color-scheme: dark)'
+    scheme: slate
+    primary: blue grey
+    accent: amber
+    toggle:
+      icon: material/lightbulb-outline
+      name: Switch to light mode
+  features:
+  - search.suggest
+  - search.highlight
+  - content.tooltips
+  - content.code.annotate
+  - content.code.copy
+  - content.code.select
+  icon:
+    repo: fontawesome/brands/github-alt
+  logo: img/favicon.svg
+  favicon: img/favicon.svg
+  language: en
+repo_name: CMCC-Foundation/geolake
+repo_url: https://github.com/CMCC-Foundation/geolake
+edit_uri: ''
+nav:
+- GeoLake: index.md
+- installation.md
+- References:
+  - drivers.md
+  - broker.md
+  - api.md
+  - executor.md
+- about.md
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..c97182f
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1 @@
+site_name: My Docs

From a3a3e999d3ea44346d75936b87240eb512c8e544 Mon Sep 17 00:00:00 2001
From: Valentina Scardigno
Date: Tue, 23 Jan 2024 14:11:27 +0000
Subject: [PATCH 16/31] Change var name for registry

---
 .github/workflows/build-staging.yml | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml
index 288f6e2..7c16ff2 100644
--- a/.github/workflows/build-staging.yml
+++ b/.github/workflows/build-staging.yml
@@ -28,7 +28,7 @@ jobs:
         with:
           username: nologin
           password: ${{ secrets.DOCKER_PASSWORD }}
-          registry: ${{ vars.STAGING_DOCKER_REGISTRY }}
+          registry: ${{ vars.DOCKER_REGISTRY }}
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
       - name: Build and push drivers
@@ -38,10 +38,10 @@ jobs:
           file: ./drivers/Dockerfile
           push: true
          build-args: |
-            REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }}
+            REGISTRY=${{ vars.DOCKER_REGISTRY }}
          tags: |
-            ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-drivers:${{ env.TAG }}
-            ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-drivers:latest
+            ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:${{ env.TAG }}
+            ${{ vars.DOCKER_REGISTRY }}/geolake-drivers:latest
       - name: Build and push datastore component
         uses: docker/build-push-action@v4
         with:
           context: ./datastore
           file: ./datastore/Dockerfile
           push: true
          build-args: |
-            REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }}
+            REGISTRY=${{ vars.DOCKER_REGISTRY }}
          tags: |
-            ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-datastore:${{ env.TAG }}
-            ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-datastore:latest
+            ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:${{ env.TAG }}
+            ${{ vars.DOCKER_REGISTRY }}/geolake-datastore:latest
       - name: Build and push api component
         uses: docker/build-push-action@v4
         with:
           context: ./api
           file: ./api/Dockerfile
           push: true
          build-args: |
-            REGISTRY=${{ 
vars.STAGING_DOCKER_REGISTRY }} + REGISTRY=${{ vars.DOCKER_REGISTRY }} tags: | - ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-api:${{ env.TAG }} - ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-api:latest + ${{ vars.DOCKER_REGISTRY }}/geolake-api:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-api:latest - name: Build and push executor component uses: docker/build-push-action@v4 with: @@ -71,7 +71,7 @@ jobs: file: ./executor/Dockerfile push: true build-args: | - REGISTRY=${{ vars.STAGING_DOCKER_REGISTRY }} + REGISTRY=${{ vars.DOCKER_REGISTRY }} tags: | - ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-executor:${{ env.TAG }} - ${{ vars.STAGING_DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:${{ env.TAG }} + ${{ vars.DOCKER_REGISTRY }}/geolake-executor:latest \ No newline at end of file From 5360b1b8d1c25e3789c07acb8758545b70e981ab Mon Sep 17 00:00:00 2001 From: jamesWalczak Date: Wed, 24 Jan 2024 15:21:10 +0100 Subject: [PATCH 17/31] Update docs --- api/Dockerfile | 2 +- api/app/auth/backend.py | 8 +- api/app/auth/manager.py | 4 +- api/app/auth/models.py | 6 +- api/app/callbacks/on_startup.py | 4 +- api/app/endpoint_handlers/dataset.py | 4 +- api/app/endpoint_handlers/file.py | 6 +- api/app/endpoint_handlers/request.py | 4 +- api/app/exceptions.py | 36 ++++----- api/app/main.py | 48 ++++++------ api/app/validation.py | 4 +- datastore/utils/api_logging.py | 4 +- docker-compose.yaml | 6 +- docs/{docs => }/about.md | 0 docs/api.md | 9 +++ docs/base.md | 2 + docs/{docs => }/broker.md | 0 docs/datastore.md | 3 + docs/docs/api.md | 5 -- docs/docs/drivers.md | 5 -- docs/docs/executor.md | 5 -- docs/docs/img/logo.svg | 3 - docs/executor.md | 5 ++ docs/{docs => }/img/favicon.svg | 0 docs/img/logo.svg | 3 + docs/{docs => }/index.md | 0 docs/{docs => }/installation.md | 0 docs/iot.md | 2 + docs/netcdf.md | 1 + docs/queries.md | 4 + docs/sentinel.md | 7 ++ docs/wrf.md | 2 + drivers/intake_geokube/base.py | 16 ++-- drivers/intake_geokube/iot/driver.py | 9 ++- drivers/intake_geokube/netcdf/driver.py | 34 ++++++++- drivers/intake_geokube/sentinel/driver.py | 13 +++- .../intake_geokube/sentinel/odata_builder.py | 12 ++- drivers/intake_geokube/wrf/driver.py | 2 +- drivers/pyproject.toml | 2 +- executor/Dockerfile | 2 +- executor/app/main.py | 75 ++++++++++++++++++- executor/app/messaging.py | 9 +++ docs/mkdocs.yaml => mkdocs.yaml | 17 ++++- mkdocs.yml | 1 - resources/catalogs/external/e-obs.yaml | 4 +- 45 files changed, 278 insertions(+), 110 deletions(-) rename docs/{docs => }/about.md (100%) create mode 100644 docs/api.md create mode 100644 docs/base.md rename docs/{docs => }/broker.md (100%) create mode 100644 docs/datastore.md delete mode 100644 docs/docs/api.md delete mode 100644 docs/docs/drivers.md delete mode 100644 docs/docs/executor.md delete mode 100644 docs/docs/img/logo.svg create mode 100644 docs/executor.md rename docs/{docs => }/img/favicon.svg (100%) create mode 100644 docs/img/logo.svg rename docs/{docs => }/index.md (100%) rename docs/{docs => }/installation.md (100%) create mode 100644 docs/iot.md create mode 100644 docs/netcdf.md create mode 100644 docs/queries.md create mode 100644 docs/sentinel.md create mode 100644 docs/wrf.md rename docs/mkdocs.yaml => mkdocs.yaml (80%) delete mode 100644 mkdocs.yml diff --git a/api/Dockerfile b/api/Dockerfile index a2cfea0..f0da2fc 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,4 @@ -ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production +ARG 
REGISTRY=rg.nl-ams.scw.cloud/geolake-production
 ARG TAG=latest
 FROM $REGISTRY/geolake-datastore:$TAG
 WORKDIR /app
diff --git a/api/app/auth/backend.py b/api/app/auth/backend.py
index c172b58..885ee55 100644
--- a/api/app/auth/backend.py
+++ b/api/app/auth/backend.py
@@ -9,11 +9,11 @@ from dbmanager.dbmanager import DBManager
 
 import exceptions as exc
-from auth.models import DDSUser
+from auth.models import GeoLakeUser
 from auth import scopes
 
 
-class DDSAuthenticationBackend(AuthenticationBackend):
+class GeoLakeAuthenticationBackend(AuthenticationBackend):
     """Class managing authentication and authorization"""
 
     async def authenticate(self, conn):
@@ -25,7 +25,7 @@ async def authenticate(self, conn):
     def _manage_user_token_auth(self, user_token: str):
         try:
             user_id, api_key = self.get_authorization_scheme_param(user_token)
-        except exc.BaseDDSException as err:
+        except exc.BaseGeoLakeException as err:
             raise err.wrap_around_http_exception()
         user_dto = DBManager().get_user_details(user_id)
         eligible_scopes = [scopes.AUTHENTICATED] + self._get_scopes_for_user(
@@ -35,7 +35,7 @@ def _manage_user_token_auth(self, user_token: str):
             raise exc.AuthenticationFailed(
                 user_dto
             ).wrap_around_http_exception()
-        return AuthCredentials(eligible_scopes), DDSUser(username=user_id)
+        return AuthCredentials(eligible_scopes), GeoLakeUser(username=user_id)
 
     def _get_scopes_for_user(self, user_dto) -> list[str]:
         if user_dto is None:
diff --git a/api/app/auth/manager.py b/api/app/auth/manager.py
index 02bf686..4d09791 100644
--- a/api/app/auth/manager.py
+++ b/api/app/auth/manager.py
@@ -1,10 +1,10 @@
 """Module with access/authentication functions"""
 from typing import Optional
 
-from utils.api_logging import get_dds_logger
+from utils.api_logging import get_geolake_logger
 import exceptions as exc
 
-log = get_dds_logger(__name__)
+log = get_geolake_logger(__name__)
 
 
 def is_role_eligible_for_product(
diff --git a/api/app/auth/models.py b/api/app/auth/models.py
index bff896f..e4c9936 100644
--- a/api/app/auth/models.py
+++ b/api/app/auth/models.py
@@ -2,7 +2,7 @@
 from starlette.authentication import SimpleUser
 
 
-class DDSUser(SimpleUser):
+class GeoLakeUser(SimpleUser):
     """Immutable class containing information about the authenticated user"""
 
     def __init__(self, username: str) -> None:
@@ -13,7 +13,7 @@ def id(self):
         return self.username
 
     def __eq__(self, other) -> bool:
-        if not isinstance(other, DDSUser):
+        if not isinstance(other, GeoLakeUser):
             return False
         if self.username == other.username:
             return True
@@ -23,7 +23,7 @@ def __ne__(self, other):
         return self != other
 
     def __repr__(self):
-        return f""
+        return f""
 
     def __delattr__(self, name):
         if getattr(self, name, None) is not None:
diff --git a/api/app/callbacks/on_startup.py b/api/app/callbacks/on_startup.py
index ec883d3..4e25072 100644
--- a/api/app/callbacks/on_startup.py
+++ b/api/app/callbacks/on_startup.py
@@ -1,9 +1,9 @@
 """Module with functions called during API server startup"""
-from utils.api_logging import get_dds_logger
+from utils.api_logging import get_geolake_logger
 
 from datastore.datastore import Datastore
 
-log = get_dds_logger(__name__)
+log = get_geolake_logger(__name__)
 
 
 def _load_cache() -> None:
diff --git a/api/app/endpoint_handlers/dataset.py b/api/app/endpoint_handlers/dataset.py
index c03a54b..25e6afb 100644
--- a/api/app/endpoint_handlers/dataset.py
+++ b/api/app/endpoint_handlers/dataset.py
@@ -13,7 +13,7 @@ from datastore import exception as datastore_exception
 
 from utils.metrics import log_execution_time
-from utils.api_logging import 
get_dds_logger +from utils.api_logging import get_geolake_logger from auth.manager import ( is_role_eligible_for_product, ) @@ -23,7 +23,7 @@ from . import request -log = get_dds_logger(__name__) +log = get_geolake_logger(__name__) data_store = Datastore() MESSAGE_SEPARATOR = os.environ["MESSAGE_SEPARATOR"] diff --git a/api/app/endpoint_handlers/file.py b/api/app/endpoint_handlers/file.py index 04cf562..140975e 100644 --- a/api/app/endpoint_handlers/file.py +++ b/api/app/endpoint_handlers/file.py @@ -4,11 +4,11 @@ from fastapi.responses import FileResponse from dbmanager.dbmanager import DBManager, RequestStatus -from utils.api_logging import get_dds_logger +from utils.api_logging import get_geolake_logger from utils.metrics import log_execution_time import exceptions as exc -log = get_dds_logger(__name__) +log = get_geolake_logger(__name__) @log_execution_time(log) @@ -33,7 +33,7 @@ def download_request_result(request_id: int): Raises ------- RequestNotYetAccomplished - If dds request was not yet finished + If geolake request was not yet finished FileNotFoundError If file was not found """ diff --git a/api/app/endpoint_handlers/request.py b/api/app/endpoint_handlers/request.py index 93a0636..b503f07 100644 --- a/api/app/endpoint_handlers/request.py +++ b/api/app/endpoint_handlers/request.py @@ -1,11 +1,11 @@ """Modules with functions realizing logic for requests-related endpoints""" from dbmanager.dbmanager import DBManager -from utils.api_logging import get_dds_logger +from utils.api_logging import get_geolake_logger from utils.metrics import log_execution_time import exceptions as exc -log = get_dds_logger(__name__) +log = get_geolake_logger(__name__) @log_execution_time(log) diff --git a/api/app/exceptions.py b/api/app/exceptions.py index af4d072..eb19ad7 100644 --- a/api/app/exceptions.py +++ b/api/app/exceptions.py @@ -1,11 +1,11 @@ -"""Module with DDS exceptions definitions""" +"""Module with GeoLake exceptions definitions""" from typing import Optional from fastapi import HTTPException -class BaseDDSException(BaseException): - """Base class for DDS.api exceptions""" +class BaseGeoLakeException(BaseException): + """Base class for GeoLake.api exceptions""" msg: str = "Bad request" code: int = 400 @@ -18,13 +18,13 @@ def wrap_around_http_exception(self) -> HTTPException: ) -class EmptyUserTokenError(BaseDDSException): +class EmptyUserTokenError(BaseGeoLakeException): """Raised if `User-Token` is empty""" msg: str = "User-Token cannot be empty!" 
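The renaming keeps the error contract of this module intact: every subclass carries a class-level `msg` (and optionally `code`), formats it in `__init__`, and the API endpoints convert all of them uniformly through `wrap_around_http_exception()`. A sketch of a new error written against that contract (the subclass below is hypothetical, for illustration only):

```python
# Hypothetical subclass - follows the pattern of the classes in this module.
class QuotaExceededError(BaseGeoLakeException):
    """Raised when a (hypothetical) per-user quota is exhausted"""

    msg: str = "Quota exhausted for the user: {user_id}"
    code: int = 429

    def __init__(self, user_id: str) -> None:
        self.msg = self.msg.format(user_id=user_id)
        super().__init__(self.msg)

# Endpoints then re-raise it the same way main.py handles every domain error:
#     except exc.BaseGeoLakeException as err:
#         raise err.wrap_around_http_exception() from err
```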
-class ImproperUserTokenError(BaseDDSException):
+class ImproperUserTokenError(BaseGeoLakeException):
     """Raised if `User-Token` format is wrong"""
 
     msg: str = (
@@ -33,7 +33,7 @@ class ImproperUserTokenError(BaseDDSException):
     )
 
 
-class NoEligibleProductInDatasetError(BaseDDSException):
+class NoEligibleProductInDatasetError(BaseGeoLakeException):
     """No eligible products in the dataset Error"""
 
     msg: str = (
@@ -48,7 +48,7 @@ def __init__(self, dataset_id: str, user_roles_names: list[str]) -> None:
         super().__init__(self.msg)
 
 
-class MissingKeyInCatalogEntryError(BaseDDSException):
+class MissingKeyInCatalogEntryError(BaseGeoLakeException):
     """Missing key in the catalog entry"""
 
     msg: str = (
@@ -60,7 +60,7 @@ def __init__(self, key, dataset):
         super().__init__(self.msg)
 
 
-class MaximumAllowedSizeExceededError(BaseDDSException):
+class MaximumAllowedSizeExceededError(BaseGeoLakeException):
     """Estimated size is too big"""
 
     msg: str = (
@@ -81,8 +81,8 @@ def __init__(
         super().__init__(self.msg)
 
 
-class RequestNotYetAccomplished(BaseDDSException):
-    """Raised if dds request was not finished yet"""
+class RequestNotYetAccomplished(BaseGeoLakeException):
+    """Raised if geolake request was not finished yet"""
 
     msg: str = (
         "Request with id: {request_id} does not exist or it is not"
@@ -94,7 +94,7 @@ def __init__(self, request_id):
         super().__init__(self.msg)
 
 
-class RequestNotFound(BaseDDSException):
+class RequestNotFound(BaseGeoLakeException):
     """If the given request could not be found"""
 
     msg: str = "Request with ID '{request_id}' was not found"
@@ -104,7 +104,7 @@ def __init__(self, request_id: int) -> None:
         super().__init__(self.msg)
 
 
-class RequestStatusNotDone(BaseDDSException):
+class RequestStatusNotDone(BaseGeoLakeException):
     """Raised when the submitted request failed"""
 
     msg: str = (
@@ -119,7 +119,7 @@ def __init__(self, request_id, request_status) -> None:
         super().__init__(self.msg)
 
 
-class AuthorizationFailed(BaseDDSException):
+class AuthorizationFailed(BaseGeoLakeException):
     """Raised when the user is not authorized for the given resource"""
 
     msg: str = "{user} is not authorized for the resource!"
@@ -133,7 +133,7 @@ def __init__(self, user_id: Optional[str] = None):
         super().__init__(self.msg)
 
 
-class AuthenticationFailed(BaseDDSException):
+class AuthenticationFailed(BaseGeoLakeException):
     """Raised when the key of the provided user differs from the one
     stored in the DB"""
 
     msg: str = "Authentication of the user '{user_id}' failed!"
@@ -145,7 +145,7 @@ def __init__(self, user_id: str):
         super().__init__(self.msg)
 
 
-class MissingDatasetError(BaseDDSException):
+class MissingDatasetError(BaseGeoLakeException):
     """Raised if the queried dataset is not present in the catalog"""
 
     msg: str = "Dataset '{dataset_id}' does not exist in the catalog!"
@@ -155,7 +155,7 @@ def __init__(self, dataset_id: str): super().__init__(self.msg) -class MissingProductError(BaseDDSException): +class MissingProductError(BaseGeoLakeException): """Raised if the requested product is not defined for the dataset""" msg: str = ( @@ -169,7 +169,7 @@ def __init__(self, dataset_id: str, product_id: str): super().__init__(self.msg) -class EmptyDatasetError(BaseDDSException): +class EmptyDatasetError(BaseGeoLakeException): """The size of the requested dataset is zero""" msg: str = "The resulting dataset '{dataset_id}.{product_id}' is empty" @@ -181,7 +181,7 @@ def __init__(self, dataset_id, product_id): ) super().__init__(self.msg) -class ProductRetrievingError(BaseDDSException): +class ProductRetrievingError(BaseGeoLakeException): """Retrieving of the product failed.""" msg: str = "Retrieving of the product '{dataset_id}.{product_id}' failed with the status {status}" diff --git a/api/app/main.py b/api/app/main.py index 2084394..75f310f 100644 --- a/api/app/main.py +++ b/api/app/main.py @@ -1,4 +1,4 @@ -"""Main module with dekube-dds API endpoints defined""" +"""Main module with dekube-geolake API endpoints defined""" __version__ = "2.0" import os from typing import Optional @@ -21,14 +21,14 @@ from intake_geokube.queries.workflow import Workflow from intake_geokube.queries.geoquery import GeoQuery -from utils.api_logging import get_dds_logger +from utils.api_logging import get_geolake_logger import exceptions as exc from endpoint_handlers import ( dataset_handler, file_handler, request_handler, ) -from auth.backend import DDSAuthenticationBackend +from auth.backend import GeoLakeAuthenticationBackend from callbacks import all_onstartup_callbacks from encoders import extend_json_encoders from const import venv, tags @@ -49,14 +49,14 @@ def map_to_geoquery( format_args=format_kwargs, format=format) return query -logger = get_dds_logger(__name__) +logger = get_geolake_logger(__name__) # ======== JSON encoders extension ========= # extend_json_encoders() app = FastAPI( - title="geokube-dds API", - description="REST API for geokube-dds", + title="geokube-geolake API", + description="REST API for geokube-geolake", version=__version__, contact={ "name": "geokube Contributors", @@ -72,7 +72,7 @@ def map_to_geoquery( # ======== Authentication backend ========= # app.add_middleware( - AuthenticationMiddleware, backend=DDSAuthenticationBackend() + AuthenticationMiddleware, backend=GeoLakeAuthenticationBackend() ) # ======== CORS ========= # @@ -107,9 +107,9 @@ def map_to_geoquery( # ======== Endpoints definitions ========= # @app.get("/", tags=[tags.BASIC]) -async def dds_info(): - """Return current version of the DDS API""" - return f"DDS API {__version__}" +async def geolake_info(): + """Return current version of the GeoLake API""" + return f"GeoLake API {__version__}" @app.get("/datasets", tags=[tags.DATASET]) @@ -123,7 +123,7 @@ async def get_datasets(request: Request): return dataset_handler.get_datasets( user_roles_names=request.auth.scopes ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -145,7 +145,7 @@ async def get_first_product_details( user_roles_names=request.auth.scopes, dataset_id=dataset_id, ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -169,7 +169,7 @@ async def get_product_details( dataset_id=dataset_id, product_id=product_id, ) - except exc.BaseDDSException as err: + except 
exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @app.get("/datasets/{dataset_id}/{product_id}/map", tags=[tags.DATASET]) @@ -219,7 +219,7 @@ async def get_map( product_id=product_id, query=query ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @app.get("/datasets/{dataset_id}/{product_id}/items/{feature_id}", tags=[tags.DATASET]) @@ -264,7 +264,7 @@ async def get_feature( product_id=product_id, query=query ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @app.get("/datasets/{dataset_id}/{product_id}/metadata", tags=[tags.DATASET]) @@ -285,7 +285,7 @@ async def get_metadata( return dataset_handler.get_metadata( dataset_id=dataset_id, product_id=product_id ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -312,7 +312,7 @@ async def estimate( query=query, unit=unit, ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -339,7 +339,7 @@ async def query( product_id=product_id, query=query, ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -360,7 +360,7 @@ async def workflow( user_id=request.user.id, workflow=tasks, ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -376,7 +376,7 @@ async def get_requests( app.state.api_http_requests_total.inc({"route": "GET /requests"}) try: return request_handler.get_requests(request.user.id) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -398,7 +398,7 @@ async def get_request_status( return request_handler.get_request_status( user_id=request.user.id, request_id=request_id ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -420,7 +420,7 @@ async def get_request_resulting_size( return request_handler.get_request_resulting_size( request_id=request_id ) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -440,7 +440,7 @@ async def get_request_uri( ) try: return request_handler.get_request_uri(request_id=request_id) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err @@ -460,7 +460,7 @@ async def download_request_result( ) try: return file_handler.download_request_result(request_id=request_id) - except exc.BaseDDSException as err: + except exc.BaseGeoLakeException as err: raise err.wrap_around_http_exception() from err except FileNotFoundError as err: raise HTTPException( diff --git a/api/app/validation.py b/api/app/validation.py index 51bdbc1..150b173 100644 --- a/api/app/validation.py +++ b/api/app/validation.py @@ -1,12 +1,12 @@ from datastore.datastore import Datastore -from utils.api_logging import get_dds_logger +from utils.api_logging import get_geolake_logger from decorators_factory import assert_parameters_are_defined, bind_arguments from functools import wraps from inspect import signature import exceptions as exc -log = get_dds_logger(__name__) +log = get_geolake_logger(__name__) def assert_product_exists(func): diff --git 
a/datastore/utils/api_logging.py b/datastore/utils/api_logging.py index 58d148d..ec49291 100644 --- a/datastore/utils/api_logging.py +++ b/datastore/utils/api_logging.py @@ -3,11 +3,11 @@ import logging as default_logging -def get_dds_logger( +def get_geolake_logger( name: str, level: Literal["debug", "info", "warning", "error", "critical"] = "info", ): - """Get DDS logger with the expected format, handlers and formatter. + """Get GeoLake logger with the expected format, handlers and formatter. Parameters ---------- diff --git a/docker-compose.yaml b/docker-compose.yaml index be85d41..01bc7b8 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -41,6 +41,6 @@ services: ports: - 5432:5432 environment: - POSTGRES_DB: dds - POSTGRES_USER: dds - POSTGRES_PASSWORD: dds \ No newline at end of file + POSTGRES_DB: geolake + POSTGRES_USER: geolake + POSTGRES_PASSWORD: geolake \ No newline at end of file diff --git a/docs/docs/about.md b/docs/about.md similarity index 100% rename from docs/docs/about.md rename to docs/about.md diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 0000000..2619ae0 --- /dev/null +++ b/docs/api.md @@ -0,0 +1,9 @@ +::: api.app.main + +::: api.app.endpoint_handlers.dataset + +::: api.app.endpoint_handlers.file + +::: api.app.endpoint_handlers.request + + diff --git a/docs/base.md b/docs/base.md new file mode 100644 index 0000000..99fdf4c --- /dev/null +++ b/docs/base.md @@ -0,0 +1,2 @@ +::: drivers.intake_geokube.base + diff --git a/docs/docs/broker.md b/docs/broker.md similarity index 100% rename from docs/docs/broker.md rename to docs/broker.md diff --git a/docs/datastore.md b/docs/datastore.md new file mode 100644 index 0000000..3e30984 --- /dev/null +++ b/docs/datastore.md @@ -0,0 +1,3 @@ +::: datastore.datastore + +::: dbmanager.dbmanager diff --git a/docs/docs/api.md b/docs/docs/api.md deleted file mode 100644 index 836418a..0000000 --- a/docs/docs/api.md +++ /dev/null @@ -1,5 +0,0 @@ -# REST API - -## Description - -## Docstring \ No newline at end of file diff --git a/docs/docs/drivers.md b/docs/docs/drivers.md deleted file mode 100644 index a8a0052..0000000 --- a/docs/docs/drivers.md +++ /dev/null @@ -1,5 +0,0 @@ -# Drivers - -## Description - -## Docstring \ No newline at end of file diff --git a/docs/docs/executor.md b/docs/docs/executor.md deleted file mode 100644 index a003890..0000000 --- a/docs/docs/executor.md +++ /dev/null @@ -1,5 +0,0 @@ -# Executor - -## Description - -## Docstring \ No newline at end of file diff --git a/docs/docs/img/logo.svg b/docs/docs/img/logo.svg deleted file mode 100644 index 24aa2af..0000000 --- a/docs/docs/img/logo.svg +++ /dev/null @@ -1,3 +0,0 @@ - - -
[deleted docs/docs/img/logo.svg: draw.io-exported SVG markup omitted; the logo renders the "geoLake" wordmark, and the trailing "Text is not SVG - cannot display" is the export's own fallback text]
\ No newline at end of file diff --git a/docs/executor.md b/docs/executor.md new file mode 100644 index 0000000..c14a3a4 --- /dev/null +++ b/docs/executor.md @@ -0,0 +1,5 @@ +::: executor.app.main + +::: executor.app.messaging + +::: executor.app.meta \ No newline at end of file diff --git a/docs/docs/img/favicon.svg b/docs/img/favicon.svg similarity index 100% rename from docs/docs/img/favicon.svg rename to docs/img/favicon.svg diff --git a/docs/img/logo.svg b/docs/img/logo.svg new file mode 100644 index 0000000..e7bc09c --- /dev/null +++ b/docs/img/logo.svg @@ -0,0 +1,3 @@ + + +
[added docs/img/logo.svg: the same draw.io-exported "geoLake" wordmark; SVG markup omitted]
\ No newline at end of file diff --git a/docs/docs/index.md b/docs/index.md similarity index 100% rename from docs/docs/index.md rename to docs/index.md diff --git a/docs/docs/installation.md b/docs/installation.md similarity index 100% rename from docs/docs/installation.md rename to docs/installation.md diff --git a/docs/iot.md b/docs/iot.md new file mode 100644 index 0000000..aaab170 --- /dev/null +++ b/docs/iot.md @@ -0,0 +1,2 @@ +::: drivers.intake_geokube.iot.driver + diff --git a/docs/netcdf.md b/docs/netcdf.md new file mode 100644 index 0000000..8aaf9a9 --- /dev/null +++ b/docs/netcdf.md @@ -0,0 +1 @@ +::: drivers.intake_geokube.netcdf.driver diff --git a/docs/queries.md b/docs/queries.md new file mode 100644 index 0000000..51f8ecb --- /dev/null +++ b/docs/queries.md @@ -0,0 +1,4 @@ +::: drivers.intake_geokube.queries.geoquery + +::: drivers.intake_geokube.queries.workflow + diff --git a/docs/sentinel.md b/docs/sentinel.md new file mode 100644 index 0000000..db04d7b --- /dev/null +++ b/docs/sentinel.md @@ -0,0 +1,7 @@ +::: drivers.intake_geokube.sentinel.driver + + +::: drivers.intake_geokube.sentinel.odata_builder + + +::: drivers.intake_geokube.sentinel.auth diff --git a/docs/wrf.md b/docs/wrf.md new file mode 100644 index 0000000..2c20d04 --- /dev/null +++ b/docs/wrf.md @@ -0,0 +1,2 @@ +::: drivers.intake_geokube.wrf.driver + diff --git a/drivers/intake_geokube/base.py b/drivers/intake_geokube/base.py index e070427..efbefd5 100644 --- a/drivers/intake_geokube/base.py +++ b/drivers/intake_geokube/base.py @@ -16,7 +16,7 @@ class AbstractBaseDriver(ABC, DataSourceBase): - """Abstract base class for all DDS-related drivers.""" + """Abstract base class for all GeoLake-related drivers.""" name: str = _NOT_SET version: str = _NOT_SET @@ -40,10 +40,10 @@ def __init__(self, *, metadata: dict) -> None: @classmethod def __configure_logger(cls) -> logging.Logger: - log = logging.getLogger(f"dds.intake.{cls.__name__}") - level = os.environ.get("DDS_LOG_LEVEL", "INFO") + log = logging.getLogger(f"geolake.intake.{cls.__name__}") + level = os.environ.get("GeoLake_LOG_LEVEL", "INFO") logformat = os.environ.get( - "DDS_LOG_FORMAT", + "GeoLake_LOG_FORMAT", "%(asctime)s %(name)s %(funcName)s %(levelname)s %(message)s", ) log.setLevel(level) # type: ignore[arg-type] @@ -76,13 +76,19 @@ def process(self, query: GeoQuery) -> Any: Parameters ---------- - query: GeoQuery + query: `queries.GeoQuery` A query to use for data processing Results ------- res: Any Result of `query` processing + + Examples + -------- + ```python + >>> data = catalog['dataset']['product'].process(query) + ``` """ data_ = self.read() return self._process_geokube_dataset(data_, query=query, compute=True) diff --git a/drivers/intake_geokube/iot/driver.py b/drivers/intake_geokube/iot/driver.py index 93c52cd..5d11dc5 100644 --- a/drivers/intake_geokube/iot/driver.py +++ b/drivers/intake_geokube/iot/driver.py @@ -101,7 +101,12 @@ def _get_schema(self): return {"stream": str(self.stream)} def read(self) -> streamz.dataframe.core.DataFrame: - """Read IoT data.""" + """Read IoT data. 
+ + Returns + ------- + stream : `streamz.dataframe.DataFrame` + """ self.log.info("reading stream...") self._get_schema() return self.stream @@ -123,7 +128,7 @@ def process(self, query: GeoQuery) -> streamz.dataframe.core.DataFrame: Returns ------- - stream : streamz.dataframe.core.DataFrame + stream : streamz.dataframe.DataFrame A DataFrame object with streamed content """ df = d[0]
diff --git a/drivers/intake_geokube/netcdf/driver.py b/drivers/intake_geokube/netcdf/driver.py index e29cbfa..ac9e61f 100644 --- a/drivers/intake_geokube/netcdf/driver.py +++ b/drivers/intake_geokube/netcdf/driver.py @@ -1,4 +1,4 @@ -"""NetCDF driver for DDS.""" +"""NetCDF driver for GeoLake.""" from geokube import open_datacube, open_dataset from geokube.core.datacube import DataCube @@ -48,7 +48,22 @@ def _arguments(self) -> dict: } | self.xarray_kwargs def read(self) -> Dataset | DataCube: - """Read netCDF.""" + """Read netCDF into geokube.Dataset or geokube.DataCube. + + If `pattern` is set for a product, the method will return + a `geokube.Dataset` with `dask.Delayed` objects instead of + `geokube.DataCube`s. + + Returns + ------- + cube : `geokube.Dataset` or `geokube.DataCube` + + Examples + -------- + ```python + >>> data = catalog['era5']['reanalysis'].read() + ``` + """ if self.pattern: return open_dataset( pattern=self.pattern, delay_read_cubes=True, **self._arguments @@ -56,7 +71,20 @@ def read(self) -> Dataset | DataCube: return open_datacube(**self._arguments) def load(self) -> Dataset | DataCube: - """Load netCDF.""" + """Load netCDF into geokube.Dataset or geokube.DataCube. + + All cubes will be computed on loading. + + Returns + ------- + cube : `geokube.Dataset` or `geokube.DataCube` + + Examples + -------- + ```python + >>> data = catalog['era5']['reanalysis'].load() + ``` + """ if self.pattern: return open_dataset( pattern=self.pattern, delay_read_cubes=False, **self._arguments
diff --git a/drivers/intake_geokube/sentinel/driver.py b/drivers/intake_geokube/sentinel/driver.py index 4895103..88875c7 100644 --- a/drivers/intake_geokube/sentinel/driver.py +++ b/drivers/intake_geokube/sentinel/driver.py @@ -323,7 +323,18 @@ def load(self) -> NoReturn: ) def process(self, query: GeoQuery) -> Dataset: - """Process query for sentinel data.""" + """Process sentinel data according to the `query`. + + Returns + ------- + cube : `geokube.Dataset` + + Examples + -------- + ```python + >>> data = catalog['sentinel']['prod_name'].process(query) + ``` + """ self.log.info("builder odata request based on passed geoquery...") req = self._build_odata_from_geoquery(query) self.log.info("downloading data...")
diff --git a/drivers/intake_geokube/sentinel/odata_builder.py b/drivers/intake_geokube/sentinel/odata_builder.py index 4036810..64bd0a7 100644 --- a/drivers/intake_geokube/sentinel/odata_builder.py +++ b/drivers/intake_geokube/sentinel/odata_builder.py @@ -26,7 +26,17 @@ def datetime_to_isoformat(date: str | datetime) -> str: - """Convert string of datetime object to ISO datetime string.""" + """Convert a string or datetime object to an ISO datetime string.
+ + Parameters + ---------- + date : `str` or `datetime` + + Returns + ------- + date_str : str + An ISO-compliant datetime string + """ if isinstance(date, str): try: value = pd.to_datetime([date]).item().isoformat()
diff --git a/drivers/intake_geokube/wrf/driver.py b/drivers/intake_geokube/wrf/driver.py index d819760..018748e 100644 --- a/drivers/intake_geokube/wrf/driver.py +++ b/drivers/intake_geokube/wrf/driver.py @@ -1,4 +1,4 @@ -"""WRF driver for DDS.""" +"""WRF driver for GeoLake.""" from functools import partial from typing import Any
diff --git a/drivers/pyproject.toml b/drivers/pyproject.toml index 2f0a6d5..ba82fc4 100644 --- a/drivers/pyproject.toml +++ b/drivers/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "geolake-drivers" -description = "opengeokube DDS driver." +description = "opengeokube GeoLake driver." requires-python = ">=3.10" readme = "README.md" license = {file = "LICENSE"}
diff --git a/executor/Dockerfile b/executor/Dockerfile index db3cebb..5aed976 100644 --- a/executor/Dockerfile +++ b/executor/Dockerfile @@ -1,4 +1,4 @@ -ARG REGISTRY=rg.nl-ams.scw.cloud/geodds-production +ARG REGISTRY=rg.nl-ams.scw.cloud/geolake-production ARG TAG=latest ARG SENTINEL_USERNAME=... ARG SENTINEL_PASSWORD=...
diff --git a/executor/app/main.py b/executor/app/main.py index 35b90fe..4bcd076 100644 --- a/executor/app/main.py +++ b/executor/app/main.py @@ -1,3 +1,4 @@ +"""Module with executor logic.""" import os import time import datetime @@ -81,7 +82,7 @@ def rcp85_filename_condition(kube: DataCube, message: Message) -> bool: def get_history_message(): return ( - f"Generated by CMCC DDS version 0.9.0 {str(datetime.datetime.now())}" + f"Generated by CMCC GeoLake version 0.9.0 {str(datetime.datetime.now())}" ) @@ -90,6 +91,17 @@ def persist_datacube( message: Message, base_path: str | os.PathLike, ) -> str | os.PathLike: + """Save `geokube.DataCube` given the `message` and `base_path`. + + Parameters + ---------- + kube : `geokube.DataCube` + A data cube to save + message : `Message` + A message with details like dataset or product ID + base_path : `str` + Base path to save + """ if rcp85_filename_condition(kube, message): path = get_file_name_for_climate_downscaled(kube, message) else: @@ -136,6 +148,19 @@ def persist_dataset( message: Message, base_path: str | os.PathLike, ): + """Save `geokube.Dataset` given the `message` and `base_path`. + + Under the hood, uses the `persist_datacube` function. + + Parameters + ---------- + kube : `geokube.Dataset` + A dataset to save + message : `Message` + A message with details like dataset or product ID + base_path : `str` + Base path to save + """ def _get_attr_comb(dataframe_item, attrs): return "_".join([dataframe_item[attr_name] for attr_name in attrs]) @@ -214,6 +239,15 @@ def _persist_single_datacube(dataframe_item, base_path, format, format_args=None def process(message: Message, compute: bool): + """Process a message and compute (if needed).
+ + Parameters + ---------- + message : `Message` + A message to process + compute : bool + A flag indicating whether the result should be computed + """ res_path = os.path.join(_BASE_DOWNLOAD_PATH, message.request_id) os.makedirs(res_path, exist_ok=True) match message.type: @@ -247,6 +281,7 @@ def process(message: Message, compute: bool): class Executor(metaclass=LoggableMeta): + """Executor class definition""" _LOG = logging.getLogger("geokube.Executor") def __init__(self, broker, store_path): @@ -259,6 +294,13 @@ def __init__(self, broker, store_path): self._db = DBManager() def create_dask_cluster(self, dask_cluster_opts: dict = None): + """Create a Dask cluster with given options. + + Parameters + ---------- + dask_cluster_opts : optional `dict` + A dictionary with cluster parameters. + """ if dask_cluster_opts is None: dask_cluster_opts = {} dask_cluster_opts["scheduler_port"] = int( @@ -292,6 +334,16 @@ def create_dask_cluster(self, dask_cluster_opts: dict = None): self._nanny = Nanny(self._dask_client.cluster.scheduler.address) def maybe_restart_cluster(self, status: RequestStatus): + """Restart the running Dask cluster when needed. + + Restart the cluster if the request status was set to `TIMEOUT` or + the cluster died. + + Parameters + ---------- + status : `RequestStatus` + A status of a request being processed by the cluster. + """ if status is RequestStatus.TIMEOUT: self._LOG.info("recreating the cluster due to timeout") self._dask_client.cluster.close() @@ -311,9 +363,7 @@ def maybe_restart_cluster(self, status: RequestStatus): self.create_dask_cluster() def ack_message(self, channel, delivery_tag): - """Note that `channel` must be the same pika channel instance via which - the message being ACKed was retrieved (AMQP protocol constraint). - """ + """Acknowledge the broker message.""" if channel.is_open: channel.basic_ack(delivery_tag) else: @@ -329,6 +379,23 @@ def retry_until_timeout( retries: int = 30, sleep_time: int = 10, ): + """Retry processing the `future` object. + + Parameters + ---------- + future : `Future` + A future object being computed + message : `Message` + A message object + retries : `int`, default `30` + The number of trials + sleep_time : `int`, default `10` + The number of seconds to sleep between trials + + Returns + ------- + result : `tuple` of (location_path, status, fail_reason) + """ assert retries is not None, "`retries` cannot be `None`" assert sleep_time is not None, "`sleep_time` cannot be `None`" status = fail_reason = location_path = None
diff --git a/executor/app/messaging.py b/executor/app/messaging.py index 37ce25a..f135298 100644 --- a/executor/app/messaging.py +++ b/executor/app/messaging.py @@ -1,3 +1,4 @@ +"""Module contains definitions of messages in the executor.""" import os import logging from enum import Enum @@ -14,6 +15,7 @@ class MessageType(Enum): class Message: + """Message class definition.""" _LOG = logging.getLogger("geokube.Message") request_id: int @@ -23,6 +25,13 @@ class Message: content: GeoQuery | Workflow def __init__(self, load: bytes) -> None: + """Create `Message` instances.
+ + Parameters + ---------- + load : `bytes` + Bytes containing message load + """ self.request_id, msg_type, *query = load.decode().split( MESSAGE_SEPARATOR ) diff --git a/docs/mkdocs.yaml b/mkdocs.yaml similarity index 80% rename from docs/mkdocs.yaml rename to mkdocs.yaml index b2a8154..c192543 100644 --- a/docs/mkdocs.yaml +++ b/mkdocs.yaml @@ -36,8 +36,21 @@ nav: - GeoLake: index.md - installation.md - References: - - drivers.md + - queries.md + - Drivers: + - base.md + - netcdf.md + - sentinel.md + - iot.md + - wrf.md + - datastore.md - broker.md - api.md - executor.md -- about.md \ No newline at end of file +- about.md + +plugins: +- mkdocstrings: + handlers: + python: + paths: [drivers, executor, api, datastore] \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index c97182f..0000000 --- a/mkdocs.yml +++ /dev/null @@ -1 +0,0 @@ -site_name: My Docs diff --git a/resources/catalogs/external/e-obs.yaml b/resources/catalogs/external/e-obs.yaml index 2dc4a49..a35d853 100644 --- a/resources/catalogs/external/e-obs.yaml +++ b/resources/catalogs/external/e-obs.yaml @@ -1,7 +1,7 @@ metadata: description: >- E-OBS is a daily gridded land-only observational dataset over Europe. The blended time series from the station network of the European Climate Assessment & Dataset (ECA&D) project form the basis for the E-OBS gridded dataset. All station data are sourced directly from the European National Meteorological and Hydrological Services (NMHSs) or other data holding institutions. For a considerable number of countries the number of stations used is the complete national network and therefore much more dense than the station network that is routinely shared among NMHSs (which is the basis of other gridded datasets). The density of stations gradually increases through collaborations with NMHSs within European research contracts. Initially, in 2008, this gridded dataset was developed to provide validation for the suite of Europe-wide climate model simulations produced as part of the European Union ENSEMBLES project. While E-OBS remains an important dataset for model validation, it is also used more generally for monitoring the climate across Europe, particularly with regard to the assessment of the magnitude and frequency of daily extremes. The position of E-OBS is unique in Europe because of the relatively high spatial horizontal grid spacing, the daily resolution of the dataset, the provision of multiple variables and the length of the dataset. Finally, the station data on which E-OBS is based are available through the ECA&D webpages (where the owner of the data has given permission to do so). In these respects it contrasts with other datasets. The dataset is daily, meaning the observations cover 24 hours per time step. The exact 24-hour period can be different per region. The reason for this is that some data providers measure between midnight to midnight while others might measure from morning to morning. Since E-OBS is an observational dataset, no attempts have been made to adjust time series for this 24-hour offset. It is made sure, where known, that the largest part of the measured 24-hour period corresponds to the day attached to the time step in E-OBS (and ECA&D). 
- contact: dds-support@cmcc.it + contact: geolake-support@cmcc.it label: E-OBS daily gridded meteorological data for Europe from 1950 to present image: https://diasfiles.cmccos.it/images/e-obs.png doi: https://doi.org/10.24381/cds.151d3ec6 @@ -19,7 +19,7 @@ sources: args: pattern: 'e-obs-ensemble-{ensemble}-var_{var}-resolution_{resolution}-version_{version}.0e.nc' path: '/code/app/resources/netcdfs/e-obs-ensemble-mean-var_*.nc' - id_pattern: '{__ddsapi_name}' + id_pattern: '{__geolakeapi_name}' delay_read_cubes: false metadata_caching: true metadata_cache_path: 'e-obs.cache'
From 8d2a761b592dd4bf9e3db222efe94248b25f2701 Mon Sep 17 00:00:00 2001 From: jamesWalczak Date: Wed, 24 Jan 2024 15:26:34 +0100 Subject: [PATCH 18/31] Add authors --- docs/about.md | 9 ++++++++- mkdocs.yaml | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/docs/about.md b/docs/about.md index 2265138..2215ef7 100644 --- a/docs/about.md +++ b/docs/about.md @@ -1,6 +1,13 @@ # About ## Authors +The following authors contributed to the `geolake` platform. +| Authors | +|--------------------| +| Marco Mancini ORCID logo | +| Mirko Stojiljković ORCID logo | +| Jakub Walczak ORCID logo | -## Acknowledegement \ No newline at end of file +## Acknowledgement +The `geolake` platform was funded by the CMCC Foundation. \ No newline at end of file
diff --git a/mkdocs.yaml b/mkdocs.yaml index c192543..e046d80 100644 --- a/mkdocs.yaml +++ b/mkdocs.yaml @@ -34,7 +34,7 @@ repo_url: https://github.com/CMCC-Foundation/geolake edit_uri: '' nav: - GeoLake: index.md -- installation.md +# - installation.md - References: - queries.md
From 035526004b58df97a8d2a49f30a490d317d9f4b9 Mon Sep 17 00:00:00 2001 From: jamesWalczak Date: Fri, 26 Jan 2024 10:48:15 +0100 Subject: [PATCH 19/31] Update use dir urls --- mkdocs.yaml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/mkdocs.yaml b/mkdocs.yaml index e046d80..44d11f0 100644 --- a/mkdocs.yaml +++ b/mkdocs.yaml @@ -7,16 +7,6 @@ theme: scheme: default primary: blue grey accent: amber - toggle: - icon: material/lightbulb - name: Switch to dark mode - - media: '(prefers-color-scheme: dark)' - scheme: slate - primary: blue grey - accent: amber - toggle: - icon: material/lightbulb-outline - name: Switch to light mode features: - search.suggest - search.highlight @@ -32,6 +22,7 @@ theme: repo_name: CMCC-Foundation/geolake repo_url: https://github.com/CMCC-Foundation/geolake edit_uri: '' +use_directory_urls: false nav: - GeoLake: index.md # - installation.md
From 6e1dd32f756f1225a46c72f77de91e55b4a6b185 Mon Sep 17 00:00:00 2001 From: jamesWalczak Date: Fri, 26 Jan 2024 10:51:08 +0100 Subject: [PATCH 20/31] Add built sites --- site/404.html | 598 ++ site/about.html | 728 ++ site/api.html | 4366 +++++++++++ site/assets/_mkdocstrings.css | 109 + site/assets/images/favicon.png | Bin 0 -> 1870 bytes .../assets/javascripts/bundle.7389ff0e.min.js | 29 + .../javascripts/bundle.7389ff0e.min.js.map | 7 + .../javascripts/lunr/min/lunr.ar.min.js | 1 + .../javascripts/lunr/min/lunr.da.min.js | 18 + .../javascripts/lunr/min/lunr.de.min.js | 18 + .../javascripts/lunr/min/lunr.du.min.js | 18 + .../javascripts/lunr/min/lunr.el.min.js | 1 + .../javascripts/lunr/min/lunr.es.min.js | 18 + .../javascripts/lunr/min/lunr.fi.min.js | 18 + .../javascripts/lunr/min/lunr.fr.min.js | 18 + .../javascripts/lunr/min/lunr.he.min.js | 1 + .../javascripts/lunr/min/lunr.hi.min.js | 1 + .../javascripts/lunr/min/lunr.hu.min.js | 18 + .../javascripts/lunr/min/lunr.hy.min.js | 1 +
.../javascripts/lunr/min/lunr.it.min.js | 18 + .../javascripts/lunr/min/lunr.ja.min.js | 1 + .../javascripts/lunr/min/lunr.jp.min.js | 1 + .../javascripts/lunr/min/lunr.kn.min.js | 1 + .../javascripts/lunr/min/lunr.ko.min.js | 1 + .../javascripts/lunr/min/lunr.multi.min.js | 1 + .../javascripts/lunr/min/lunr.nl.min.js | 18 + .../javascripts/lunr/min/lunr.no.min.js | 18 + .../javascripts/lunr/min/lunr.pt.min.js | 18 + .../javascripts/lunr/min/lunr.ro.min.js | 18 + .../javascripts/lunr/min/lunr.ru.min.js | 18 + .../javascripts/lunr/min/lunr.sa.min.js | 1 + .../lunr/min/lunr.stemmer.support.min.js | 1 + .../javascripts/lunr/min/lunr.sv.min.js | 18 + .../javascripts/lunr/min/lunr.ta.min.js | 1 + .../javascripts/lunr/min/lunr.te.min.js | 1 + .../javascripts/lunr/min/lunr.th.min.js | 1 + .../javascripts/lunr/min/lunr.tr.min.js | 18 + .../javascripts/lunr/min/lunr.vi.min.js | 1 + .../javascripts/lunr/min/lunr.zh.min.js | 1 + site/assets/javascripts/lunr/tinyseg.js | 206 + site/assets/javascripts/lunr/wordcut.js | 6708 +++++++++++++++++ .../workers/search.c011b7c0.min.js | 42 + .../workers/search.c011b7c0.min.js.map | 7 + site/assets/stylesheets/main.50c56a3b.min.css | 1 + .../stylesheets/main.50c56a3b.min.css.map | 1 + .../stylesheets/palette.06af60db.min.css | 1 + .../stylesheets/palette.06af60db.min.css.map | 1 + site/base.html | 1357 ++++ site/broker.html | 712 ++ site/datastore.html | 3334 ++++++++ site/executor.html | 2936 ++++++++ site/img/favicon.svg | 3 + site/img/logo.svg | 3 + site/index.html | 638 ++ site/installation.html | 620 ++ site/iot.html | 1332 ++++ site/netcdf.html | 1230 +++ site/objects.inv | Bin 0 -> 1270 bytes site/queries.html | 1773 +++++ site/sentinel.html | 3188 ++++++++ site/sitemap.xml | 3 + site/sitemap.xml.gz | Bin 0 -> 127 bytes site/wrf.html | 1364 ++++ 63 files changed, 31584 insertions(+) create mode 100644 site/404.html create mode 100644 site/about.html create mode 100644 site/api.html create mode 100644 site/assets/_mkdocstrings.css create mode 100644 site/assets/images/favicon.png create mode 100644 site/assets/javascripts/bundle.7389ff0e.min.js create mode 100644 site/assets/javascripts/bundle.7389ff0e.min.js.map create mode 100644 site/assets/javascripts/lunr/min/lunr.ar.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.da.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.de.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.du.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.el.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.es.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.fi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.fr.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.he.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hu.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hy.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.it.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ja.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.jp.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.kn.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ko.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.multi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.nl.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.no.min.js create mode 100644 
site/assets/javascripts/lunr/min/lunr.pt.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ro.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ru.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.sa.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.stemmer.support.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.sv.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ta.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.te.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.th.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.tr.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.vi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.zh.min.js create mode 100644 site/assets/javascripts/lunr/tinyseg.js create mode 100644 site/assets/javascripts/lunr/wordcut.js create mode 100644 site/assets/javascripts/workers/search.c011b7c0.min.js create mode 100644 site/assets/javascripts/workers/search.c011b7c0.min.js.map create mode 100644 site/assets/stylesheets/main.50c56a3b.min.css create mode 100644 site/assets/stylesheets/main.50c56a3b.min.css.map create mode 100644 site/assets/stylesheets/palette.06af60db.min.css create mode 100644 site/assets/stylesheets/palette.06af60db.min.css.map create mode 100644 site/base.html create mode 100644 site/broker.html create mode 100644 site/datastore.html create mode 100644 site/executor.html create mode 100644 site/img/favicon.svg create mode 100644 site/img/logo.svg create mode 100644 site/index.html create mode 100644 site/installation.html create mode 100644 site/iot.html create mode 100644 site/netcdf.html create mode 100644 site/objects.inv create mode 100644 site/queries.html create mode 100644 site/sentinel.html create mode 100644 site/sitemap.xml create mode 100644 site/sitemap.xml.gz create mode 100644 site/wrf.html diff --git a/site/404.html b/site/404.html new file mode 100644 index 0000000..fef054d --- /dev/null +++ b/site/404.html @@ -0,0 +1,598 @@ + + + + + + + + + + + + + + + + + + + + + GeoLake + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ +

404 - Not found

+ +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/site/about.html b/site/about.html new file mode 100644 index 0000000..439222f --- /dev/null +++ b/site/about.html @@ -0,0 +1,728 @@ + + + + + + + + + + + + + + + + + + + + + + + About - GeoLake + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

About

+

Authors

+

Following authors contributed to the geolake platform.

+ + + + + + + + + + + + + + + + + +
Authors
Marco Mancini ORCID logo
Mirko Stojiljković ORCID logo
Jakub Walczak ORCID logo
+

Acknowledegement

+

The geolake platform was funded by the CMCC Foundation

+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/site/api.html b/site/api.html new file mode 100644 index 0000000..82f9972 --- /dev/null +++ b/site/api.html @@ -0,0 +1,4366 @@ + + + + + + + + + + + + + + + + + + + + + + + + + Api - GeoLake + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+
+ + + + +

Api

+ +
+ + + + +
+ +

Main module with dekube-geolake API endpoints defined

+ + + +
+ + + + + + + + + + +
+ + + +

+ download_request_result(request, request_id) + + + async + + +

+ + +
+ +

Download result of the request

+ +
+ Source code in api/app/main.py +
447
+448
+449
+450
+451
+452
+453
+454
+455
+456
+457
+458
+459
+460
+461
+462
+463
+464
+465
+466
+467
+468
@app.get("/download/{request_id}", tags=[tags.REQUEST])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /download/{request_id}"},
+)
+# @requires([scopes.AUTHENTICATED]) # TODO: mange download auth in the web component
+async def download_request_result(
+    request: Request,
+    request_id: int,
+):
+    """Download result of the request"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /download/{request_id}"}
+    )
+    try:
+        return file_handler.download_request_result(request_id=request_id)
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+    except FileNotFoundError as err:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail="File was not found!"
+        ) from err
+
+
+
+ +
+ + +
+ + + +

+ estimate(request, dataset_id, product_id, query, unit=None) + + + async + + +

+ + +
+ +

Estimate the resulting size of the query

+ +
+ Source code in api/app/main.py +
292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
@app.post("/datasets/{dataset_id}/{product_id}/estimate", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "POST /datasets/{dataset_id}/{product_id}/estimate"},
+)
+async def estimate(
+    request: Request,
+    dataset_id: str,
+    product_id: str,
+    query: GeoQuery,
+    unit: str = None,
+):
+    """Estimate the resulting size of the query"""
+    app.state.api_http_requests_total.inc(
+        {"route": "POST /datasets/{dataset_id}/{product_id}/estimate"}
+    )
+    try:
+        return dataset_handler.estimate(
+            dataset_id=dataset_id,
+            product_id=product_id,
+            query=query,
+            unit=unit,
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ geolake_info() + + + async + + +

+ + +
+ +

Return current version of the GeoLake API

+ +
+ Source code in api/app/main.py +
109
+110
+111
+112
@app.get("/", tags=[tags.BASIC])
+async def geolake_info():
+    """Return current version of the GeoLake API"""
+    return f"GeoLake API {__version__}"
+
+
+
+ +
+ + +
+ + + +

+ get_datasets(request) + + + async + + +

+ + +
+ +

List all products eligible for a user defined by user_token

+ +
+ Source code in api/app/main.py +
115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
@app.get("/datasets", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds, labels={"route": "GET /datasets"}
+)
+async def get_datasets(request: Request):
+    """List all products eligible for a user defined by user_token"""
+    app.state.api_http_requests_total.inc({"route": "GET /datasets"})
+    try:
+        return dataset_handler.get_datasets(
+            user_roles_names=request.auth.scopes
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_first_product_details(request, dataset_id) + + + async + + +

+ + +
+ +

Get details for the 1st product of the dataset

+ +
+ Source code in api/app/main.py +
130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
@app.get("/datasets/{dataset_id}", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /datasets/{dataset_id}"},
+)
+async def get_first_product_details(
+    request: Request,
+    dataset_id: str,
+):
+    """Get details for the 1st product of the dataset"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /datasets/{dataset_id}"}
+    )
+    try:
+        return dataset_handler.get_product_details(
+            user_roles_names=request.auth.scopes,
+            dataset_id=dataset_id,
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_metadata(request, dataset_id, product_id) + + + async + + +

+ + +
+ +

Get metadata of the given product

+ +
+ Source code in api/app/main.py +
270
+271
+272
+273
+274
+275
+276
+277
+278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
@app.get("/datasets/{dataset_id}/{product_id}/metadata", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /datasets/{dataset_id}/{product_id}/metadata"},
+)
+async def get_metadata(
+    request: Request,
+    dataset_id: str,
+    product_id: str,
+):
+    """Get metadata of the given product"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /datasets/{dataset_id}/{product_id}/metadata"}
+    )
+    try:
+        return dataset_handler.get_metadata(
+            dataset_id=dataset_id, product_id=product_id
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_product_details(request, dataset_id, product_id) + + + async + + +

+ + +
+ +

Get details for the requested product if user is authorized

+ +
+ Source code in api/app/main.py +
152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
@app.get("/datasets/{dataset_id}/{product_id}", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /datasets/{dataset_id}/{product_id}"},
+)
+async def get_product_details(
+    request: Request,
+    dataset_id: str,
+    product_id: str,
+):
+    """Get details for the requested product if user is authorized"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /datasets/{dataset_id}/{product_id}"}
+    )
+    try:
+        return dataset_handler.get_product_details(
+            user_roles_names=request.auth.scopes,
+            dataset_id=dataset_id,
+            product_id=product_id,
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_request_resulting_size(request, request_id) + + + async + + +

+ + +
+ +

Get size of the file being the result of the request

+ +
+ Source code in api/app/main.py +
405
+406
+407
+408
+409
+410
+411
+412
+413
+414
+415
+416
+417
+418
+419
+420
+421
+422
+423
+424
@app.get("/requests/{request_id}/size", tags=[tags.REQUEST])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /requests/{request_id}/size"},
+)
+@requires([scopes.AUTHENTICATED])
+async def get_request_resulting_size(
+    request: Request,
+    request_id: int,
+):
+    """Get size of the file being the result of the request"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /requests/{request_id}/size"}
+    )
+    try:
+        return request_handler.get_request_resulting_size(
+            request_id=request_id
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_request_status(request, request_id) + + + async + + +

+ + +
+ +

Get status of the request without authentication

+ +
+ Source code in api/app/main.py +
383
+384
+385
+386
+387
+388
+389
+390
+391
+392
+393
+394
+395
+396
+397
+398
+399
+400
+401
+402
@app.get("/requests/{request_id}/status", tags=[tags.REQUEST])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /requests/{request_id}/status"},
+)
+@requires([scopes.AUTHENTICATED])
+async def get_request_status(
+    request: Request,
+    request_id: int,
+):
+    """Get status of the request without authentication"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /requests/{request_id}/status"}
+    )
+    try:
+        return request_handler.get_request_status(
+            user_id=request.user.id, request_id=request_id
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_request_uri(request, request_id) + + + async + + +

+ + +
+ +

Get download URI for the request

+ +
+ Source code in api/app/main.py +
427
+428
+429
+430
+431
+432
+433
+434
+435
+436
+437
+438
+439
+440
+441
+442
+443
+444
@app.get("/requests/{request_id}/uri", tags=[tags.REQUEST])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "GET /requests/{request_id}/uri"},
+)
+@requires([scopes.AUTHENTICATED])
+async def get_request_uri(
+    request: Request,
+    request_id: int,
+):
+    """Get download URI for the request"""
+    app.state.api_http_requests_total.inc(
+        {"route": "GET /requests/{request_id}/uri"}
+    )
+    try:
+        return request_handler.get_request_uri(request_id=request_id)
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ get_requests(request) + + + async + + +

+ + +
+ +

Get all requests for the user

+ +
+ Source code in api/app/main.py +
367
+368
+369
+370
+371
+372
+373
+374
+375
+376
+377
+378
+379
+380
@app.get("/requests", tags=[tags.REQUEST])
+@timer(
+    app.state.api_request_duration_seconds, labels={"route": "GET /requests"}
+)
+@requires([scopes.AUTHENTICATED])
+async def get_requests(
+    request: Request,
+):
+    """Get all requests for the user"""
+    app.state.api_http_requests_total.inc({"route": "GET /requests"})
+    try:
+        return request_handler.get_requests(request.user.id)
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ query(request, dataset_id, product_id, query) + + + async + + +

+ + +
+ +

Schedule the job of data retrieve

+ +
+ Source code in api/app/main.py +
319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
+340
+341
+342
+343
@app.post("/datasets/{dataset_id}/{product_id}/execute", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "POST /datasets/{dataset_id}/{product_id}/execute"},
+)
+@requires([scopes.AUTHENTICATED])
+async def query(
+    request: Request,
+    dataset_id: str,
+    product_id: str,
+    query: GeoQuery,
+):
+    """Schedule the job of data retrieve"""
+    app.state.api_http_requests_total.inc(
+        {"route": "POST /datasets/{dataset_id}/{product_id}/execute"}
+    )
+    try:
+        return dataset_handler.async_query(
+            user_id=request.user.id,
+            dataset_id=dataset_id,
+            product_id=product_id,
+            query=query,
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + +
+ + + +

+ workflow(request, tasks) + + + async + + +

+ + +
+ +

Schedule the job of workflow processing

+ +
+ Source code in api/app/main.py +
346
+347
+348
+349
+350
+351
+352
+353
+354
+355
+356
+357
+358
+359
+360
+361
+362
+363
+364
@app.post("/datasets/workflow", tags=[tags.DATASET])
+@timer(
+    app.state.api_request_duration_seconds,
+    labels={"route": "POST /datasets/workflow"},
+)
+@requires([scopes.AUTHENTICATED])
+async def workflow(
+    request: Request,
+    tasks: Workflow,
+):
+    """Schedule the job of workflow processing"""
+    app.state.api_http_requests_total.inc({"route": "POST /datasets/workflow"})
+    try:
+        return dataset_handler.run_workflow(
+            user_id=request.user.id,
+            workflow=tasks,
+        )
+    except exc.BaseGeoLakeException as err:
+        raise err.wrap_around_http_exception() from err
+
+
+
+ +
+ + + +
+ +
+ +
+ +
+ + + + +
+ +

Modules realizing logic for dataset-related endpoints

+ + + +
+ + + + + + + + + + +
+ + + +

+ async_query(user_id, dataset_id, product_id, query) + +

+ + +
+ +

Realize the logic for the endpoint:

+

POST /datasets/{dataset_id}/{product_id}/execute

+

Query the data and return the ID of the request.

+

Parameters

+

user_id : str + ID of the user executing the query +dataset_id : str + ID of the dataset +product_id : str + ID of the product +query : GeoQuery + Query to perform

+

Returns

+

request_id : int + ID of the request

+

Raises

+

MaximumAllowedSizeExceededError + if the allowed size is below the estimated one +EmptyDatasetError + if estimated size is zero

+ +
+ Source code in api/app/endpoint_handlers/dataset.py +
223
+224
+225
+226
+227
+228
+229
+230
+231
+232
+233
+234
+235
+236
+237
+238
+239
+240
+241
+242
+243
+244
+245
+246
+247
+248
+249
+250
+251
+252
+253
+254
+255
+256
+257
+258
+259
+260
+261
+262
+263
+264
+265
+266
+267
+268
+269
+270
+271
+272
+273
+274
+275
+276
+277
+278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+290
+291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
@log_execution_time(log)
+@assert_product_exists
+def async_query(
+    user_id: str,
+    dataset_id: str,
+    product_id: str,
+    query: GeoQuery,
+):
+    """Realize the logic for the endpoint:
+
+    `POST /datasets/{dataset_id}/{product_id}/execute`
+
+    Query the data and return the ID of the request.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user executing the query
+    dataset_id : str
+        ID of the dataset
+    product_id : str
+        ID of the product
+    query : GeoQuery
+        Query to perform
+
+    Returns
+    -------
+    request_id : int
+        ID of the request
+
+    Raises
+    -------
+    MaximumAllowedSizeExceededError
+        if the allowed size is below the estimated one
+    EmptyDatasetError
+        if estimated size is zero
+
+    """
+    log.debug("geoquery: %s", query)
+    if _is_etimate_enabled(dataset_id, product_id):
+        estimated_size = estimate(dataset_id, product_id, query, "GB").get("value")
+        allowed_size = data_store.product_metadata(dataset_id, product_id).get(
+            "maximum_query_size_gb", DEFAULT_MAX_REQUEST_SIZE_GB
+        )
+        if estimated_size > allowed_size:
+            raise exc.MaximumAllowedSizeExceededError(
+                dataset_id=dataset_id,
+                product_id=product_id,
+                estimated_size_gb=estimated_size,
+                allowed_size_gb=allowed_size,
+            )
+        if estimated_size == 0.0:
+            raise exc.EmptyDatasetError(
+                dataset_id=dataset_id, product_id=product_id
+            )
+    broker_conn = pika.BlockingConnection(
+        pika.ConnectionParameters(
+            host=os.getenv("BROKER_SERVICE_HOST", "broker")
+        )
+    )
+    broker_channel = broker_conn.channel()
+
+    request_id = DBManager().create_request(
+        user_id=user_id,
+        dataset=dataset_id,
+        product=product_id,
+        query=json.dumps(query.model_dump_original()),
+    )
+
+    # TODO: find a separator; for the moment use "\"
+    message = MESSAGE_SEPARATOR.join(
+        [str(request_id), "query", dataset_id, product_id, query.json()]
+    )
+
+    broker_channel.basic_publish(
+        exchange="",
+        routing_key="query_queue",
+        body=message,
+        properties=pika.BasicProperties(
+            delivery_mode=2,  # make message persistent
+        ),
+    )
+    broker_conn.close()
+    return request_id
+
+
+
+ +
+ + +
+ + + +

+ estimate(dataset_id, product_id, query, unit=None) + +

+ + +
+ +

Realize the logic for the nedpoint:

+

POST /datasets/{dataset_id}/{product_id}/estimate

+

Estimate the size of the resulting data. +No authentication is needed for estimation query.

+

Parameters

+

dataset_id : str + ID of the dataset +product_id : str + ID of the product +query : GeoQuery + Query to perform +unit : str + One of unit [bytes, kB, MB, GB] to present the result. If None, + unit will be inferred.

+

Returns

+

size_details : dict + Estimated size of the query in the form: + python + { + "value": val, + "units": units + }

+ +
+ Source code in api/app/endpoint_handlers/dataset.py +
179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
@log_execution_time(log)
+@assert_product_exists
+def estimate(
+    dataset_id: str,
+    product_id: str,
+    query: GeoQuery,
+    unit: Optional[str] = None,
+):
+    """Realize the logic for the nedpoint:
+
+    `POST /datasets/{dataset_id}/{product_id}/estimate`
+
+    Estimate the size of the resulting data.
+    No authentication is needed for estimation query.
+
+    Parameters
+    ----------
+    dataset_id : str
+        ID of the dataset
+    product_id : str
+        ID of the product
+    query : GeoQuery
+        Query to perform
+    unit : str
+        One of unit [bytes, kB, MB, GB] to present the result. If `None`,
+        unit will be inferred.
+
+    Returns
+    -------
+    size_details : dict
+        Estimated size of  the query in the form:
+        ```python
+        {
+            "value": val,
+            "units": units
+        }
+        ```
+    """
+    query_bytes_estimation = data_store.estimate(dataset_id, product_id, query)
+    return make_bytes_readable_dict(
+        size_bytes=query_bytes_estimation, units=unit
+    )
+
+
+
+ +
+ + +
+ + + +

+ get_datasets(user_roles_names) + +

+ + +
+ +

Realize the logic for the endpoint:

+

GET /datasets

+

Get datasets names, their metadata and products names (if eligible for a user). +If no eligible products are found for a dataset, it is not included.

+

Parameters

+

user_roles_names : list of str + List of user's roles

+

Returns

+

datasets : list of dict + A list of dictionaries with datasets information (including metadata and + eligible products lists)

+

Raises

+

MissingKeyInCatalogEntryError + If the dataset catalog entry does not contain the required key

+ +
+ Source code in api/app/endpoint_handlers/dataset.py +
 37
+ 38
+ 39
+ 40
+ 41
+ 42
+ 43
+ 44
+ 45
+ 46
+ 47
+ 48
+ 49
+ 50
+ 51
+ 52
+ 53
+ 54
+ 55
+ 56
+ 57
+ 58
+ 59
+ 60
+ 61
+ 62
+ 63
+ 64
+ 65
+ 66
+ 67
+ 68
+ 69
+ 70
+ 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
@log_execution_time(log)
+def get_datasets(user_roles_names: list[str]) -> list[dict]:
+    """Realize the logic for the endpoint:
+
+    `GET /datasets`
+
+    Get datasets names, their metadata and products names (if eligible for a user).
+    If no eligible products are found for a dataset, it is not included.
+
+    Parameters
+    ----------
+    user_roles_names : list of str
+        List of user's roles
+
+    Returns
+    -------
+    datasets : list of dict
+        A list of dictionaries with datasets information (including metadata and
+        eligible products lists)
+
+    Raises
+    -------
+    MissingKeyInCatalogEntryError
+        If the dataset catalog entry does not contain the required key
+    """
+    log.debug(
+        "getting all eligible products for datasets...",
+    )
+    datasets = []
+    for dataset_id in data_store.dataset_list():
+        log.debug(
+            "getting info and eligible products for `%s`",
+            dataset_id,
+        )
+        dataset_info = data_store.dataset_info(dataset_id=dataset_id)
+        try:
+            eligible_prods = {
+                prod_name: prod_info
+                for prod_name, prod_info in dataset_info["products"].items()
+                if is_role_eligible_for_product(
+                    product_role_name=prod_info.get("role"),
+                    user_roles_names=user_roles_names,
+                )
+            }
+        except KeyError as err:
+            log.error(
+                "dataset `%s` does not have products defined",
+                dataset_id,
+                exc_info=True,
+            )
+            raise exc.MissingKeyInCatalogEntryError(
+                key="products", dataset=dataset_id
+            ) from err
+        else:
+            if len(eligible_prods) == 0:
+                log.debug(
+                    "no eligible products for dataset `%s` for the role `%s`."
+                    " dataset skipped",
+                    dataset_id,
+                    user_roles_names,
+                )
+            else:
+                dataset_info["products"] = eligible_prods
+                datasets.append(dataset_info)
+    return datasets
+
+
+
+ +
+ + +
+ + + +

+ get_metadata(dataset_id, product_id) + +

+ + +
+ +

Realize the logic for the endpoint:

+

GET /datasets/{dataset_id}/{product_id}/metadata

+

Get metadata for the product.

+

Parameters

+

dataset_id : str + ID of the dataset +product_id : str + ID of the product

+ +
+ Source code in api/app/endpoint_handlers/dataset.py +
157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
@log_execution_time(log)
+@assert_product_exists
+def get_metadata(dataset_id: str, product_id: str):
+    """Realize the logic for the endpoint:
+
+    `GET /datasets/{dataset_id}/{product_id}/metadata`
+
+    Get metadata for the product.
+
+    Parameters
+    ----------
+    dataset_id : str
+        ID of the dataset
+    product_id : str
+        ID of the product
+    """
+    log.debug(
+        "getting metadata for '{dataset_id}.{product_id}'",
+    )
+    return data_store.product_metadata(dataset_id, product_id)
+
+
+
+ +
+ + +
+ + + +

+ get_product_details(user_roles_names, dataset_id, product_id=None) + +

+ + +
+ +

Realize the logic for the endpoint:

+

GET /datasets/{dataset_id}/{product_id}

+

Get details for the given product indicated by dataset_id +and product_id arguments.

+

Parameters

+

user_roles_names : list of str + List of user's roles +dataset_id : str + ID of the dataset +product_id : optional, str + ID of the product. If None the 1st product will be considered

+

Returns

+

details : dict + Details for the given product

+

Raises

+

AuthorizationFailed + If user is not authorized for the resources

+ +
+ Source code in api/app/endpoint_handlers/dataset.py +
104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
@log_execution_time(log)
+@assert_product_exists
+def get_product_details(
+    user_roles_names: list[str],
+    dataset_id: str,
+    product_id: Optional[str] = None,
+) -> dict:
+    """Realize the logic for the endpoint:
+
+    `GET /datasets/{dataset_id}/{product_id}`
+
+    Get details for the given product indicated by `dataset_id`
+    and `product_id` arguments.
+
+    Parameters
+    ----------
+    user_roles_names : list of str
+        List of user's roles
+    dataset_id : str
+        ID of the dataset
+    product_id : optional, str
+        ID of the product. If `None` the 1st product will be considered
+
+    Returns
+    -------
+    details : dict
+        Details for the given product
+
+    Raises
+    -------
+    AuthorizationFailed
+        If user is not authorized for the resources
+    """
+    log.debug(
+        "getting details for eligible products of `%s`",
+        dataset_id,
+    )
+    try:
+        if product_id:
+            return data_store.product_details(
+                dataset_id=dataset_id,
+                product_id=product_id,
+                role=user_roles_names,
+                use_cache=True,
+            )
+        else:
+            return data_store.first_eligible_product_details(
+                dataset_id=dataset_id, role=user_roles_names, use_cache=True
+            )
+    except datastore_exception.UnauthorizedError as err:
+        raise exc.AuthorizationFailed from err
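A unit-test-style sketch of invoking the handler directly, assuming an `admin` role and placeholder IDs; in the running service the roles come from the authentication layer instead:

# Sketch only: role and IDs are illustrative.
details = get_product_details(
    user_roles_names=["admin"],
    dataset_id="era5",
    product_id=None,  # None selects the first eligible product
)
print(details)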

run_workflow(user_id, workflow)

Source code in api/app/endpoint_handlers/dataset.py
@log_execution_time(log)
+def run_workflow(
+    user_id: str,
+    workflow: Workflow,
+):
+    """Realize the logic for the endpoint:
+
+    `POST /datasets/workflow`
+
+    Schedule the workflow and return the ID of the request.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user executing the query
+    workflow : Workflow
+        Workflow to perform
+
+    Returns
+    -------
+    request_id : int
+        ID of the request
+
+    Raises
+    -------
+    MaximumAllowedSizeExceededError
+        if the allowed size is below the estimated one
+    EmptyDatasetError
+        if estimated size is zero
+
+    """
+    log.debug("geoquery: %s", workflow)
+    broker_conn = pika.BlockingConnection(
+        pika.ConnectionParameters(
+            host=os.getenv("BROKER_SERVICE_HOST", "broker")
+        )
+    )
+    broker_channel = broker_conn.channel()
+    request_id = DBManager().create_request(
+        user_id=user_id,
+        dataset=workflow.dataset_id,
+        product=workflow.product_id,
+        query=workflow.json(),
+    )
+
+    # TODO: find a separator; for the moment use "\"
+    message = MESSAGE_SEPARATOR.join(
+        [str(request_id), "workflow", workflow.json()]
+    )
+
+    broker_channel.basic_publish(
+        exchange="",
+        routing_key="query_queue",
+        body=message,
+        properties=pika.BasicProperties(
+            delivery_mode=2,  # make message persistent
+        ),
+    )
+    broker_conn.close()
+    return request_id
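For reference, the published message is a single string of three `MESSAGE_SEPARATOR`-delimited fields. A sketch of how a consumer might split it back apart, assuming the separator is the backslash mentioned in the TODO above:

# Assumption: MESSAGE_SEPARATOR is "\" as the TODO comment suggests.
MESSAGE_SEPARATOR = "\\"

def parse_broker_message(body: bytes) -> tuple[int, str, str]:
    """Split a queued message into (request_id, kind, payload_json)."""
    request_id, kind, payload_json = body.decode("utf-8").split(
        MESSAGE_SEPARATOR, maxsplit=2
    )
    return int(request_id), kind, payload_json

# parse_broker_message(b'42\\workflow\\{"dataset_id": "era5"}')
# -> (42, 'workflow', '{"dataset_id": "era5"}')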

sync_query(user_id, dataset_id, product_id, query)

Source code in api/app/endpoint_handlers/dataset.py
@log_execution_time(log)
+@assert_product_exists
+def sync_query(
+    user_id: str,
+    dataset_id: str,
+    product_id: str,
+    query: GeoQuery,
+):
+    """Realize the logic for the endpoint:
+
+    `POST /datasets/{dataset_id}/{product_id}/execute`
+
+    Query the data and return the result of the request.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user executing the query
+    dataset_id : str
+        ID of the dataset
+    product_id : str
+        ID of the product
+    query : GeoQuery
+        Query to perform
+
+    Returns
+    -------
+    response : FileResponse
+        File being the result of the query
+
+    Raises
+    -------
+    MaximumAllowedSizeExceededError
+        if the allowed size is below the estimated one
+    EmptyDatasetError
+        if estimated size is zero
+    ProductRetrievingError
+        if the request finishes with a status other than `DONE`
+
+    """
+
+    import time
+    request_id = async_query(user_id, dataset_id, product_id, query)
+    status, _ = DBManager().get_request_status_and_reason(request_id)
+    log.debug("sync query: status: %s", status)
+    while status in (RequestStatus.RUNNING, RequestStatus.QUEUED, 
+                     RequestStatus.PENDING):
+        time.sleep(1)
+        status, _ = DBManager().get_request_status_and_reason(request_id)
+        log.debug("sync query: status: %s", status)
+
+    if status is RequestStatus.DONE:
+        download_details = DBManager().get_download_details_for_request_id(
+                request_id
+        )
+        return FileResponse(
+            path=download_details.location_path,
+            filename=download_details.location_path.split(os.sep)[-1],
+        )
+    raise exc.ProductRetrievingError(
+        dataset_id=dataset_id, 
+        product_id=product_id,
+        status=status.name)
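The fixed one-second poll above occupies a worker for the whole lifetime of the request. A sketch, not part of the patch, of a capped exponential backoff that could replace the loop; `RequestStatus` is the dbmanager enum used above, and `get_status` stands in for `DBManager().get_request_status_and_reason`:

import time

def wait_until_finished(request_id: int, get_status, max_delay: float = 30.0):
    """Poll until the request leaves RUNNING/QUEUED/PENDING, backing off."""
    delay = 0.5
    status, reason = get_status(request_id)
    while status in (RequestStatus.RUNNING, RequestStatus.QUEUED,
                     RequestStatus.PENDING):
        time.sleep(delay)
        delay = min(delay * 2, max_delay)  # 0.5 s, 1 s, 2 s, ... capped
        status, reason = get_status(request_id)
    return status, reason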

Module with functions to handle file related endpoints

download_request_result(request_id)

Source code in api/app/endpoint_handlers/file.py
@log_execution_time(log)
+def download_request_result(request_id: int):
+    """Realize the logic for the endpoint:
+
+    `GET /download/{request_id}`
+
+    Get location path of the file being the result of
+    the request with `request_id`.
+
+    Parameters
+    ----------
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    path : str
+        The location of the resulting file
+
+    Raises
+    -------
+    RequestNotYetAccomplished
+        If geolake request was not yet finished
+    FileNotFoundError
+        If file was not found
+    """
+    log.debug(
+        "preparing downloads for request id: %s",
+        request_id,
+    )
+    (
+        request_status,
+        _,
+    ) = DBManager().get_request_status_and_reason(request_id=request_id)
+    if request_status is not RequestStatus.DONE:
+        log.debug(
+            "request with id: '%s' does not exist or it is not finished yet!",
+            request_id,
+        )
+        raise exc.RequestNotYetAccomplished(request_id=request_id)
+    download_details = DBManager().get_download_details_for_request(
+        request_id=request_id
+    )
+    if not os.path.exists(download_details.location_path):
+        log.error(
+            "file '%s' does not exists!",
+            download_details.location_path,
+        )
+        raise FileNotFoundError
+    return FileResponse(
+        path=download_details.location_path,
+        filename=download_details.location_path.split(os.sep)[-1],
+    )
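On the client side the response streams the file body; a minimal sketch of saving it to disk, reusing the hypothetical base URL from the earlier example:

import requests

BASE_URL = "http://localhost:8000"  # hypothetical deployment URL
request_id = 42  # illustrative ID of a finished request

with requests.get(
    f"{BASE_URL}/download/{request_id}", stream=True, timeout=60
) as resp:
    resp.raise_for_status()
    with open(f"request_{request_id}.result", "wb") as fh:
        for chunk in resp.iter_content(chunk_size=1 << 20):  # 1 MiB chunks
            fh.write(chunk)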

Module with functions realizing logic for requests-related endpoints

get_request_resulting_size(request_id)

Source code in api/app/endpoint_handlers/request.py
@log_execution_time(log)
+def get_request_resulting_size(request_id: int):
+    """Realize the logic for the endpoint:
+
+    `GET /requests/{request_id}/size`
+
+    Get size of the file being the result of the request with `request_id`
+
+    Parameters
+    ----------
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    size : int
+        Size in bytes
+
+    Raises
+    -------
+    RequestNotFound
+        If the request was not found
+    EmptyDatasetError
+        If the size of the resulting file is zero or missing
+    """
+    if request := DBManager().get_request_details(request_id):
+        size = request.download.size_bytes
+        if not size:
+            raise exc.EmptyDatasetError(dataset_id=request.dataset, 
+                                        product_id=request.product)
+        return size
+    log.info(
+        "request with id '%s' could not be found",
+        request_id,
+    )
+    raise exc.RequestNotFound(request_id=request_id)

get_request_status(user_id, request_id)

Source code in api/app/endpoint_handlers/request.py
@log_execution_time(log)
+def get_request_status(user_id: str, request_id: int):
+    """Realize the logic for the endpoint:
+
+    `GET /requests/{request_id}/status`
+
+    Get request status and the reason of the eventual fail.
+    The second item is `None` if the status is other than failed.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user whose request's status is about to be checked
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    status : dict
+        Dictionary with the status name and the fail reason.
+    """
+    # NOTE: maybe verification should be added so users can check only their own requests
+    try:
+        status, reason = DBManager().get_request_status_and_reason(request_id)
+    except IndexError as err:
+        log.error(
+            "request with id: '%s' was not found!",
+            request_id,
+        )
+        raise exc.RequestNotFound(request_id=request_id) from err
+    return {"status": status.name, "fail_reason": reason}

get_request_uri(request_id)

Source code in api/app/endpoint_handlers/request.py
@log_execution_time(log)
+def get_request_uri(request_id: int):
+    """
+    Realize the logic for the endpoint:
+
+    `GET /requests/{request_id}/uri`
+
+    Get URI for the request.
+
+    Parameters
+    ----------
+    request_id : int
+        ID of the request
+
+    Returns
+    -------
+    uri : str
+        URI for the download associated with the given request
+    """
+    try:
+        download_details = DBManager().get_download_details_for_request_id(
+            request_id
+        )
+    except IndexError as err:
+        log.error(
+            "request with id: '%s' was not found!",
+            request_id,
+        )
+        raise exc.RequestNotFound(request_id=request_id) from err
+    if download_details is None:
+        (
+            request_status,
+            _,
+        ) = DBManager().get_request_status_and_reason(request_id)
+        log.info(
+            "download URI not found for request id: '%s'."
+            " Request status is '%s'",
+            request_id,
+            request_status,
+        )
+        raise exc.RequestStatusNotDone(
+            request_id=request_id, request_status=request_status
+        )
+    return download_details.download_uri

get_requests(user_id)

Source code in api/app/endpoint_handlers/request.py
@log_execution_time(log)
+def get_requests(user_id: str):
+    """Realize the logic for the endpoint:
+
+    `GET /requests`
+
+    Get details of all requests for the user.
+
+    Parameters
+    ----------
+    user_id : str
+        ID of the user for whom requests are taken
+
+    Returns
+    -------
+    requests : list
+        List of all requests done by the user
+    """
+    return DBManager().get_requests_for_user_id(user_id=user_id)
diff --git a/site/assets/_mkdocstrings.css b/site/assets/_mkdocstrings.css
new file mode 100644
index 0000000..4b7d98b
[generated mkdocstrings stylesheet (109 lines): docstring table layout and symbol colors, content omitted]
diff --git a/site/assets/images/favicon.png b/site/assets/images/favicon.png
new file mode 100644
index 0000000000000000000000000000000000000000..1cf13b9f9d978896599290a74f77d5dbe7d1655c
[binary favicon, literal 1870 bytes, content omitted]
diff --git a/site/assets/javascripts/bundle.7389ff0e.min.js b/site/assets/javascripts/bundle.7389ff0e.min.js
new file mode 100644
index 0000000..c7df719
[minified Material for MkDocs JavaScript bundle, content omitted]
f=p.innerText.trim();if(p.hasAttribute("data-md-switching"))p.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let y of W("[data-tabs]"))for(let b of W(":scope > input",y)){let D=U(`label[for="${b.id}"]`);if(D!==p&&D.innerText.trim()===f){D.setAttribute("data-md-switching",""),b.click();break}}window.scrollTo({top:e.offsetTop-u});let d=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...d])])}}),a.pipe(j(c)).subscribe(()=>{for(let p of W("audio, video",e))p.pause()}),ka(n).pipe(T(p=>a.next(p)),A(()=>a.complete()),m(p=>P({ref:e},p)))}).pipe(qe(ie))}function Hn(e,{viewport$:t,target$:r,print$:o}){return L(...W(".annotate:not(.highlight)",e).map(n=>wn(n,{target$:r,print$:o})),...W("pre:not(.mermaid) > code",e).map(n=>On(n,{target$:r,print$:o})),...W("pre.mermaid",e).map(n=>_n(n)),...W("table:not([class])",e).map(n=>Cn(n)),...W("details",e).map(n=>Mn(n,{target$:r,print$:o})),...W("[data-tabs]",e).map(n=>kn(n,{viewport$:t,target$:r})),...W("[title]",e).filter(()=>G("content.tooltips")).map(n=>Be(n)))}function Ha(e,{alert$:t}){return t.pipe(w(r=>L(R(!0),R(!1).pipe(Qe(2e3))).pipe(m(o=>({message:r,active:o})))))}function $n(e,t){let r=U(".md-typeset",e);return H(()=>{let o=new x;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),Ha(e,t).pipe(T(n=>o.next(n)),A(()=>o.complete()),m(n=>P({ref:e},n)))})}function $a({viewport$:e}){if(!G("header.autohide"))return R(!1);let t=e.pipe(m(({offset:{y:n}})=>n),Ce(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),X()),o=Ne("search");return B([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),X(),w(n=>n?r:R(!1)),q(!1))}function Pn(e,t){return H(()=>B([Se(e),$a(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),X((r,o)=>r.height===o.height&&r.hidden===o.hidden),Z(1))}function Rn(e,{header$:t,main$:r}){return H(()=>{let o=new x,n=o.pipe(ee(),oe(!0));o.pipe(te("active"),Ze(t)).subscribe(([{active:s},{hidden:a}])=>{e.classList.toggle("md-header--shadow",s&&!a),e.hidden=a});let i=fe(W("[title]",e)).pipe(v(()=>G("content.tooltips")),re(s=>Be(s)));return r.subscribe(o),t.pipe(j(n),m(s=>P({ref:e},s)),Re(i.pipe(j(n))))})}function Pa(e,{viewport$:t,header$:r}){return mr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=le(e);return{active:o>=n}}),te("active"))}function In(e,t){return H(()=>{let r=new x;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=ce(".md-content h1");return typeof o=="undefined"?M:Pa(o,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))})}function Fn(e,{viewport$:t,header$:r}){let o=r.pipe(m(({height:i})=>i),X()),n=o.pipe(w(()=>Se(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),te("bottom"))));return B([o,n,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,s-c,i)-Math.max(0,p+c-a)),{offset:s-i,height:p,active:s-i<=c})),X((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function Ra(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return R(...e).pipe(re(o=>h(o,"change").pipe(m(()=>o))),q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),Z(1))}function jn(e){let 
t=W("input",e),r=S("meta",{name:"theme-color"});document.head.appendChild(r);let o=S("meta",{name:"color-scheme"});document.head.appendChild(o);let n=At("(prefers-color-scheme: light)");return H(()=>{let i=new x;return i.subscribe(s=>{if(document.body.setAttribute("data-md-color-switching",""),s.color.media==="(prefers-color-scheme)"){let a=matchMedia("(prefers-color-scheme: light)"),c=document.querySelector(a.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");s.color.scheme=c.getAttribute("data-md-color-scheme"),s.color.primary=c.getAttribute("data-md-color-primary"),s.color.accent=c.getAttribute("data-md-color-accent")}for(let[a,c]of Object.entries(s.color))document.body.setAttribute(`data-md-color-${a}`,c);for(let a=0;a{let s=Oe("header"),a=window.getComputedStyle(s);return o.content=a.colorScheme,a.backgroundColor.match(/\d+/g).map(c=>(+c).toString(16).padStart(2,"0")).join("")})).subscribe(s=>r.content=`#${s}`),i.pipe(Me(ie)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),Ra(t).pipe(j(n.pipe(Ee(1))),at(),T(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))})}function Wn(e,{progress$:t}){return H(()=>{let r=new x;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(T(o=>r.next({value:o})),A(()=>r.complete()),m(o=>({ref:e,value:o})))})}var Yr=jt(Kr());function Ia(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return e.removeAttribute("data-md-copying"),r.trimEnd()}function Un({alert$:e}){Yr.default.isSupported()&&new I(t=>{new Yr.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||Ia(U(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(T(t=>{t.trigger.focus()}),m(()=>we("clipboard.copied"))).subscribe(e)}function Fa(e){if(e.length<2)return[""];let[t,r]=[...e].sort((n,i)=>n.length-i.length).map(n=>n.replace(/[^/]+$/,"")),o=0;if(t===r)o=t.length;else for(;t.charCodeAt(o)===r.charCodeAt(o);)o++;return e.map(n=>n.replace(t.slice(0,o),""))}function ur(e){let t=__md_get("__sitemap",sessionStorage,e);if(t)return R(t);{let r=he();return on(new URL("sitemap.xml",e||r.base)).pipe(m(o=>Fa(W("loc",o).map(n=>n.textContent))),xe(()=>M),$e([]),T(o=>__md_set("__sitemap",o,sessionStorage,e)))}}function Nn(e){let t=ce("[rel=canonical]",e);typeof t!="undefined"&&(t.href=t.href.replace("//localhost:","//127.0.0.1:"));let r=new Map;for(let o of W(":scope > *",e)){let n=o.outerHTML;for(let i of["href","src"]){let s=o.getAttribute(i);if(s===null)continue;let a=new URL(s,t==null?void 0:t.href),c=o.cloneNode();c.setAttribute(i,`${a}`),n=c.outerHTML;break}r.set(n,o)}return r}function Dn({location$:e,viewport$:t,progress$:r}){let o=he();if(location.protocol==="file:")return M;let n=ur().pipe(m(l=>l.map(f=>`${new URL(f,o.base)}`))),i=h(document.body,"click").pipe(ae(n),w(([l,f])=>{if(!(l.target instanceof Element))return M;let u=l.target.closest("a");if(u===null)return M;if(u.target||l.metaKey||l.ctrlKey)return M;let d=new URL(u.href);return d.search=d.hash="",f.includes(`${d}`)?(l.preventDefault(),R(new URL(u.href))):M}),de());i.pipe(ue(1)).subscribe(()=>{let l=ce("link[rel=icon]");typeof 
l!="undefined"&&(l.href=l.href)}),h(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),i.pipe(ae(t)).subscribe(([l,{offset:f}])=>{history.scrollRestoration="manual",history.replaceState(f,""),history.pushState(null,"",l)}),i.subscribe(e);let s=e.pipe(q(me()),te("pathname"),Ee(1),w(l=>lr(l,{progress$:r}).pipe(xe(()=>(st(l,!0),M))))),a=new DOMParser,c=s.pipe(w(l=>l.text()),w(l=>{let f=a.parseFromString(l,"text/html");for(let b of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...G("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let D=ce(b),Q=ce(b,f);typeof D!="undefined"&&typeof Q!="undefined"&&D.replaceWith(Q)}let u=Nn(document.head),d=Nn(f.head);for(let[b,D]of d)D.getAttribute("rel")==="stylesheet"||D.hasAttribute("src")||(u.has(b)?u.delete(b):document.head.appendChild(D));for(let b of u.values())b.getAttribute("rel")==="stylesheet"||b.hasAttribute("src")||b.remove();let y=Oe("container");return We(W("script",y)).pipe(w(b=>{let D=f.createElement("script");if(b.src){for(let Q of b.getAttributeNames())D.setAttribute(Q,b.getAttribute(Q));return b.replaceWith(D),new I(Q=>{D.onload=()=>Q.complete()})}else return D.textContent=b.textContent,b.replaceWith(D),M}),ee(),oe(f))}),de());return h(window,"popstate").pipe(m(me)).subscribe(e),e.pipe(q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash!==f.hash),m(([,l])=>l)).subscribe(l=>{var f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):(history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual")}),e.pipe(Ir(i),q(me()),Ce(2,1),v(([l,f])=>l.pathname===f.pathname&&l.hash===f.hash),m(([,l])=>l)).subscribe(l=>{history.scrollRestoration="auto",pr(l.hash),history.scrollRestoration="manual",history.back()}),c.pipe(ae(e)).subscribe(([,l])=>{var f,u;history.state!==null||!l.hash?window.scrollTo(0,(u=(f=history.state)==null?void 0:f.y)!=null?u:0):pr(l.hash)}),t.pipe(te("offset"),ye(100)).subscribe(({offset:l})=>{history.replaceState(l,"")}),c}var qn=jt(zn());function Kn(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,s)=>`${i}${s}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(0,qn.default)(s).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function Ht(e){return e.type===1}function dr(e){return e.type===3}function Qn(e,t){let r=ln(e);return L(R(location.protocol!=="file:"),Ne("search")).pipe(Pe(o=>o),w(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function Yn({document$:e}){let t=he(),r=De(new URL("../versions.json",t.base)).pipe(xe(()=>M)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:s,aliases:a})=>s===i||a.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),w(n=>h(document.body,"click").pipe(v(i=>!i.metaKey&&!i.ctrlKey),ae(o),w(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&n.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&n.get(c)===s?M:(i.preventDefault(),R(c))}}return M}),w(i=>{let{version:s}=n.get(i);return ur(new URL(i)).pipe(m(a=>{let p=me().href.replace(t.base,"");return 
a.includes(p.split("#")[0])?new URL(`../${s}/${p}`,t.base):new URL(i)}))})))).subscribe(n=>st(n,!0)),B([r,o]).subscribe(([n,i])=>{U(".md-header__topic").appendChild(gn(n,i))}),e.pipe(w(()=>o)).subscribe(n=>{var s;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let a=((s=t.version)==null?void 0:s.default)||"latest";Array.isArray(a)||(a=[a]);e:for(let c of a)for(let p of n.aliases.concat(n.version))if(new RegExp(c,"i").test(p)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let a of ne("outdated"))a.hidden=!1})}function Da(e,{worker$:t}){let{searchParams:r}=me();r.has("q")&&(Ye("search",!0),e.value=r.get("q"),e.focus(),Ne("search").pipe(Pe(i=>!i)).subscribe(()=>{let i=me();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=vt(e),n=L(t.pipe(Pe(Ht)),h(e,"keyup"),o).pipe(m(()=>e.value),X());return B([n,o]).pipe(m(([i,s])=>({value:i,focus:s})),Z(1))}function Bn(e,{worker$:t}){let r=new x,o=r.pipe(ee(),oe(!0));B([t.pipe(Pe(Ht)),r],(i,s)=>s).pipe(te("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(te("focus")).subscribe(({focus:i})=>{i&&Ye("search",i)}),h(e.form,"reset").pipe(j(o)).subscribe(()=>e.focus());let n=U("header [for=__search]");return h(n,"click").subscribe(()=>e.focus()),Da(e,{worker$:t}).pipe(T(i=>r.next(i)),A(()=>r.complete()),m(i=>P({ref:e},i)),Z(1))}function Gn(e,{worker$:t,query$:r}){let o=new x,n=Go(e.parentElement).pipe(v(Boolean)),i=e.parentElement,s=U(":scope > :first-child",e),a=U(":scope > :last-child",e);Ne("search").subscribe(l=>a.setAttribute("role",l?"list":"presentation")),o.pipe(ae(r),Wr(t.pipe(Pe(Ht)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:s.textContent=f.length?we("search.result.none"):we("search.result.placeholder");break;case 1:s.textContent=we("search.result.one");break;default:let u=ar(l.length);s.textContent=we("search.result.other",u)}});let c=o.pipe(T(()=>a.innerHTML=""),w(({items:l})=>L(R(...l.slice(0,10)),R(...l.slice(10)).pipe(Ce(4),Nr(n),w(([f])=>f)))),m(hn),de());return c.subscribe(l=>a.appendChild(l)),c.pipe(re(l=>{let f=ce("details",l);return typeof f=="undefined"?M:h(f,"toggle").pipe(j(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(v(dr),m(({data:l})=>l)).pipe(T(l=>o.next(l)),A(()=>o.complete()),m(l=>P({ref:e},l)))}function Va(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=me();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function Jn(e,t){let r=new x,o=r.pipe(ee(),oe(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),h(e,"click").pipe(j(o)).subscribe(n=>n.preventDefault()),Va(e,t).pipe(T(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))}function Xn(e,{worker$:t,keyboard$:r}){let o=new x,n=Oe("search-query"),i=L(h(n,"keydown"),h(n,"focus")).pipe(Me(ie),m(()=>n.value),X());return o.pipe(Ze(i),m(([{suggest:a},c])=>{let p=c.split(/([\s-]+)/);if(a!=null&&a.length&&p[p.length-1]){let l=a[a.length-1];l.startsWith(p[p.length-1])&&(p[p.length-1]=l)}else p.length=0;return p})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(v(({mode:a})=>a==="search")).subscribe(a=>{switch(a.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(v(dr),m(({data:a})=>a)).pipe(T(a=>o.next(a)),A(()=>o.complete()),m(()=>({ref:e})))}function Zn(e,{index$:t,keyboard$:r}){let o=he();try{let 
n=Qn(o.search,t),i=Oe("search-query",e),s=Oe("search-result",e);h(e,"click").pipe(v(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>Ye("search",!1)),r.pipe(v(({mode:c})=>c==="search")).subscribe(c=>{let p=Ie();switch(c.type){case"Enter":if(p===i){let l=new Map;for(let f of W(":first-child [href]",s)){let u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,d])=>d-u);f.click()}c.claim()}break;case"Escape":case"Tab":Ye("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let l=[i,...W(":not(details) > [href], summary, details[open] [href]",s)],f=Math.max(0,(Math.max(0,l.indexOf(p))+l.length+(c.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}c.claim();break;default:i!==Ie()&&i.focus()}}),r.pipe(v(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let a=Bn(i,{worker$:n});return L(a,Gn(s,{worker$:n,query$:a})).pipe(Re(...ne("search-share",e).map(c=>Jn(c,{query$:a})),...ne("search-suggest",e).map(c=>Xn(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ke}}function ei(e,{index$:t,location$:r}){return B([t,r.pipe(q(me()),v(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>Kn(o.config)(n.searchParams.get("h"))),m(o=>{var s;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,p=o(c);p.length>c.length&&n.set(a,p)}for(let[a,c]of n){let{childNodes:p}=S("span",null,c);a.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function za(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return B([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(n,Math.max(0,a-i))-n,{height:s,locked:a>=i+n})),X((i,s)=>i.height===s.height&&i.locked===s.locked))}function Br(e,o){var n=o,{header$:t}=n,r=oo(n,["header$"]);let i=U(".md-sidebar__scrollwrap",e),{y:s}=Ue(i);return H(()=>{let a=new x,c=a.pipe(ee(),oe(!0)),p=a.pipe(Le(0,ge));return p.pipe(ae(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe(Pe()).subscribe(()=>{for(let l of W(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2})}}}),fe(W("label[tabindex]",e)).pipe(re(l=>h(l,"click").pipe(Me(ie),m(()=>l),j(c)))).subscribe(l=>{let f=U(`[id="${l.htmlFor}"]`);U(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),za(e,r).pipe(T(l=>a.next(l)),A(()=>a.complete()),m(l=>P({ref:e},l)))})}function ti(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Lt(De(`${r}/releases/latest`).pipe(xe(()=>M),m(o=>({version:o.tag_name})),$e({})),De(r).pipe(xe(()=>M),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),$e({}))).pipe(m(([o,n])=>P(P({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return De(r).pipe(m(o=>({repositories:o.public_repos})),$e({}))}}function ri(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return De(r).pipe(xe(()=>M),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),$e({}))}function oi(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return ti(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return 
ri(r,o)}return M}var qa;function Ka(e){return qa||(qa=H(()=>{let t=__md_get("__source",sessionStorage);if(t)return R(t);if(ne("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return M}return oi(e.href).pipe(T(o=>__md_set("__source",o,sessionStorage)))}).pipe(xe(()=>M),v(t=>Object.keys(t).length>0),m(t=>({facts:t})),Z(1)))}function ni(e){let t=U(":scope > :last-child",e);return H(()=>{let r=new x;return r.subscribe(({facts:o})=>{t.appendChild(bn(o)),t.classList.add("md-source__repository--active")}),Ka(e).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Qa(e,{viewport$:t,header$:r}){return Se(document.body).pipe(w(()=>mr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),te("hidden"))}function ii(e,t){return H(()=>{let r=new x;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?R({hidden:!1}):Qa(e,t)).pipe(T(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Ya(e,{viewport$:t,header$:r}){let o=new Map,n=W("[href^=\\#]",e);for(let a of n){let c=decodeURIComponent(a.hash.substring(1)),p=ce(`[id="${c}"]`);typeof p!="undefined"&&o.set(a,p)}let i=r.pipe(te("height"),m(({height:a})=>{let c=Oe("main"),p=U(":scope > :first-child",c);return a+.8*(p.offsetTop-c.offsetTop)}),de());return Se(document.body).pipe(te("height"),w(a=>H(()=>{let c=[];return R([...o].reduce((p,[l,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let d=f.offsetParent;for(;d;d=d.offsetParent)u+=d.offsetTop;return p.set([...c=[...c,l]].reverse(),u)},new Map))}).pipe(m(c=>new Map([...c].sort(([,p],[,l])=>p-l))),Ze(i),w(([c,p])=>t.pipe(Fr(([l,f],{offset:{y:u},size:d})=>{let y=u+d.height>=Math.floor(a.height);for(;f.length;){let[,b]=f[0];if(b-p=u&&!y)f=[l.pop(),...f];else break}return[l,f]},[[],[...c]]),X((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([a,c])=>({prev:a.map(([p])=>p),next:c.map(([p])=>p)})),q({prev:[],next:[]}),Ce(2,1),m(([a,c])=>a.prev.length{let i=new x,s=i.pipe(ee(),oe(!0));if(i.subscribe(({prev:a,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[l]]of a.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",p===a.length-1)}),G("toc.follow")){let a=L(t.pipe(ye(1),m(()=>{})),t.pipe(ye(250),m(()=>"smooth")));i.pipe(v(({prev:c})=>c.length>0),Ze(o.pipe(Me(ie))),ae(a)).subscribe(([[{prev:c}],p])=>{let[l]=c[c.length-1];if(l.offsetHeight){let f=sr(l);if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=le(f);f.scrollTo({top:u-d/2,behavior:p})}}})}return G("navigation.tracking")&&t.pipe(j(s),te("offset"),ye(250),Ee(1),j(n.pipe(Ee(1))),at({delay:250}),ae(i)).subscribe(([,{prev:a}])=>{let c=me(),p=a[a.length-1];if(p&&p.length){let[l]=p,{hash:f}=new URL(l.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),Ya(e,{viewport$:t,header$:r}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ba(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:s}})=>s),Ce(2,1),m(([s,a])=>s>a&&a>0),X()),i=r.pipe(m(({active:s})=>s));return B([i,n]).pipe(m(([s,a])=>!(s&&a)),X(),j(o.pipe(Ee(1))),oe(!0),at({delay:250}),m(s=>({hidden:s})))}function si(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new x,s=i.pipe(ee(),oe(!0));return 
i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(j(s),te("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),h(e,"click").subscribe(a=>{a.preventDefault(),window.scrollTo({top:0})}),Ba(e,{viewport$:t,main$:o,target$:n}).pipe(T(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))}function ci({document$:e}){e.pipe(w(()=>W(".md-ellipsis")),re(t=>yt(t).pipe(j(e.pipe(Ee(1))),v(r=>r),m(()=>t),ue(1))),v(t=>t.offsetWidth{let r=t.innerText,o=t.closest("a")||t;return o.title=r,Be(o).pipe(j(e.pipe(Ee(1))),A(()=>o.removeAttribute("title")))})).subscribe(),e.pipe(w(()=>W(".md-status")),re(t=>Be(t))).subscribe()}function pi({document$:e,tablet$:t}){e.pipe(w(()=>W(".md-toggle--indeterminate")),T(r=>{r.indeterminate=!0,r.checked=!1}),re(r=>h(r,"change").pipe(Ur(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ae(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function Ga(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function li({document$:e}){e.pipe(w(()=>W("[data-md-scrollfix]")),T(t=>t.removeAttribute("data-md-scrollfix")),v(Ga),re(t=>h(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function mi({viewport$:e,tablet$:t}){B([Ne("search"),t]).pipe(m(([r,o])=>r&&!o),w(r=>R(r).pipe(Qe(r?400:100))),ae(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function Ja(){return location.protocol==="file:"?gt(`${new URL("search/search_index.js",Gr.base)}`).pipe(m(()=>__index),Z(1)):De(new URL("search/search_index.json",Gr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var rt=zo(),Pt=Zo(),wt=tn(Pt),Jr=Xo(),_e=pn(),hr=At("(min-width: 960px)"),ui=At("(min-width: 1220px)"),di=rn(),Gr=he(),hi=document.forms.namedItem("search")?Ja():Ke,Xr=new x;Un({alert$:Xr});var Zr=new x;G("navigation.instant")&&Dn({location$:Pt,viewport$:_e,progress$:Zr}).subscribe(rt);var fi;((fi=Gr.version)==null?void 0:fi.provider)==="mike"&&Yn({document$:rt});L(Pt,wt).pipe(Qe(125)).subscribe(()=>{Ye("drawer",!1),Ye("search",!1)});Jr.pipe(v(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=ce("link[rel=prev]");typeof t!="undefined"&&st(t);break;case"n":case".":let r=ce("link[rel=next]");typeof r!="undefined"&&st(r);break;case"Enter":let o=Ie();o instanceof 
HTMLLabelElement&&o.click()}});ci({document$:rt});pi({document$:rt,tablet$:hr});li({document$:rt});mi({viewport$:_e,tablet$:hr});var tt=Pn(Oe("header"),{viewport$:_e}),$t=rt.pipe(m(()=>Oe("main")),w(e=>Fn(e,{viewport$:_e,header$:tt})),Z(1)),Xa=L(...ne("consent").map(e=>fn(e,{target$:wt})),...ne("dialog").map(e=>$n(e,{alert$:Xr})),...ne("header").map(e=>Rn(e,{viewport$:_e,header$:tt,main$:$t})),...ne("palette").map(e=>jn(e)),...ne("progress").map(e=>Wn(e,{progress$:Zr})),...ne("search").map(e=>Zn(e,{index$:hi,keyboard$:Jr})),...ne("source").map(e=>ni(e))),Za=H(()=>L(...ne("announce").map(e=>mn(e)),...ne("content").map(e=>Hn(e,{viewport$:_e,target$:wt,print$:di})),...ne("content").map(e=>G("search.highlight")?ei(e,{index$:hi,location$:Pt}):M),...ne("header-title").map(e=>In(e,{viewport$:_e,header$:tt})),...ne("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Dr(ui,()=>Br(e,{viewport$:_e,header$:tt,main$:$t})):Dr(hr,()=>Br(e,{viewport$:_e,header$:tt,main$:$t}))),...ne("tabs").map(e=>ii(e,{viewport$:_e,header$:tt})),...ne("toc").map(e=>ai(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})),...ne("top").map(e=>si(e,{viewport$:_e,header$:tt,main$:$t,target$:wt})))),bi=rt.pipe(w(()=>Za),Re(Xa),Z(1));bi.subscribe();window.document$=rt;window.location$=Pt;window.target$=wt;window.keyboard$=Jr;window.viewport$=_e;window.tablet$=hr;window.screen$=ui;window.print$=di;window.alert$=Xr;window.progress$=Zr;window.component$=bi;})();
+//# sourceMappingURL=bundle.7389ff0e.min.js.map
+
diff --git a/site/assets/javascripts/bundle.7389ff0e.min.js.map b/site/assets/javascripts/bundle.7389ff0e.min.js.map
new file mode 100644
index 0000000..dbee324
--- /dev/null
+++ b/site/assets/javascripts/bundle.7389ff0e.min.js.map
@@ -0,0 +1,7 @@
+{
+ "version": 3,
+ "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts",
"node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", 
"node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/sample.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", "src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", "src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", 
"src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", "src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", "src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", "src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? 
factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n 
document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. 
This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 
'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. 
You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allows firing a copy action programmatically\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allows firing a cut action programmatically\n * @param {String|HTMLElement} target\n * @returns Text cut.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && 
value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName 
=== 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) {\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n 
var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '&quot;';\n break;\n case 38: // &\n escape = '&amp;';\n break;\n case 39: // '\n escape = '&#39;';\n break;\n case 60: // <\n escape = '&lt;';\n break;\n case 62: // >\n escape = '&gt;';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath <martin.donath@squidfunk.com>\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable<SearchIndex> {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON<SearchIndex>(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * 
------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject<string>()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject<number>()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => 
mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. 
The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass<T>(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove<T>(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude<TeardownLogic, void>[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? 
[]).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on its own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? [_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude<TeardownLogic, void>): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * were not handled by consuming code in the usual subscription path. 
For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification<any>, subscriber: Subscriber<any>) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. 
Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. 
Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. 
This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. 
In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. 
Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. 
You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. 
Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * }\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. 
Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. 
(DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected _checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. 
This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. 
May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n },\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. `setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. 
// ----------------------------------------------------------------------------

import { Action } from './Action';
import { SchedulerAction } from '../types';
import { Subscription } from '../Subscription';
import { AsyncScheduler } from './AsyncScheduler';
import { intervalProvider } from './intervalProvider';
import { arrRemove } from '../util/arrRemove';
import { TimerHandle } from './timerHandle';

export class AsyncAction<T> extends Action<T> {
  public id: TimerHandle | undefined;
  public state?: T;
  // @ts-ignore: Property has no initializer and is not definitely assigned
  public delay: number;
  protected pending: boolean = false;

  constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction<T>, state?: T) => void) {
    super(scheduler, work);
  }

  public schedule(state?: T, delay: number = 0): Subscription {
    if (this.closed) {
      return this;
    }

    // Always replace the current state with the new state.
    this.state = state;

    const id = this.id;
    const scheduler = this.scheduler;

    //
    // Important implementation note:
    //
    // Actions only execute once by default, unless rescheduled from within the
    // scheduled callback. This allows us to implement single and repeat
    // actions via the same code path, without adding API surface area, as well
    // as mimic traditional recursion but across asynchronous boundaries.
    //
    // However, JS runtimes and timers distinguish between intervals achieved by
    // serial `setTimeout` calls vs. a single `setInterval` call. An interval of
    // serial `setTimeout` calls can be individually delayed, which delays
    // scheduling the next `setTimeout`, and so on. `setInterval` attempts to
    // guarantee the interval callback will be invoked more precisely to the
    // interval period, regardless of load.
    //
    // Therefore, we use `setInterval` to schedule single and repeat actions.
    // If the action reschedules itself with the same delay, the interval is not
    // canceled. If the action doesn't reschedule, or reschedules with a
    // different delay, the interval will be canceled after scheduled callback
    // execution.
    //
    if (id != null) {
      this.id = this.recycleAsyncId(scheduler, id, delay);
    }

    // Set the pending flag indicating that this action has been scheduled, or
    // has recursively rescheduled itself.
    this.pending = true;

    this.delay = delay;
    // If this action already has an async Id, don't request a new one.
    this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);

    return this;
  }

  protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {
    return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);
  }

  protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {
    // If this action is rescheduled with the same delay time, don't clear the interval id.
    if (delay != null && this.delay === delay && this.pending === false) {
      return id;
    }
    // Otherwise, if the action's delay time is different from the current delay,
    // or the action has been rescheduled before it's executed, clear the interval id
    if (id != null) {
      intervalProvider.clearInterval(id);
    }

    return undefined;
  }

  /**
   * Immediately executes this action and the `work` it contains.
   * @return {any}
   */
  public execute(state: T, delay: number): any {
    if (this.closed) {
      return new Error('executing a cancelled action');
    }

    this.pending = false;
    const error = this._execute(state, delay);
    if (error) {
      return error;
    } else if (this.pending === false && this.id != null) {
      // Dequeue if the action didn't reschedule itself. Don't call
      // unsubscribe(), because the action could reschedule later.
      // For example:
      // ```
      // scheduler.schedule(function doWork(counter) {
      //   /* ... I'm a busy worker bee ... */
      //   var originalAction = this;
      //   /* wait 100ms before rescheduling the action */
      //   setTimeout(function () {
      //     originalAction.schedule(counter + 1);
      //   }, 100);
      // }, 1000);
      // ```
      this.id = this.recycleAsyncId(this.scheduler, this.id, null);
    }
  }

  protected _execute(state: T, _delay: number): any {
    let errored: boolean = false;
    let errorValue: any;
    try {
      this.work(state);
    } catch (e) {
      errored = true;
      // HACK: Since code elsewhere is relying on the "truthiness" of the
      // return here, we can't have it return "" or 0 or false.
      // TODO: Clean this up when we refactor schedulers mid-version-8 or so.
      errorValue = e ? e : new Error('Scheduled action threw falsy error');
    }
    if (errored) {
      this.unsubscribe();
      return errorValue;
    }
  }

  unsubscribe() {
    if (!this.closed) {
      const { id, scheduler } = this;
      const { actions } = scheduler;

      this.work = this.state = this.scheduler = null!;
      this.pending = false;

      arrRemove(actions, this);
      if (id != null) {
        this.id = this.recycleAsyncId(scheduler, id, null);
      }

      this.delay = null!;
      super.unsubscribe();
    }
  }
}
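// Illustration of the implementation note above (usage sketch, not library
// code): rescheduling from inside the work function with the *same* delay
// satisfies the recycleAsyncId fast path, so the one underlying setInterval is
// kept alive instead of being torn down and re-created per tick.
import { asyncScheduler } from 'rxjs';

const sub = asyncScheduler.schedule(function (count?: number) {
  console.log(count); // 0, 1, 2, 3, 4 at 100ms intervals
  if ((count ?? 0) < 4) {
    this.schedule((count ?? 0) + 1, 100); // same 100ms delay -> interval reused
  }
}, 100, 0);
// sub.unsubscribe() would clear the interval via recycleAsyncId(scheduler, id, null).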
// ----------------------------------------------------------------------------

import { Action } from './scheduler/Action';
import { Subscription } from './Subscription';
import { SchedulerLike, SchedulerAction } from './types';
import { dateTimestampProvider } from './scheduler/dateTimestampProvider';

/**
 * An execution context and a data structure to order tasks and schedule their
 * execution. Provides a notion of (potentially virtual) time, through the
 * `now()` getter method.
 *
 * Each unit of work in a Scheduler is called an `Action`.
 *
 * ```ts
 * class Scheduler {
 *   now(): number;
 *   schedule(work, delay?, state?): Subscription;
 * }
 * ```
 *
 * @class Scheduler
 * @deprecated Scheduler is an internal implementation detail of RxJS, and
 * should not be used directly. Rather, create your own class and implement
 * {@link SchedulerLike}. Will be made internal in v8.
 */
export class Scheduler implements SchedulerLike {
  public static now: () => number = dateTimestampProvider.now;

  constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {
    this.now = now;
  }

  /**
   * A getter method that returns a number representing the current time
   * (at the time this function was called) according to the scheduler's own
   * internal clock.
   * @return {number} A number that represents the current time. May or may not
   * have a relation to wall-clock time. May or may not refer to a time unit
   * (e.g. milliseconds).
   */
  public now: () => number;

  /**
   * Schedules a function, `work`, for execution. May happen at some point in
   * the future, according to the `delay` parameter, if specified. May be passed
   * some context object, `state`, which will be passed to the `work` function.
   *
   * The given arguments will be processed and stored as an Action object in a
   * queue of actions.
   *
   * @param {function(state: ?T): ?Subscription} work A function representing a
   * task, or some unit of work to be executed by the Scheduler.
   * @param {number} [delay] Time to wait before executing the work, where the
   * time unit is implicit and defined by the Scheduler itself.
   * @param {T} [state] Some contextual data that the `work` function uses when
   * called by the Scheduler.
   * @return {Subscription} A subscription in order to be able to unsubscribe
   * the scheduled work.
   */
  public schedule<T>(work: (this: SchedulerAction<T>, state?: T) => void, delay: number = 0, state?: T): Subscription {
    return new this.schedulerActionCtor(this, work).schedule(state, delay);
  }
}

// ----------------------------------------------------------------------------

import { Scheduler } from '../Scheduler';
import { Action } from './Action';
import { AsyncAction } from './AsyncAction';
import { TimerHandle } from './timerHandle';

export class AsyncScheduler extends Scheduler {
  public actions: Array<AsyncAction<any>> = [];
  /**
   * A flag to indicate whether the Scheduler is currently executing a batch of
   * queued actions.
   * @type {boolean}
   * @internal
   */
  public _active: boolean = false;
  /**
   * An internal ID used to track the latest asynchronous task such as those
   * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and
   * others.
   * @type {any}
   * @internal
   */
  public _scheduled: TimerHandle | undefined;

  constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {
    super(SchedulerAction, now);
  }

  public flush(action: AsyncAction<any>): void {
    const { actions } = this;

    if (this._active) {
      actions.push(action);
      return;
    }

    let error: any;
    this._active = true;

    do {
      if ((error = action.execute(action.state, action.delay))) {
        break;
      }
    } while ((action = actions.shift()!)); // exhaust the scheduler queue

    this._active = false;

    if (error) {
      while ((action = actions.shift()!)) {
        action.unsubscribe();
      }
      throw error;
    }
  }
}
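// Hedged sketch of the deprecation advice in the Scheduler doc comment above:
// rather than subclassing Scheduler, implement SchedulerLike yourself. This toy
// version runs work synchronously and ignores `delay`; it is illustrative only.
import { SchedulerLike, Subscription } from 'rxjs';

class ImmediateScheduler implements SchedulerLike {
  now(): number {
    return Date.now();
  }

  schedule<T>(work: (this: any, state?: T) => void, _delay?: number, state?: T): Subscription {
    const subscription = new Subscription();
    // No re-entry support: a real scheduler would pass a SchedulerAction as
    // `this` so the work could reschedule itself, as AsyncAction does above.
    work.call(subscription, state);
    return subscription;
  }
}
// In practice, asapScheduler/queueScheduler already cover most such needs.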
// ----------------------------------------------------------------------------

import { AsyncAction } from './AsyncAction';
import { AsyncScheduler } from './AsyncScheduler';

/**
 *
 * Async Scheduler
 *
 * Schedule task as if you used setTimeout(task, duration)
 *
 * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript
 * event loop queue. It is best used to delay tasks in time or to schedule tasks repeating
 * in intervals.
 *
 * If you just want to "defer" a task, that is, to perform it right after the currently
 * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),
 * a better choice is the {@link asapScheduler} scheduler.
 *
 * ## Examples
 * Use async scheduler to delay task
 * ```ts
 * import { asyncScheduler } from 'rxjs';
 *
 * const task = () => console.log('it works!');
 *
 * asyncScheduler.schedule(task, 2000);
 *
 * // After 2 seconds logs:
 * // "it works!"
 * ```
 *
 * Use async scheduler to repeat task in intervals
 * ```ts
 * import { asyncScheduler } from 'rxjs';
 *
 * function task(state) {
 *   console.log(state);
 *   this.schedule(state + 1, 1000); // `this` references currently executing Action,
 *                                   // which we reschedule with new state and delay
 * }
 *
 * asyncScheduler.schedule(task, 3000, 0);
 *
 * // Logs:
 * // 0 after 3s
 * // 1 after 4s
 * // 2 after 5s
 * // 3 after 6s
 * ```
 */

export const asyncScheduler = new AsyncScheduler(AsyncAction);

/**
 * @deprecated Renamed to {@link asyncScheduler}. Will be removed in v8.
 */
export const async = asyncScheduler;

// ----------------------------------------------------------------------------

import { AsyncAction } from './AsyncAction';
import { AnimationFrameScheduler } from './AnimationFrameScheduler';
import { SchedulerAction } from '../types';
import { animationFrameProvider } from './animationFrameProvider';
import { TimerHandle } from './timerHandle';

export class AnimationFrameAction<T> extends AsyncAction<T> {
  constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction<T>, state?: T) => void) {
    super(scheduler, work);
  }

  protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {
    // If delay is greater than 0, request as an async action.
    if (delay !== null && delay > 0) {
      return super.requestAsyncId(scheduler, id, delay);
    }
    // Push the action to the end of the scheduler queue.
    scheduler.actions.push(this);
    // If an animation frame has already been requested, don't request another
    // one. If an animation frame hasn't been requested yet, request one. Return
    // the current animation frame request id.
    return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));
  }

  protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {
    // If delay exists and is greater than 0, or if the delay is null (the
    // action wasn't rescheduled) but was originally scheduled as an async
    // action, then recycle as an async action.
    if (delay != null ? delay > 0 : this.delay > 0) {
      return super.recycleAsyncId(scheduler, id, delay);
    }
    // If the scheduler queue has no remaining actions with the same async id,
    // cancel the requested animation frame and set the scheduled flag to
    // undefined so the next AnimationFrameAction will request its own.
    const { actions } = scheduler;
    if (id != null && actions[actions.length - 1]?.id !== id) {
      animationFrameProvider.cancelAnimationFrame(id as number);
      scheduler._scheduled = undefined;
    }
    // Return undefined so the action knows to request a new async id if it's rescheduled.
    return undefined;
  }
}
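// Consequence of requestAsyncId above (usage sketch): several zero-delay
// schedules issued in the same tick share one requestAnimationFrame. The first
// sets scheduler._scheduled; the rest merely enqueue behind it.
import { animationFrameScheduler } from 'rxjs';

animationFrameScheduler.schedule(() => console.log('a')); // requests the frame
animationFrameScheduler.schedule(() => console.log('b')); // reuses it
animationFrameScheduler.schedule(() => console.log('c')); // reuses it
// One frame later: 'a', 'b', 'c' are logged in a single flush.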
// ----------------------------------------------------------------------------

import { AsyncAction } from './AsyncAction';
import { AsyncScheduler } from './AsyncScheduler';

export class AnimationFrameScheduler extends AsyncScheduler {
  public flush(action?: AsyncAction<any>): void {
    this._active = true;
    // The async id that effects a call to flush is stored in _scheduled.
    // Before executing an action, it's necessary to check the action's async
    // id to determine whether it's supposed to be executed in the current
    // flush.
    // Previous implementations of this method used a count to determine this,
    // but that was unsound, as actions that are unsubscribed - i.e. cancelled -
    // are removed from the actions array and that can shift actions that are
    // scheduled to be executed in a subsequent flush into positions at which
    // they are executed within the current flush.
    const flushId = this._scheduled;
    this._scheduled = undefined;

    const { actions } = this;
    let error: any;
    action = action || actions.shift()!;

    do {
      if ((error = action.execute(action.state, action.delay))) {
        break;
      }
    } while ((action = actions[0]) && action.id === flushId && actions.shift());

    this._active = false;

    if (error) {
      while ((action = actions[0]) && action.id === flushId && actions.shift()) {
        action.unsubscribe();
      }
      throw error;
    }
  }
}

// ----------------------------------------------------------------------------

import { AnimationFrameAction } from './AnimationFrameAction';
import { AnimationFrameScheduler } from './AnimationFrameScheduler';

/**
 *
 * Animation Frame Scheduler
 *
 * Perform task when `window.requestAnimationFrame` would fire
 *
 * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler
 * behaviour.
 *
 * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.
 * It makes sure a scheduled task happens just before the next browser content repaint,
 * thus performing animations as efficiently as possible.
 *
 * ## Example
 * Schedule div height animation
 * ```ts
 * // html: <div style="background: #0ff;"></div>
 * import { animationFrameScheduler } from 'rxjs';
 *
 * const div = document.querySelector('div');
 *
 * animationFrameScheduler.schedule(function(height) {
 *   div.style.height = height + "px";
 *
 *   this.schedule(height + 1);  // `this` references currently executing Action,
 *                               // which we reschedule with new state
 * }, 0, 0);
 *
 * // You will see a div element growing in height
 * ```
 */

export const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);

/**
 * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.
 */
export const animationFrame = animationFrameScheduler;
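// The fallback mentioned in the doc comment above, made concrete (sketch):
// with delay > 0, AnimationFrameAction.requestAsyncId defers to its AsyncAction
// super, so the work rides a timer instead of a frame request.
import { animationFrameScheduler } from 'rxjs';

animationFrameScheduler.schedule(() => console.log('timer path'), 16); // setInterval under the hood
animationFrameScheduler.schedule(() => console.log('frame path'));     // requestAnimationFrame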
// ----------------------------------------------------------------------------

import { Observable } from '../Observable';
import { SchedulerLike } from '../types';

/**
 * A simple Observable that emits no items to the Observer and immediately
 * emits a complete notification.
 *
 * Just emits 'complete', and nothing else.
 *
 * ![](empty.png)
 *
 * A simple Observable that only emits the complete notification. It can be used
 * for composing with other Observables, such as in a {@link mergeMap}.
 *
 * ## Examples
 *
 * Log complete notification
 *
 * ```ts
 * import { EMPTY } from 'rxjs';
 *
 * EMPTY.subscribe({
 *   next: () => console.log('Next'),
 *   complete: () => console.log('Complete!')
 * });
 *
 * // Outputs
 * // Complete!
 * ```
 *
 * Emit the number 7, then complete
 *
 * ```ts
 * import { EMPTY, startWith } from 'rxjs';
 *
 * const result = EMPTY.pipe(startWith(7));
 * result.subscribe(x => console.log(x));
 *
 * // Outputs
 * // 7
 * ```
 *
 * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`
 *
 * ```ts
 * import { interval, mergeMap, of, EMPTY } from 'rxjs';
 *
 * const interval$ = interval(1000);
 * const result = interval$.pipe(
 *   mergeMap(x => x % 2 === 1 ? of('a', 'b', 'c') : EMPTY),
 * );
 * result.subscribe(x => console.log(x));
 *
 * // Results in the following to the console:
 * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)
 * // x will occur every 1000ms
 * // if x % 2 is equal to 1, print a, b, c (each on its own)
 * // if x % 2 is not equal to 1, nothing will be output
 * ```
 *
 * @see {@link Observable}
 * @see {@link NEVER}
 * @see {@link of}
 * @see {@link throwError}
 */
export const EMPTY = new Observable<never>((subscriber) => subscriber.complete());

/**
 * @param scheduler A {@link SchedulerLike} to use for scheduling
 * the emission of the complete notification.
 * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.
 */
export function empty(scheduler?: SchedulerLike) {
  return scheduler ? emptyScheduled(scheduler) : EMPTY;
}

function emptyScheduled(scheduler: SchedulerLike) {
  return new Observable<never>((subscriber) => scheduler.schedule(() => subscriber.complete()));
}

// ----------------------------------------------------------------------------

import { SchedulerLike } from '../types';
import { isFunction } from './isFunction';

export function isScheduler(value: any): value is SchedulerLike {
  return value && isFunction(value.schedule);
}

// ----------------------------------------------------------------------------

import { SchedulerLike } from '../types';
import { isFunction } from './isFunction';
import { isScheduler } from './isScheduler';

function last<T>(arr: T[]): T | undefined {
  return arr[arr.length - 1];
}

export function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {
  return isFunction(last(args)) ? args.pop() : undefined;
}

export function popScheduler(args: any[]): SchedulerLike | undefined {
  return isScheduler(last(args)) ? args.pop() : undefined;
}

export function popNumber(args: any[], defaultValue: number): number {
  return typeof last(args) === 'number' ? args.pop()! : defaultValue;
}
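// Sketch of the call pattern these helpers support (`exampleOf` is a
// hypothetical stand-in for variadic creation functions such as of/merge): an
// optional trailing scheduler is popped off `args` before the remainder is
// treated as plain values.
import { asyncScheduler } from 'rxjs';

function exampleOf(...args: any[]) {
  const scheduler = popScheduler(args); // trailing SchedulerLike, if any, removed
  console.log({ scheduler, values: args });
}

exampleOf(1, 2, 3);                 // { scheduler: undefined, values: [1, 2, 3] }
exampleOf(1, 2, 3, asyncScheduler); // scheduler popped, values: [1, 2, 3]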
// ----------------------------------------------------------------------------

export const isArrayLike = (<T>(x: any): x is ArrayLike<T> => x && typeof x.length === 'number' && typeof x !== 'function');

// ----------------------------------------------------------------------------

import { isFunction } from "./isFunction";

/**
 * Tests to see if the object is "thenable".
 * @param value the object to test
 */
export function isPromise(value: any): value is PromiseLike<any> {
  return isFunction(value?.then);
}

// ----------------------------------------------------------------------------

import { InteropObservable } from '../types';
import { observable as Symbol_observable } from '../symbol/observable';
import { isFunction } from './isFunction';

/** Identifies an input as being Observable (but not necessarily an Rx Observable) */
export function isInteropObservable(input: any): input is InteropObservable<any> {
  return isFunction(input[Symbol_observable]);
}

// ----------------------------------------------------------------------------

import { isFunction } from './isFunction';

export function isAsyncIterable<T>(obj: any): obj is AsyncIterable<T> {
  return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);
}

// ----------------------------------------------------------------------------

/**
 * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.
 * @param input The object that was passed.
 */
export function createInvalidObservableTypeError(input: any) {
  // TODO: We should create error codes that can be looked up, so this can be less verbose.
  return new TypeError(
    `You provided ${
      input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`
    } where a stream was expected. You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`
  );
}

// ----------------------------------------------------------------------------

export function getSymbolIterator(): symbol {
  if (typeof Symbol !== 'function' || !Symbol.iterator) {
    return '@@iterator' as any;
  }

  return Symbol.iterator;
}

export const iterator = getSymbolIterator();

// ----------------------------------------------------------------------------

import { iterator as Symbol_iterator } from '../symbol/iterator';
import { isFunction } from './isFunction';

/** Identifies an input as being an Iterable */
export function isIterable(input: any): input is Iterable<any> {
  return isFunction(input?.[Symbol_iterator]);
}

// ----------------------------------------------------------------------------

import { ReadableStreamLike } from '../types';
import { isFunction } from './isFunction';

export async function* readableStreamLikeToAsyncGenerator<T>(readableStream: ReadableStreamLike<T>): AsyncGenerator<T> {
  const reader = readableStream.getReader();
  try {
    while (true) {
      const { value, done } = await reader.read();
      if (done) {
        return;
      }
      yield value!;
    }
  } finally {
    reader.releaseLock();
  }
}

export function isReadableStreamLike<T>(obj: any): obj is ReadableStreamLike<T> {
  // We don't want to use instanceof checks because they would return
  // false for instances from another Realm, like an