diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py
index 787e88123eb..964dfc01ef8 100644
--- a/packages/common-library/src/common_library/serialization.py
+++ b/packages/common-library/src/common_library/serialization.py
@@ -1,13 +1,14 @@
 from datetime import timedelta
 from typing import Any
 
-from common_library.pydantic_fields_extension import get_type
 from pydantic import BaseModel, SecretStr
 from pydantic_core import Url
 
+from .pydantic_fields_extension import get_type
+
 
 def model_dump_with_secrets(
-    settings_obj: BaseModel, show_secrets: bool, **pydantic_export_options
+    settings_obj: BaseModel, *, show_secrets: bool, **pydantic_export_options
 ) -> dict[str, Any]:
     data = settings_obj.model_dump(**pydantic_export_options)
 
@@ -25,16 +26,16 @@ def model_dump_with_secrets(
                 data[field_name] = field_data.get_secret_value()
             else:
                 data[field_name] = str(field_data)
-
+
         elif isinstance(field_data, Url):
             data[field_name] = str(field_data)
-
+
         elif isinstance(field_data, dict):
             field_type = get_type(settings_obj.model_fields[field_name])
             if issubclass(field_type, BaseModel):
                 data[field_name] = model_dump_with_secrets(
                     field_type.model_validate(field_data),
-                    show_secrets,
+                    show_secrets=show_secrets,
                     **pydantic_export_options,
                 )
diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py
index 073a8ae2d5e..0311ed28d7b 100644
--- a/packages/settings-library/src/settings_library/utils_cli.py
+++ b/packages/settings-library/src/settings_library/utils_cli.py
@@ -8,7 +8,9 @@
 import rich
 import typer
 from common_library.serialization import model_dump_with_secrets
+from models_library.utils.json_serialization import json_dumps
 from pydantic import ValidationError
+from pydantic_core import to_jsonable_python
 from pydantic_settings import BaseSettings
 
 from ._constants import HEADER_STR
@@ -87,7 +89,7 @@ def print_as_json(
 def create_settings_command(
     settings_cls: type[BaseCustomSettings],
     logger: logging.Logger | None = None,
-    json_serializer=json.dumps,
+    json_serializer=json_dumps,
 ) -> Callable:
     """Creates typer command function for settings"""
 
@@ -112,14 +114,24 @@ def settings(
         """Resolves settings and prints envfile"""
 
         if as_json_schema:
-            typer.echo(settings_cls.schema_json(indent=0 if compact else 2))
+            typer.echo(
+                json.dumps(
+                    settings_cls.model_json_schema(),
+                    default=to_jsonable_python,
+                    indent=0 if compact else 2,
+                )
+            )
             return
 
         try:
             settings_obj = settings_cls.create_from_envs()
 
         except ValidationError as err:
-            settings_schema = settings_cls.schema_json(indent=2)
+            settings_schema = json.dumps(
+                settings_cls.model_json_schema(),
+                default=to_jsonable_python,
+                indent=2,
+            )
             assert logger is not None  # nosec
             logger.error(  # noqa: TRY400
diff --git a/services/dask-sidecar/requirements/_base.in b/services/dask-sidecar/requirements/_base.in
index 3bb1150d8e1..2352652e4a0 100644
--- a/services/dask-sidecar/requirements/_base.in
+++ b/services/dask-sidecar/requirements/_base.in
@@ -25,6 +25,6 @@ dask[distributed, diagnostics]
 dask-gateway # needed for the osparc-dask-gateway to preload the module
 fsspec[http, s3] # sub types needed as we acces http and s3 here
 lz4 # for compression
-pydantic[email,dotenv]
+pydantic
 prometheus_client
 repro-zipfile
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index 6d776591b15..9c36e1d2e6e 100644
--- 
a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -16,13 +16,22 @@ aiofiles==23.2.1 # -r requirements/_base.in aiohttp==3.9.5 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiobotocore @@ -36,6 +45,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -59,13 +70,22 @@ botocore==1.34.106 # via aiobotocore certifi==2024.7.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests @@ -137,13 +157,22 @@ importlib-metadata==7.1.0 # opentelemetry-api jinja2==3.1.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # bokeh @@ -222,13 +251,22 @@ opentelemetry-util-http==0.47b0 # via opentelemetry-instrumentation-requests orjson==3.10.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -257,27 +295,62 @@ psutil==6.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r 
requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.6.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -288,18 +361,27 @@ python-dateutil==2.9.0.post0 # botocore # pandas python-dotenv==1.0.1 - # via pydantic + # via pydantic-settings pytz==2024.1 # via pandas pyyaml==6.0.1 # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -309,13 +391,22 @@ pyyaml==6.0.1 # distributed redis==5.0.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -384,18 +475,28 @@ typing-extensions==4.11.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer tzdata==2024.1 # via pandas urllib3==2.2.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # botocore diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index 7f13a97ad89..92130c87b77 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto attrs==23.2.0 @@ -141,11 +145,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyftpdlib==2.0.0 # via pytest-localftpserver pyopenssl==24.2.1 @@ -244,6 +252,7 @@ typing-extensions==4.11.0 # aws-sam-translator # cfn-lint # pydantic + # pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index 63e9bc97a1b..b6ae0b25611 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -71,7 +71,7 @@ async def _write_input_data( if isinstance(input_params, FileUrl): file_name = ( input_params.file_mapping - or Path(URL(input_params.url).path.strip("/")).name + or Path(URL(f"{input_params.url}").path.strip("/")).name ) destination_path = task_volumes.inputs_folder / file_name @@ -114,7 +114,7 @@ async def _retrieve_output_data( ) _logger.debug( "following outputs will be searched for:\n%s", - self.task_parameters.output_data_keys.json(indent=1), + self.task_parameters.output_data_keys.model_dump_json(indent=1), ) output_data = TaskOutputData.from_task_output( @@ -132,7 +132,7 @@ async def _retrieve_output_data( if isinstance(output_params, FileUrl): assert ( # nosec output_params.file_mapping - ), f"{output_params.json(indent=1)} expected resolved in TaskOutputData.from_task_output" + ), 
f"{output_params.model_dump_json(indent=1)} expected resolved in TaskOutputData.from_task_output" src_path = task_volumes.outputs_folder / output_params.file_mapping upload_tasks.append( @@ -146,7 +146,9 @@ async def _retrieve_output_data( await asyncio.gather(*upload_tasks) await self._publish_sidecar_log("All the output data were uploaded.") - _logger.info("retrieved outputs data:\n%s", output_data.json(indent=1)) + _logger.info( + "retrieved outputs data:\n%s", output_data.model_dump_json(indent=1) + ) return output_data except (ValueError, ValidationError) as exc: diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py index 0a2d9e3e9d3..289f5df9169 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py @@ -26,7 +26,7 @@ from models_library.services_resources import BootMode from models_library.utils.labels_annotations import OSPARC_LABEL_PREFIXES, from_labels from packaging import version -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.logging_utils import ( LogLevelInt, LogMessageStr, @@ -95,7 +95,7 @@ async def create_container_config( NanoCPUs=nano_cpus_limit, ), ) - logger.debug("Container configuration: \n%s", pformat(config.dict())) + logger.debug("Container configuration: \n%s", pformat(config.model_dump())) return config @@ -109,7 +109,7 @@ async def managed_container( logger, logging.DEBUG, msg=f"managing container {name} for {config.image}" ): container = await docker_client.containers.create( - config.dict(by_alias=True), name=name + config.model_dump(by_alias=True), name=name ) yield container except asyncio.CancelledError: @@ -443,7 +443,7 @@ async def get_image_labels( data = from_labels( image_labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - return parse_obj_as(ImageLabels, data) + return TypeAdapter(ImageLabels).validate_python(data) return ImageLabels() diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py index eabe5f00d03..8e5d1e8794f 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ComputationalSidecarRuntimeError(PydanticErrorMixin, RuntimeError): +class ComputationalSidecarRuntimeError(OsparcErrorMixin, RuntimeError): ... 
diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py index 691192716e9..ee270aeb888 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py @@ -3,7 +3,14 @@ from models_library.basic_regex import SIMPLE_VERSION_RE from models_library.services import ServiceMetaDataPublished from packaging import version -from pydantic import BaseModel, ByteSize, Extra, Field, validator +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + Field, + field_validator, + model_validator, +) LEGACY_INTEGRATION_VERSION = version.Version("0") PROGRESS_REGEXP: re.Pattern[str] = re.compile( @@ -41,21 +48,15 @@ class ContainerHostConfig(BaseModel): ..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs" ) - @validator("memory_swap", pre=True, always=True) - @classmethod - def ensure_no_memory_swap_means_no_swap(cls, v, values): - if v is None: - # if not set it will be the same value as memory to ensure swap is disabled - return values["memory"] - return v + @model_validator(mode="after") + def ensure_memory_swap_is_not_unlimited(self) -> "ContainerHostConfig": + if self.memory_swap is None: + self.memory_swap = self.memory - @validator("memory_swap") - @classmethod - def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values): - if v < values["memory"]: + if self.memory_swap < self.memory: msg = "Memory swap cannot be set to a smaller value than memory" raise ValueError(msg) - return v + return self class DockerContainerConfig(BaseModel): @@ -71,7 +72,7 @@ class ImageLabels(BaseModel): default=str(LEGACY_INTEGRATION_VERSION), alias="integration-version", description="integration version number", - regex=SIMPLE_VERSION_RE, + pattern=SIMPLE_VERSION_RE, examples=["1.0.0"], ) progress_regexp: str = Field( @@ -79,18 +80,16 @@ class ImageLabels(BaseModel): alias="progress_regexp", description="regexp pattern for detecting computational service's progress", ) + model_config = ConfigDict(extra="ignore") - class Config: - extra = Extra.ignore - - @validator("integration_version", pre=True) + @field_validator("integration_version", mode="before") @classmethod def default_integration_version(cls, v): if v is None: return ImageLabels().integration_version return v - @validator("progress_regexp", pre=True) + @field_validator("progress_regexp", mode="before") @classmethod def default_progress_regexp(cls, v): if v is None: @@ -104,6 +103,6 @@ def get_progress_regexp(self) -> re.Pattern[str]: return re.compile(self.progress_regexp) -assert set(ImageLabels.__fields__).issubset( - ServiceMetaDataPublished.__fields__ +assert set(ImageLabels.model_fields).issubset( + ServiceMetaDataPublished.model_fields ), "ImageLabels must be compatible with ServiceDockerData" diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py index e042c5c022a..d04682dac07 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py @@ -153,4 +153,4 @@ async def periodicaly_check_if_aborted(task_name: str) -> None: def publish_event(dask_pub: distributed.Pub, event: BaseTaskEvent) -> None: """never reraises, only CancellationError""" with log_catch(_logger, 
reraise=False): - dask_pub.put(event.json()) + dask_pub.put(event.model_dump_json()) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py index b14b5db657f..e5b78bd286a 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py @@ -13,7 +13,7 @@ import aiofiles.tempfile import fsspec # type: ignore[import-untyped] import repro_zipfile # type: ignore[import-untyped] -from pydantic import ByteSize, FileUrl, parse_obj_as +from pydantic import ByteSize, FileUrl, TypeAdapter from pydantic.networks import AnyUrl from servicelib.logging_utils import LogLevelInt, LogMessageStr from settings_library.s3 import S3Settings @@ -96,9 +96,9 @@ async def _copy_file( ): src_storage_kwargs = src_storage_cfg or {} dst_storage_kwargs = dst_storage_cfg or {} - with fsspec.open(src_url, mode="rb", **src_storage_kwargs) as src_fp, fsspec.open( - dst_url, "wb", **dst_storage_kwargs - ) as dst_fp: + with fsspec.open( + f"{src_url}", mode="rb", **src_storage_kwargs + ) as src_fp, fsspec.open(f"{dst_url}", "wb", **dst_storage_kwargs) as dst_fp: assert isinstance(src_fp, IOBase) # nosec assert isinstance(dst_fp, IOBase) # nosec file_size = getattr(src_fp, "size", None) @@ -148,7 +148,7 @@ async def pull_file_from_remote( storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) await _copy_file( src_url, - parse_obj_as(FileUrl, dst_path.as_uri()), + TypeAdapter(FileUrl).validate_python(dst_path.as_uri()), src_storage_cfg=cast(dict[str, Any], storage_kwargs), log_publishing_cb=log_publishing_cb, text_prefix=f"Downloading '{src_url.path.strip('/')}':", @@ -218,7 +218,7 @@ async def _push_file_to_remote( storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) await _copy_file( - parse_obj_as(FileUrl, file_to_upload.as_uri()), + TypeAdapter(FileUrl).validate_python(file_to_upload.as_uri()), dst_url, dst_storage_cfg=cast(dict[str, Any], storage_kwargs), log_publishing_cb=log_publishing_cb, @@ -246,7 +246,7 @@ async def push_file_to_remote( src_mime_type, _ = mimetypes.guess_type(src_path) if dst_mime_type == _ZIP_MIME_TYPE and src_mime_type != _ZIP_MIME_TYPE: - archive_file_path = Path(tmp_dir) / Path(URL(dst_url).path).name + archive_file_path = Path(tmp_dir) / Path(URL(f"{dst_url}").path).name await log_publishing_cb( f"Compressing '{src_path.name}' to '{archive_file_path.name}'...", logging.INFO, diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py index 80661c7ecb2..7073f2a4caa 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py @@ -2,7 +2,7 @@ from typing import Any from models_library.basic_types import LogLevel -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.utils_logging import MixinLoggingSettings @@ -14,7 +14,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: str | None = None LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) # sidecar config --- @@ -37,7 +39,10 @@ class 
Settings(BaseCustomSettings, MixinLoggingSettings): DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices( + "DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -50,7 +55,7 @@ def as_worker(self) -> bool: assert self.DASK_SCHEDULER_HOST is not None # nosec return as_worker - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: Any) -> str: return cls.validate_log_level(f"{value}") diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py index 79dfd08cbdb..e818a3301b6 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py @@ -67,7 +67,7 @@ async def dask_setup(worker: distributed.Worker) -> None: ) logger.info("Setting up worker...") - logger.info("Settings: %s", pformat(settings.dict())) + logger.info("Settings: %s", pformat(settings.model_dump())) print_dask_sidecar_banner() @@ -94,7 +94,7 @@ async def _run_computational_sidecar_async( _logger.info( "run_computational_sidecar %s", - f"{task_parameters.dict()=}, {docker_auth=}, {log_file_url=}, {s3_settings=}", + f"{task_parameters.model_dump()=}, {docker_auth=}, {log_file_url=}, {s3_settings=}", ) current_task = asyncio.current_task() assert current_task # nosec diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py index 936d54a3377..61481d32c0a 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py @@ -6,7 +6,7 @@ import aiodocker from aiodocker.containers import DockerContainer -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter logger = logging.getLogger(__name__) @@ -57,7 +57,7 @@ async def async_num_available_gpus() -> int: if container_data.setdefault("StatusCode", 127) == 0 else 0 ) - except asyncio.TimeoutError as err: + except TimeoutError as err: logger.warning( "num_gpus timedout while check-run %s: %s", spec_config, err ) @@ -100,14 +100,14 @@ async def async_video_memory() -> int: Coroutine, container.log(stdout=True, stderr=True, follow=False), ) - video_ram = parse_obj_as(ByteSize, 0) + video_ram = TypeAdapter(ByteSize).validate_python(0) if container_data.setdefault("StatusCode", 127) == 0: for line in container_logs: - video_ram = parse_obj_as( - ByteSize, video_ram + parse_obj_as(ByteSize, line) + video_ram = TypeAdapter(ByteSize).validate_python( + video_ram + TypeAdapter(ByteSize).validate_python(line) ) - except asyncio.TimeoutError as err: + except TimeoutError as err: logger.warning( "num_gpus timedout while check-run %s: %s", spec_config, err ) diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index edc92c87969..4d4801752d9 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ b/services/dask-sidecar/tests/unit/conftest.py @@ -6,6 +6,7 @@ from collections.abc import AsyncIterator, Callable, Iterator from pathlib import Path from pprint import pformat +from typing import cast import dask import dask.config @@ -19,7 +20,7 @@ 
from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_localftpserver.servers import ProcessFTPServer from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -169,8 +170,7 @@ def s3_settings(mocked_s3_server_envs: None) -> S3Settings: @pytest.fixture def s3_endpoint_url(s3_settings: S3Settings) -> AnyUrl: assert s3_settings.S3_ENDPOINT - return parse_obj_as( - AnyUrl, + return TypeAdapter(AnyUrl).validate_python( f"{s3_settings.S3_ENDPOINT}", ) @@ -203,9 +203,7 @@ async def bucket( response = await aiobotocore_s3_client.list_buckets() assert response["Buckets"] assert len(response["Buckets"]) == 1 - bucket_name = response["Buckets"][0]["Name"] - return bucket_name - # await _clean_bucket_content(aiobotocore_s3_client, bucket_name) + return response["Buckets"][0]["Name"] @pytest.fixture @@ -214,7 +212,7 @@ def creator(file_path: Path | None = None) -> AnyUrl: file_path_with_bucket = Path(s3_settings.S3_BUCKET_NAME) / ( file_path or faker.file_name() ) - return parse_obj_as(AnyUrl, f"s3://{file_path_with_bucket}") + return TypeAdapter(AnyUrl).validate_python(f"s3://{file_path_with_bucket}") return creator @@ -230,7 +228,7 @@ def file_on_s3_server( def creator() -> AnyUrl: new_remote_file = s3_remote_file_url() - open_file = fsspec.open(new_remote_file, mode="wt", **s3_storage_kwargs) + open_file = fsspec.open(f"{new_remote_file}", mode="wt", **s3_storage_kwargs) with open_file as fp: fp.write( # type: ignore f"This is the file contents of file #'{(len(list_of_created_files)+1):03}'\n" @@ -245,7 +243,7 @@ def creator() -> AnyUrl: # cleanup fs = fsspec.filesystem("s3", **s3_storage_kwargs) for file in list_of_created_files: - fs.delete(file.partition(f"{file.scheme}://")[2]) + fs.delete(f"{file}".partition(f"{file.scheme}://")[2]) @pytest.fixture @@ -255,12 +253,12 @@ def job_id() -> str: @pytest.fixture def project_id(faker: Faker) -> ProjectID: - return faker.uuid4(cast_to=None) + return cast(ProjectID, faker.uuid4(cast_to=None)) @pytest.fixture def node_id(faker: Faker) -> NodeID: - return faker.uuid4(cast_to=None) + return cast(NodeID, faker.uuid4(cast_to=None)) @pytest.fixture(params=["no_parent_node", "with_parent_node"]) @@ -276,9 +274,13 @@ def task_owner( project_id=project_id, node_id=node_id, parent_project_id=( - None if request.param == "no_parent_node" else faker.uuid4(cast_to=None) + None + if request.param == "no_parent_node" + else cast(ProjectID, faker.uuid4(cast_to=None)) ), parent_node_id=( - None if request.param == "no_parent_node" else faker.uuid4(cast_to=None) + None + if request.param == "no_parent_node" + else cast(NodeID, faker.uuid4(cast_to=None)) ), ) diff --git a/services/dask-sidecar/tests/unit/test_cli.py b/services/dask-sidecar/tests/unit/test_cli.py index 4af796ec69b..101b0e4bcdc 100644 --- a/services/dask-sidecar/tests/unit/test_cli.py +++ b/services/dask-sidecar/tests/unit/test_cli.py @@ -29,5 +29,5 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): assert result.exit_code == os.EX_OK, result.output print(result.output) - settings = Settings.parse_raw(result.output) + settings = Settings.model_validate_json(result.output) assert settings == Settings.create_from_envs() diff --git a/services/dask-sidecar/tests/unit/test_dask_utils.py b/services/dask-sidecar/tests/unit/test_dask_utils.py index 
a12ee06e211..214a9550200 100644 --- a/services/dask-sidecar/tests/unit/test_dask_utils.py +++ b/services/dask-sidecar/tests/unit/test_dask_utils.py @@ -52,7 +52,8 @@ def test_publish_event( # hence the long time out message = dask_sub.get(timeout=DASK_TESTING_TIMEOUT_S) assert message is not None - received_task_log_event = TaskLogEvent.parse_raw(message) # type: ignore + assert isinstance(message, str) + received_task_log_event = TaskLogEvent.model_validate_json(message) assert received_task_log_event == event_to_publish @@ -73,7 +74,7 @@ async def test_publish_event_async( assert isinstance(message, Coroutine) message = await message assert message is not None - received_task_log_event = TaskLogEvent.parse_raw(message) # type: ignore + received_task_log_event = TaskLogEvent.model_validate_json(message) assert received_task_log_event == event_to_publish diff --git a/services/dask-sidecar/tests/unit/test_docker_utils.py b/services/dask-sidecar/tests/unit/test_docker_utils.py index 41e801b70bb..4bc154edd95 100644 --- a/services/dask-sidecar/tests/unit/test_docker_utils.py +++ b/services/dask-sidecar/tests/unit/test_docker_utils.py @@ -91,7 +91,7 @@ async def test_create_container_config( envs=task_envs, labels=task_labels, ) - assert container_config.dict(by_alias=True) == ( + assert container_config.model_dump(by_alias=True) == ( { "Env": [ "INPUT_FOLDER=/inputs", @@ -221,7 +221,7 @@ async def test_managed_container_always_removes_container( call() .__aenter__() .containers.create( - container_config.dict(by_alias=True), name=None + container_config.model_dump(by_alias=True), name=None ), ] ) diff --git a/services/dask-sidecar/tests/unit/test_file_utils.py b/services/dask-sidecar/tests/unit/test_file_utils.py index 5c51f5f5b00..b31980b46a5 100644 --- a/services/dask-sidecar/tests/unit/test_file_utils.py +++ b/services/dask-sidecar/tests/unit/test_file_utils.py @@ -15,7 +15,7 @@ import fsspec import pytest from faker import Faker -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_localftpserver.servers import ProcessFTPServer from pytest_mock.plugin import MockerFixture from settings_library.s3 import S3Settings @@ -28,7 +28,6 @@ @pytest.fixture() async def mocked_log_publishing_cb( - event_loop: asyncio.AbstractEventLoop, mocker: MockerFixture, ) -> AsyncIterable[mock.AsyncMock]: async with mocker.AsyncMock() as mocked_callback: @@ -46,8 +45,8 @@ def s3_presigned_link_storage_kwargs(s3_settings: S3Settings) -> dict[str, Any]: @pytest.fixture def ftp_remote_file_url(ftpserver: ProcessFTPServer, faker: Faker) -> AnyUrl: - return parse_obj_as( - AnyUrl, f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}" + return TypeAdapter(AnyUrl).validate_python( + f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}" ) @@ -57,8 +56,7 @@ async def s3_presigned_link_remote_file_url( aiobotocore_s3_client, faker: Faker, ) -> AnyUrl: - return parse_obj_as( - AnyUrl, + return TypeAdapter(AnyUrl).validate_python( await aiobotocore_s3_client.generate_presigned_url( "put_object", Params={"Bucket": s3_settings.S3_BUCKET_NAME, "Key": faker.file_name()}, @@ -69,7 +67,9 @@ async def s3_presigned_link_remote_file_url( @pytest.fixture def s3_remote_file_url(s3_settings: S3Settings, faker: Faker) -> AnyUrl: - return parse_obj_as(AnyUrl, f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}") + return TypeAdapter(AnyUrl).validate_python( + f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}" + ) @dataclass(frozen=True) @@ -122,7 +122,7 @@ async def 
test_push_file_to_remote( with cast( fsspec.core.OpenFile, fsspec.open( - remote_parameters.remote_file_url, + f"{remote_parameters.remote_file_url}", mode="rt", **storage_kwargs, ), @@ -153,15 +153,14 @@ async def test_push_file_to_remote_s3_http_presigned_link( ) # check the remote is actually having the file in, but we need s3 access now - s3_remote_file_url = parse_obj_as( - AnyUrl, + s3_remote_file_url = TypeAdapter(AnyUrl).validate_python( f"s3:/{s3_presigned_link_remote_file_url.path}", ) storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) with cast( fsspec.core.OpenFile, - fsspec.open(s3_remote_file_url, mode="rt", **storage_kwargs), + fsspec.open(f"{s3_remote_file_url}", mode="rt", **storage_kwargs), ) as fp: assert fp.read() == TEXT_IN_FILE mocked_log_publishing_cb.assert_called() @@ -173,7 +172,9 @@ async def test_push_file_to_remote_compresses_if_zip_destination( faker: Faker, mocked_log_publishing_cb: mock.AsyncMock, ): - destination_url = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}.zip") + destination_url = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}.zip" + ) src_path = tmp_path / faker.file_name() TEXT_IN_FILE = faker.text() src_path.write_text(TEXT_IN_FILE) @@ -214,7 +215,7 @@ async def test_pull_file_from_remote( with cast( fsspec.core.OpenFile, fsspec.open( - remote_parameters.remote_file_url, + f"{remote_parameters.remote_file_url}", mode="wt", **storage_kwargs, ), @@ -250,7 +251,7 @@ async def test_pull_file_from_remote_s3_presigned_link( with cast( fsspec.core.OpenFile, fsspec.open( - s3_remote_file_url, + f"{s3_remote_file_url}", mode="wt", **storage_kwargs, ), @@ -259,8 +260,7 @@ async def test_pull_file_from_remote_s3_presigned_link( # create a corresponding presigned get link assert s3_remote_file_url.path - remote_file_url = parse_obj_as( - AnyUrl, + remote_file_url = TypeAdapter(AnyUrl).validate_python( await aiobotocore_s3_client.generate_presigned_url( "get_object", Params={ @@ -303,7 +303,9 @@ async def test_pull_compressed_zip_file_from_remote( zfp.write(local_test_file, local_test_file.name) file_names_within_zip_file.add(local_test_file.name) - destination_url = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}.zip") + destination_url = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}.zip" + ) storage_kwargs = {} if remote_parameters.s3_settings: storage_kwargs = _s3fs_settings_from_s3_settings(remote_parameters.s3_settings) @@ -311,7 +313,7 @@ async def test_pull_compressed_zip_file_from_remote( with cast( fsspec.core.OpenFile, fsspec.open( - destination_url, + f"{destination_url}", mode="wb", **storage_kwargs, ), @@ -395,8 +397,12 @@ async def test_push_file_to_remote_creates_reproducible_zip_archive( faker: Faker, mocked_log_publishing_cb: mock.AsyncMock, ): - destination_url1 = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}1.zip") - destination_url2 = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}2.zip") + destination_url1 = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}1.zip" + ) + destination_url2 = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}2.zip" + ) src_path = tmp_path / faker.file_name() TEXT_IN_FILE = faker.text() src_path.write_text(TEXT_IN_FILE) diff --git a/services/dask-sidecar/tests/unit/test_models.py b/services/dask-sidecar/tests/unit/test_models.py index 65ec5304631..f9e80f67fa4 100644 --- a/services/dask-sidecar/tests/unit/test_models.py +++ 
b/services/dask-sidecar/tests/unit/test_models.py @@ -10,7 +10,7 @@ def test_container_host_config_sets_swap_same_as_memory_if_not_set(faker: Faker) instance = ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint()), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), ) assert instance.memory == instance.memory_swap @@ -22,7 +22,7 @@ def test_container_host_config_raises_if_set_negative( ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=-84654, max_value=-1)), ) @@ -34,14 +34,14 @@ def test_container_host_config_raises_if_set_smaller_than_memory( ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(0), ) with pytest.raises(ValidationError): ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=1, max_value=233)), ) @@ -52,7 +52,7 @@ def test_container_host_config_sets_swap_if_set_bigger_than_memory( instance = ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234, max_value=434234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=434235, max_value=12343424234)), ) assert instance.memory_swap diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 4aff3a1fd3d..5beebe2e37f 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -41,7 +41,7 @@ from models_library.services import ServiceMetaDataPublished from models_library.services_resources import BootMode from packaging import version -from pydantic import AnyUrl, SecretStr, parse_obj_as +from pydantic import AnyUrl, SecretStr, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.s3 import S3Settings @@ -178,7 +178,9 @@ def integration_version(request: pytest.FixtureRequest) -> version.Version: @pytest.fixture def additional_envs(faker: Faker) -> dict[EnvVarKey, str]: - return parse_obj_as(dict[EnvVarKey, str], faker.pydict(allowed_types=(str,))) + return TypeAdapter(dict[EnvVarKey, str]).validate_python( + faker.pydict(allowed_types=(str,)) + ) @pytest.fixture @@ -198,7 +200,7 @@ def sleeper_task( list_of_files = [file_on_s3_server() for _ in range(NUM_FILES)] # defines the inputs of the task - input_data = TaskInputData.parse_obj( + input_data = TaskInputData.model_validate( { "input_1": 23, "input_23": "a string input", @@ -276,7 +278,7 @@ def sleeper_task( "pytest_bool": False, } output_file_url = s3_remote_file_url(file_path="output_file") - expected_output_keys = TaskOutputDataSchema.parse_obj( + expected_output_keys = TaskOutputDataSchema.model_validate( { **( {k: {"required": True} for k in jsonable_outputs} @@ -295,7 +297,7 @@ def sleeper_task( ), } ) - expected_output_data = TaskOutputData.parse_obj( + expected_output_data = TaskOutputData.model_validate( { **( jsonable_outputs @@ -395,10 +397,10 @@ def _creator(command: list[str] | None = None) -> 
ServiceExampleParam: service_version="latest", command=command or ["/bin/bash", "-c", "echo 'hello I'm an empty ubuntu task!"], - input_data=TaskInputData.parse_obj({}), - output_data_keys=TaskOutputDataSchema.parse_obj({}), + input_data=TaskInputData.model_validate({}), + output_data_keys=TaskOutputDataSchema.model_validate({}), log_file_url=s3_remote_file_url(file_path="log.dat"), - expected_output_data=TaskOutputData.parse_obj({}), + expected_output_data=TaskOutputData.model_validate({}), expected_logs=[], integration_version=integration_version, task_envs={}, @@ -433,12 +435,16 @@ def caplog_info_level( yield caplog +# from pydantic.json_schema import JsonDict + + @pytest.fixture def mocked_get_image_labels( integration_version: version.Version, mocker: MockerFixture ) -> mock.Mock: - labels: ImageLabels = parse_obj_as( - ImageLabels, ServiceMetaDataPublished.Config.schema_extra["examples"][0] + assert "json_schema_extra" in ServiceMetaDataPublished.model_config + labels: ImageLabels = TypeAdapter(ImageLabels).validate_python( + ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"][0], ) labels.integration_version = f"{integration_version}" return mocker.patch( @@ -580,7 +586,8 @@ async def test_run_computational_sidecar_dask( # check that the task produces expected logs worker_progresses = [ - TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + TaskProgressEvent.model_validate_json(msg).progress + for msg in progress_sub.buffer ] # check ordering assert worker_progresses == sorted( @@ -588,7 +595,7 @@ async def test_run_computational_sidecar_dask( ), "ordering of progress values incorrectly sorted!" assert worker_progresses[0] == 0, "missing/incorrect initial progress value" assert worker_progresses[-1] == 1, "missing/incorrect final progress value" - worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + worker_logs = [TaskLogEvent.model_validate_json(msg).log for msg in log_sub.buffer] print(f"<-- we got {len(worker_logs)} lines of logs") for log in sleeper_task.expected_logs: @@ -649,7 +656,8 @@ async def test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub # check that the task produces expected logs worker_progresses = [ - TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + TaskProgressEvent.model_validate_json(msg).progress + for msg in progress_sub.buffer ] # check length assert len(worker_progresses) == len( @@ -659,7 +667,7 @@ async def test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub assert worker_progresses[0] == 0, "missing/incorrect initial progress value" assert worker_progresses[-1] == 1, "missing/incorrect final progress value" - worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + worker_logs = [TaskLogEvent.model_validate_json(msg).log for msg in log_sub.buffer] # check all the awaited logs are in there filtered_worker_logs = filter(lambda log: "This is iteration" in log, worker_logs) assert len(list(filtered_worker_logs)) == NUMBER_OF_LOGS diff --git a/services/dask-sidecar/tests/unit/test_utils.py b/services/dask-sidecar/tests/unit/test_utils.py index 5ee6f9156e5..f3d162952ff 100644 --- a/services/dask-sidecar/tests/unit/test_utils.py +++ b/services/dask-sidecar/tests/unit/test_utils.py @@ -13,12 +13,11 @@ from simcore_service_dask_sidecar.utils import num_available_gpus -@pytest.fixture(scope="function") +@pytest.fixture def mock_aiodocker(mocker: MockerFixture) -> mock.MagicMock: - mock_docker = mocker.patch( + return 
mocker.patch( "simcore_service_dask_sidecar.utils.aiodocker.Docker", autospec=True ) - return mock_docker def test_num_available_gpus_returns_0_when_container_not_created( @@ -74,7 +73,7 @@ def test_num_available_gpus_returns_0_when_container_wait_timesout( mock_aiodocker: mock.MagicMock, ): mock_aiodocker.return_value.__aenter__.return_value.containers.run.return_value.wait.side_effect = ( - asyncio.TimeoutError() + TimeoutError() ) assert num_available_gpus() == 0 @@ -91,6 +90,9 @@ def test_num_available_gpus( mock_aiodocker: mock.MagicMock, ): # default with mock should return 0 gpus + mock_aiodocker.return_value.__aenter__.return_value.containers.run.return_value.wait.return_value = { + "StatusCode": 0 + } assert num_available_gpus() == 0 # add the correct log
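

The models.py and settings.py hunks earlier in this diff show the less mechanical part of the migration: per-field @validator methods that read the values dict become either @field_validator(..., mode="before") or a single @model_validator(mode="after") operating on the constructed instance, class Config turns into model_config = ConfigDict(...), and multi-variable settings fields move from env=[...] to validation_alias=AliasChoices(...). A standalone sketch of the validator/config side, using a hypothetical HostConfig model (not the repository's ContainerHostConfig, which has more fields and aliases):

# Hypothetical model illustrating the v1 -> v2 validator migration pattern used above.
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator


class HostConfig(BaseModel):
    # v1: class Config: extra = Extra.ignore  ->  v2: model_config = ConfigDict(extra="ignore")
    model_config = ConfigDict(extra="ignore")

    memory: int = Field(..., alias="Memory")
    memory_swap: int | None = Field(default=None, alias="MemorySwap")

    # v1: @validator("memory", pre=True)  ->  v2: @field_validator("memory", mode="before")
    @field_validator("memory", mode="before")
    @classmethod
    def _coerce_memory(cls, v):
        return int(v)

    # v1: two @validator(...) methods peeking into `values`
    # v2: one @model_validator(mode="after") that can read and adjust the whole instance
    @model_validator(mode="after")
    def _default_swap_to_memory(self) -> "HostConfig":
        if self.memory_swap is None:
            # swap defaults to the memory limit, i.e. swap disabled
            self.memory_swap = self.memory
        if self.memory_swap < self.memory:
            msg = "memory_swap cannot be smaller than memory"
            raise ValueError(msg)
        return self


cfg = HostConfig.model_validate({"Memory": "1024"})
assert cfg.memory_swap == 1024

Collapsing the two v1 validators into one after-validator, as the ContainerHostConfig change does, avoids the v1 pitfall where a field validator only sees previously validated fields through values; in v2 the after-validator runs once all fields exist.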