♻️ Pydantic V2 migration: various fixes (ITISFoundation#6647)
sanderegg authored Nov 1, 2024
1 parent 01769ac commit 2486b38
Showing 27 changed files with 199 additions and 116 deletions.
2 changes: 1 addition & 1 deletion packages/aws-library/tests/test_s3_client.py
@@ -696,7 +696,7 @@ async def test_create_single_presigned_download_link(
 
     dest_file = tmp_path / faker.file_name()
     async with ClientSession() as session:
-        response = await session.get(download_url)
+        response = await session.get(f"{download_url}")
         response.raise_for_status()
         with dest_file.open("wb") as fp:
             fp.write(await response.read())
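Why the f-string: in Pydantic V2, Url types (AnyUrl and friends) no longer subclass str, so APIs expecting a plain string, such as aiohttp's session.get, need an explicit conversion. A minimal sketch of the behavior (assuming Pydantic ≥ 2; the URL value is made up):

    from pydantic import AnyUrl, TypeAdapter

    url: AnyUrl = TypeAdapter(AnyUrl).validate_python("https://example.com/data.bin")
    assert not isinstance(url, str)  # V2: Url is its own type, not a str subclass
    download_url = f"{url}"          # explicit stringification, as in the diff above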
2 changes: 1 addition & 1 deletion packages/models-library/src/models_library/docker.py
@@ -75,7 +75,7 @@ def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey:
 class StandardSimcoreDockerLabels(BaseModel):
     """
     Represents the standard label on oSparc created containers (not yet services)
-    In order to create this object in code, please use construct() method!
+    In order to create this object in code, please use model_construct() method!
     """
 
     user_id: UserID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id")  # type: ignore[literal-required]
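construct() was renamed to model_construct() in Pydantic V2. A minimal sketch of what the docstring asks for (hypothetical model, not the real StandardSimcoreDockerLabels fields):

    from pydantic import BaseModel

    class Labels(BaseModel):
        user_id: int

    # model_construct() skips all validation: fast, but the caller must
    # guarantee the data is already valid
    labels = Labels.model_construct(user_id=42)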
4 changes: 2 additions & 2 deletions packages/postgres-database/tests/test_utils_projects.py
@@ -12,7 +12,7 @@
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import RowProxy
 from faker import Faker
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_postgres_database.models.projects import projects
 from simcore_postgres_database.utils_projects import (
     DBProjectNotFoundError,
@@ -69,7 +69,7 @@ async def test_get_project_trashed_at_column_can_be_converted_to_datetime(
 
     row = result.fetchone()
 
-    trashed_at = parse_obj_as(datetime | None, row.trashed_at)
+    trashed_at = TypeAdapter(datetime | None).validate_python(row.trashed_at)
     assert trashed_at == expected
 
 
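parse_obj_as() is gone in Pydantic V2; TypeAdapter wraps any annotation, including unions, and validates plain Python values against it. A sketch of the pattern used here (example values are made up):

    from datetime import datetime
    from pydantic import TypeAdapter

    adapter = TypeAdapter(datetime | None)
    assert adapter.validate_python(None) is None
    assert isinstance(adapter.validate_python("2024-11-01T12:00:00"), datetime)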
@@ -4,7 +4,7 @@
 
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import NonNegativeInt, parse_obj_as
+from pydantic import NonNegativeInt, TypeAdapter
 from servicelib.logging_utils import log_decorator
 from servicelib.rabbitmq import RabbitMQRPCClient
 
@@ -29,7 +29,9 @@ async def remove_volumes_without_backup_for_service(
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "remove_volumes_without_backup_for_service"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "remove_volumes_without_backup_for_service"
+        ),
         node_id=node_id,
         timeout_s=_REQUEST_TIMEOUT,
     )
@@ -51,7 +53,9 @@ async def backup_and_remove_volumes_for_all_services(
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "backup_and_remove_volumes_for_all_services"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "backup_and_remove_volumes_for_all_services"
+        ),
         timeout_s=_REQUEST_TIMEOUT,
     )
     assert result is None  # nosec
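RPCMethodName is a constrained string type, so the literal still has to pass through validation rather than a bare cast. A sketch with a hypothetical stand-in for RPCMethodName (assuming Pydantic ≥ 2):

    from typing import Annotated
    from pydantic import StringConstraints, TypeAdapter

    # hypothetical constraint, standing in for models_library's RPCMethodName
    RpcName = Annotated[str, StringConstraints(pattern=r"^[a-z_]+$", max_length=64)]
    method = TypeAdapter(RpcName).validate_python("backup_and_remove_volumes_for_all_services")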
@@ -1,14 +1,20 @@
 import logging
 from typing import Final
 
 from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import parse_obj_as
-from servicelib.logging_utils import log_decorator
-from servicelib.rabbitmq import RabbitMQRPCClient
+from pydantic import TypeAdapter
+
+from ....logging_utils import log_decorator
+from ... import RabbitMQRPCClient
 
 _logger = logging.getLogger(__name__)
 
+_UPDATE_DISK_USAGE: Final[RPCMethodName] = TypeAdapter(RPCMethodName).validate_python(
+    "update_disk_usage"
+)
+
 
 @log_decorator(_logger, level=logging.DEBUG)
 async def update_disk_usage(
@@ -21,6 +27,8 @@ async def update_disk_usage(
         {"service": "dy-sidecar", "node_id": f"{node_id}"}
     )
     result = await rabbitmq_rpc_client.request(
-        rpc_namespace, parse_obj_as(RPCMethodName, "update_disk_usage"), usage=usage
+        rpc_namespace,
+        _UPDATE_DISK_USAGE,
+        usage=usage,
     )
     assert result is None  # nosec
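Validating the method name once at import time and storing it in a module-level Final avoids re-running TypeAdapter construction and validation on every RPC call. A sketch of the idiom (with plain str standing in for RPCMethodName):

    from typing import Final
    from pydantic import TypeAdapter

    # validated once at import; every call to update_disk_usage() reuses it
    _UPDATE_DISK_USAGE: Final[str] = TypeAdapter(str).validate_python("update_disk_usage")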
@@ -1,5 +1,5 @@
 import logging
-from typing import Final, cast
+from typing import Final
 
 from models_library.api_schemas_resource_usage_tracker import (
     RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
@@ -115,5 +115,5 @@ async def export_service_runs(
         filters=filters,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert cast(AnyUrl, isinstance(result, AnyUrl))  # nosec
+    assert isinstance(result, AnyUrl)  # nosec
     return result
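The old assertion wrapped isinstance's bool in cast(AnyUrl, ...); cast() is a typing-only no-op at runtime, so it only confused readers and type checkers. Asserting isinstance directly both checks at runtime and narrows the type. A small illustration (values made up):

    from typing import cast

    result: object = "not a url"
    # both lines assert the same bool at runtime, but only the bare
    # isinstance() also narrows `result` for type checkers
    assert cast(object, isinstance(result, str))
    assert isinstance(result, str)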
@@ -1,7 +1,7 @@
 import logging
 import re
 
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from servicelib.logging_utils import log_catch
 from servicelib.progress_bar import ProgressBarData
 
@@ -35,5 +35,5 @@ async def __call__(self, logs: str) -> None:
         _logger.debug("received logs: %s", logs)
         with log_catch(_logger, reraise=False):
             if _size := _parse_size(logs):
-                _bytes = parse_obj_as(ByteSize, _size)
+                _bytes = TypeAdapter(ByteSize).validate_python(_size)
                 await self.progress_bar.set_(_bytes)
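ByteSize parsing works the same way through a TypeAdapter: a human-readable size string validates into an int subclass. For example (values made up):

    from pydantic import ByteSize, TypeAdapter

    size = TypeAdapter(ByteSize).validate_python("1.5GiB")
    assert int(size) == 1610612736  # ByteSize is an int subclass
    print(size.human_readable())    # "1.5GiB"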
43 changes: 34 additions & 9 deletions packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
@@ -1,6 +1,7 @@
 import logging
+import traceback
 from abc import ABC, abstractmethod
-from asyncio import CancelledError
+from asyncio import CancelledError, Task
 from collections.abc import Callable, Coroutine
 from pathlib import Path
 from typing import Any
@@ -12,6 +13,7 @@
 from models_library.services_types import ServicePortKey
 from models_library.users import UserID
 from pydantic import BaseModel, ConfigDict, Field, ValidationError
+from pydantic_core import InitErrorDetails
 from servicelib.progress_bar import ProgressBarData
 from servicelib.utils import logged_gather
 from settings_library.aws_s3_cli import AwsS3CliSettings
@@ -28,6 +30,28 @@
 log = logging.getLogger(__name__)
 
 
+# -> @GitHK this looks very dangerous, using a lot of protected stuff, just checking the number of ignores shows it's a bad idea...
+def _format_error(task: Task) -> str:
+    # pylint:disable=protected-access
+    assert task._exception  # nosec # noqa: SLF001
+    error_list = traceback.format_exception(
+        type(task._exception),  # noqa: SLF001
+        task._exception,  # noqa: SLF001
+        task._exception.__traceback__,  # noqa: SLF001
+    )
+    return "\n".join(error_list)
+
+
+def _get_error_details(task: Task, port_key: str) -> InitErrorDetails:
+    # pylint:disable=protected-access
+    return InitErrorDetails(
+        type="value_error",
+        loc=(f"{port_key}",),
+        input=_format_error(task),
+        ctx={"error": task._exception},  # noqa: SLF001
+    )
+
+
 class OutputsCallbacks(ABC):
     @abstractmethod
     async def aborted(self, key: ServicePortKey) -> None:
@@ -72,9 +96,9 @@ def __init__(self, **data: Any):
 
         # let's pass ourselves down
         for input_key in self.internal_inputs:
-            self.internal_inputs[input_key]._node_ports = self
+            self.internal_inputs[input_key]._node_ports = self  # noqa: SLF001
         for output_key in self.internal_outputs:
-            self.internal_outputs[output_key]._node_ports = self
+            self.internal_outputs[output_key]._node_ports = self  # noqa: SLF001
 
     @property
     async def inputs(self) -> InputsList:
@@ -132,10 +156,11 @@ async def set(
 
     async def set_file_by_keymap(self, item_value: Path) -> None:
         for output in (await self.outputs).values():
-            if is_file_type(output.property_type) and output.file_to_key_map:
-                if item_value.name in output.file_to_key_map:
-                    await output.set(item_value)
-                    return
+            if (is_file_type(output.property_type) and output.file_to_key_map) and (
+                item_value.name in output.file_to_key_map
+            ):
+                await output.set(item_value)
+                return
         raise PortNotFound(msg=f"output port for item {item_value} not found")
 
     async def _node_ports_creator_cb(self, node_uuid: NodeIDStr) -> type["Nodeports"]:
@@ -152,9 +177,9 @@ async def _auto_update_from_db(self) -> None:
         # let's pass ourselves down
         # pylint: disable=protected-access
         for input_key in self.internal_inputs:
-            self.internal_inputs[input_key]._node_ports = self
+            self.internal_inputs[input_key]._node_ports = self  # noqa: SLF001
         for output_key in self.internal_outputs:
-            self.internal_outputs[output_key]._node_ports = self
+            self.internal_outputs[output_key]._node_ports = self  # noqa: SLF001
 
     async def set_multiple(
         self,
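The InitErrorDetails built in _get_error_details is pydantic-core's raw ingredient for a V2 ValidationError; V1's ErrorWrapper is gone. A minimal sketch of how such details become a raisable error (names are illustrative, not the module's real flow):

    from pydantic_core import InitErrorDetails, ValidationError

    detail: InitErrorDetails = {
        "type": "value_error",                  # ctx["error"] must hold the exception
        "loc": ("my_port",),
        "input": "formatted traceback here",
        "ctx": {"error": ValueError("upload failed")},
    }
    error = ValidationError.from_exception_data("Nodeports", [detail])
    print(error.errors())  # one error located at ("my_port",)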
@@ -8,8 +8,7 @@
 from models_library.basic_types import IDStr, SHA256Str
 from models_library.services_types import FileName, ServicePortKey
 from models_library.users import UserID
-from pydantic import AnyUrl, ByteSize
-from pydantic.tools import parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from servicelib.progress_bar import ProgressBarData
 from settings_library.aws_s3_cli import AwsS3CliSettings
 from settings_library.r_clone import RCloneSettings
@@ -64,6 +63,7 @@ async def get_value_from_link(
     file_name = other_value.name
     # move the file to the right final location
     # if a file alias is present use it
+
     if file_to_key_map:
         file_name = next(iter(file_to_key_map))
 
@@ -101,7 +101,7 @@ async def get_download_link_from_storage(
 
     # could raise ValidationError but will never do it since
     assert isinstance(link, URL)  # nosec
-    url: AnyUrl = parse_obj_as(AnyUrl, f"{link}")
+    url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{link}")
     return url
 
 
@@ -123,7 +123,7 @@ async def get_download_link_from_storage_overload(
         s3_object=s3_object,
         link_type=link_type,
    )
-    url: AnyUrl = parse_obj_as(AnyUrl, f"{link}")
+    url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{link}")
     return url
4 changes: 2 additions & 2 deletions packages/simcore-sdk/tests/integration/conftest.py
@@ -17,7 +17,7 @@
 from models_library.generics import Envelope
 from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_simcore.helpers.faker_factories import random_project, random_user
 from settings_library.aws_s3_cli import AwsS3CliSettings
 from settings_library.r_clone import RCloneSettings, S3Provider
@@ -94,7 +94,7 @@ def create_valid_file_uuid(
 ) -> Callable[[str, Path], SimcoreS3FileID]:
     def _create(key: str, file_path: Path) -> SimcoreS3FileID:
         clean_path = Path(f"{project_id}/{node_uuid}/{key}/{file_path.name}")
-        return parse_obj_as(SimcoreS3FileID, f"{clean_path}")
+        return TypeAdapter(SimcoreS3FileID).validate_python(f"{clean_path}")
 
     return _create
 
@@ -14,10 +14,11 @@
 
 import pytest
 from faker import Faker
+from models_library.basic_types import IDStr
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from servicelib.progress_bar import ProgressBarData
 from settings_library.aws_s3_cli import AwsS3CliSettings
 from settings_library.r_clone import RCloneSettings
@@ -157,7 +158,9 @@ async def test_valid_upload_download(
     mock_io_log_redirect_cb: LogRedirectCB,
     faker: Faker,
 ):
-    async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
+    async with ProgressBarData(
+        num_steps=2, description=IDStr(faker.pystr())
+    ) as progress_bar:
         await data_manager._push_directory(  # noqa: SLF001
             user_id=user_id,
             project_id=project_id,
mock_io_log_redirect_cb: LogRedirectCB,
faker: Faker,
):
async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
async with ProgressBarData(
num_steps=2, description=IDStr(faker.pystr())
) as progress_bar:
await data_manager._push_directory( # noqa: SLF001
user_id=user_id,
project_id=project_id,
@@ -251,7 +256,9 @@ async def test_delete_legacy_archive(
     temp_dir: Path,
     faker: Faker,
 ):
-    async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
+    async with ProgressBarData(
+        num_steps=2, description=IDStr(faker.pystr())
+    ) as progress_bar:
         # NOTE: legacy archives can no longer be created
         # generating a "legacy style archive"
         archive_into_dir = temp_dir / f"legacy-archive-dir-{uuid4()}"
@@ -263,8 +270,8 @@
             user_id=user_id,
             store_id=SIMCORE_LOCATION,
             store_name=None,
-            s3_object=parse_obj_as(
-                SimcoreS3FileID, f"{project_id}/{node_uuid}/{legacy_archive_name.name}"
-            ),
+            s3_object=TypeAdapter(SimcoreS3FileID).validate_python(
+                f"{project_id}/{node_uuid}/{legacy_archive_name.name}"
+            ),
             path_to_upload=legacy_archive_name,
             io_log_redirect_cb=None,
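ProgressBarData.description is now typed as IDStr rather than a bare str, so test values from faker are wrapped explicitly. IDStr is models_library's constrained string; a sketch of the equivalent idea with a hypothetical stand-in (assuming Pydantic ≥ 2):

    from typing import Annotated
    from pydantic import StringConstraints, TypeAdapter

    # stand-in for models_library.basic_types.IDStr
    IDStr = Annotated[str, StringConstraints(strip_whitespace=True, min_length=1, max_length=100)]
    description = TypeAdapter(IDStr).validate_python("legacy-archive-upload")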