diff --git a/api/specs/web-server/_common.py b/api/specs/web-server/_common.py
index f3dcd66bc5c..25341fd2a31 100644
--- a/api/specs/web-server/_common.py
+++ b/api/specs/web-server/_common.py
@@ -8,6 +8,7 @@
 from typing import Any, ClassVar, NamedTuple
 
 import yaml
+from common_library.pydantic_fields_extension import get_type
 from fastapi import FastAPI
 from models_library.basic_types import LogLevel
 from pydantic import BaseModel, Field
@@ -116,8 +117,8 @@ def assert_handler_signature_against_model(
 
     # query and path parameters
     implemented_params = [
-        ParamSpec(field.name, field.type_, field.field_info)
-        for field in model_cls.__fields__.values()
+        ParamSpec(name, get_type(info), info)
+        for name, info in model_cls.model_fields.items()
     ]
 
     assert {p.name for p in implemented_params}.issubset(  # nosec
diff --git a/packages/models-library/src/models_library/function_services_catalog/api.py b/packages/models-library/src/models_library/function_services_catalog/api.py
index 48488cbf567..0b99e4d6682 100644
--- a/packages/models-library/src/models_library/function_services_catalog/api.py
+++ b/packages/models-library/src/models_library/function_services_catalog/api.py
@@ -24,7 +24,7 @@ def iter_service_docker_data() -> Iterator[ServiceMetaDataPublished]:
 
     for meta_obj in catalog.iter_metadata():
         # NOTE: the originals are this way not modified from outside
-        copied_meta_obj = meta_obj.copy(deep=True)
+        copied_meta_obj = meta_obj.model_copy(deep=True)
         assert is_function_service(copied_meta_obj.key)  # nosec
         yield copied_meta_obj
diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
index 429783e7061..aea927de4d6 100644
--- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
+++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
@@ -109,7 +109,7 @@ def create_computation_cb(url, **kwargs) -> CallbackResult:
         }
     returned_computation = ComputationTask.model_validate(
         ComputationTask.model_config["json_schema_extra"]["examples"][0]
-    ).copy(
+    ).model_copy(
         update={
             "id": f"{kwargs['json']['project_id']}",
             "state": state,
@@ -133,7 +133,7 @@ def get_computation_cb(url, **kwargs) -> CallbackResult:
     node_states = FULL_PROJECT_NODE_STATES
     returned_computation = ComputationTask.model_validate(
         ComputationTask.model_config["json_schema_extra"]["examples"][0]
-    ).copy(
+    ).model_copy(
         update={
             "id": Path(url.path).name,
             "state": state,
diff --git a/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py
index 2ca9d431075..1163a479c68 100644
--- a/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py
+++ b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py
@@ -46,7 +46,7 @@ async def connect_to_db(app: web.Application, settings: PostgresSettings) -> Non
     - sets an engine in app state (use `get_async_engine(app)` to retrieve)
     """
     if settings.POSTGRES_CLIENT_NAME:
-        settings = settings.copy(
+        settings = settings.model_copy(
             update={"POSTGRES_CLIENT_NAME": settings.POSTGRES_CLIENT_NAME + "-asyncpg"}
         )
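# Reviewer note (not part of the patch): the _common.py hunk above swaps the
# pydantic v1 `model_cls.__fields__` / `field.type_` introspection for the v2
# `model_fields` mapping. A minimal, self-contained sketch of the v2 pattern;
# the model below is made up, but `model_fields` and `FieldInfo.annotation`
# are actual pydantic v2 API.
from pydantic import BaseModel


class _PageParams(BaseModel):
    limit: int = 20
    offset: int = 0


# v1: [(f.name, f.type_, f.field_info) for f in _PageParams.__fields__.values()]
# v2: model_fields maps the field name to a FieldInfo; the type lives in `annotation`
for name, info in _PageParams.model_fields.items():
    print(name, info.annotation, info.default)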
diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py
index 0dd40949ba8..a776ad918f4 100644
--- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py
+++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py
@@ -171,7 +171,7 @@ def parse_request_query_parameters_as(
     # query parameters with the same key. However, we are not using such cases anywhere at the moment.
     data = dict(request.query)
 
-    if hasattr(parameters_schema_cls, "parse_obj"):
+    if hasattr(parameters_schema_cls, "model_validate"):
         return parameters_schema_cls.model_validate(data)
     model: ModelClass = TypeAdapter(parameters_schema_cls).validate_python(data)
     return model
diff --git a/packages/service-library/src/servicelib/project_lock.py b/packages/service-library/src/servicelib/project_lock.py
index e1a275dcfc7..f2ae6ce6ddd 100644
--- a/packages/service-library/src/servicelib/project_lock.py
+++ b/packages/service-library/src/servicelib/project_lock.py
@@ -49,7 +49,7 @@ async def lock_project(
             value=True,
             owner=owner,
             status=status,
-        ).json(),
+        ).model_dump_json(),
     ):
         msg = f"Lock for project {project_uuid!r} owner {owner!r} could not be acquired"
         raise ProjectLockError(msg)
diff --git a/packages/service-library/src/servicelib/services_utils.py b/packages/service-library/src/servicelib/services_utils.py
index 98aace49c6c..889594cbf0c 100644
--- a/packages/service-library/src/servicelib/services_utils.py
+++ b/packages/service-library/src/servicelib/services_utils.py
@@ -22,7 +22,7 @@ def get_status_as_dict(
 ) -> dict:
     """shared between different backend services to guarantee same result to frontend"""
     return (
-        status.dict(by_alias=True)
+        status.model_dump(by_alias=True)
         if isinstance(status, DynamicServiceGet)
-        else status.dict()
+        else status.model_dump()
     )
diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py
index 5671eda108f..4d5e41dd920 100644
--- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py
+++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py
@@ -53,7 +53,7 @@ async def _test_task_context_decorator(
     ) -> web.StreamResponse:
         """this task context callback tries to get the user_id from the query if available"""
         query_param = parse_request_query_parameters_as(query_model, request)
-        request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = query_param.dict()
+        request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = query_param.model_dump()
         return await handler(request)
 
     return _test_task_context_decorator
diff --git a/packages/settings-library/src/settings_library/utils_encoders.py b/packages/settings-library/src/settings_library/utils_encoders.py
deleted file mode 100644
index f38e156b6a5..00000000000
--- a/packages/settings-library/src/settings_library/utils_encoders.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from functools import partial
-
-from pydantic import BaseModel, SecretStr
-from pydantic.json import custom_pydantic_encoder
-
-
-def create_json_encoder_wo_secrets(model_cls: type[BaseModel]):
-    """Use to reveal secrtes when seriaizng a model via `.dict()` or `.json()`
-
-    Example:
-        model.dict()['my_secret'] == "********"
-        show_secrets_encoder = create_json_encoder_wo_secrets(type(model))
-        model.dict(encoder=show_secrets_encoder)['my_secret'] == "secret"
-    """
-    current_encoders = getattr(model_cls.model_config, "json_encoders", {})
-    return partial(
-        custom_pydantic_encoder,
-        {
-            SecretStr: lambda v: v.get_secret_value(),
-            **current_encoders,
-        },
-    )
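# Reviewer note (not part of the patch): `create_json_encoder_wo_secrets` is deleted
# above because pydantic v2 drops `pydantic.json.custom_pydantic_encoder` and the
# `json_encoders` config. A minimal sketch of one possible v2 replacement, i.e.
# revealing SecretStr values on dump; this helper is hypothetical and handles only
# top-level fields, it is not necessarily what the codebase uses instead.
from pydantic import BaseModel, SecretStr


def dump_revealing_secrets(model: BaseModel) -> dict:
    # model_dump() keeps SecretStr instances, so the secret can be revealed explicitly
    return {
        key: value.get_secret_value() if isinstance(value, SecretStr) else value
        for key, value in model.model_dump().items()
    }


class _Credentials(BaseModel):
    user: str
    password: SecretStr


assert dump_revealing_secrets(_Credentials(user="a", password="b"))["password"] == "b"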
diff --git a/services/agent/tests/unit/test_api_rest__health.py b/services/agent/tests/unit/test_api_rest__health.py
index 6e690daa788..9f0904c182e 100644
--- a/services/agent/tests/unit/test_api_rest__health.py
+++ b/services/agent/tests/unit/test_api_rest__health.py
@@ -14,4 +14,4 @@ def test_health_ok(test_client: TestClient):
     response = test_client.get("/health")
 
     assert response.status_code == status.HTTP_200_OK
-    assert HealthCheckGet.parse_obj(response.json())
+    assert HealthCheckGet.model_validate(response.json())
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
index 097afd95288..c3a8e7ee1b1 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
@@ -460,7 +460,7 @@ async def _get_changed_tasks_from_backend(
         return [
             (
                 task,
-                task.copy(update={"state": backend_state}),
+                task.model_copy(update={"state": backend_state}),
             )
             for task, backend_state in zip(
                 processing_tasks, tasks_backend_status, strict=True
diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py
index 2123a506ad7..1375795f0cb 100644
--- a/services/director-v2/tests/unit/conftest.py
+++ b/services/director-v2/tests/unit/conftest.py
@@ -267,7 +267,7 @@ def fake_service_specifications(faker: Faker) -> dict[str, Any]:
                 },
             },
         }
-    ).dict(by_alias=True, exclude_unset=True)
+    ).model_dump(by_alias=True, exclude_unset=True)
     }
 
 
diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py
index 84711cf0936..560794460de 100644
--- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py
+++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py
@@ -502,7 +502,7 @@ async def test_mark_all_services_in_wallet_for_removal(
 ) -> None:
     for wallet_id in [WalletID(1), WalletID(2)]:
         for _ in range(2):
-            new_scheduler_data = scheduler_data.copy(deep=True)
+            new_scheduler_data = scheduler_data.model_copy(deep=True)
             new_scheduler_data.node_uuid = faker.uuid4(cast_to=None)
             new_scheduler_data.service_name = ServiceName(
                 f"fake_{new_scheduler_data.node_uuid}"
diff --git a/services/director-v2/tests/unit/test_utils_computation.py b/services/director-v2/tests/unit/test_utils_computation.py
index 14c9ffa34f3..046326a296c 100644
--- a/services/director-v2/tests/unit/test_utils_computation.py
+++ b/services/director-v2/tests/unit/test_utils_computation.py
@@ -265,7 +265,7 @@ def test_get_pipeline_state_from_task_states(
     fake_task: CompTaskAtDB,
 ):
     tasks: list[CompTaskAtDB] = [
-        fake_task.copy(deep=True, update={"state": s}) for s in task_states
+        fake_task.model_copy(deep=True, update={"state": s}) for s in task_states
     ]
 
     pipeline_state: RunningState = get_pipeline_state_from_task_states(tasks)
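# Reviewer note (not part of the patch): several hunks above rename `.copy(...)` to
# `.model_copy(...)`. Behaviour is unchanged: like v1 `copy()`, v2 `model_copy()`
# does NOT re-validate the `update` values. Illustrative sketch with a made-up model:
from pydantic import BaseModel


class _Task(BaseModel):
    state: str = "PENDING"


task = _Task()
updated = task.model_copy(deep=True, update={"state": "STARTED"})
assert updated.state == "STARTED" and task.state == "PENDING"
# beware: update values bypass validation, so this also "works" without an error
broken = task.model_copy(update={"state": 42})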
diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py
index 125f3153db4..11975ac9e88 100644
--- a/services/director-v2/tests/unit/test_utils_dags.py
+++ b/services/director-v2/tests/unit/test_utils_dags.py
@@ -422,13 +422,13 @@ def pipeline_test_params(
 
     # resolve the comp_tasks
     resolved_list_comp_tasks = [
-        c.copy(update={"node_id": node_name_to_uuid_map[c.node_id]})
+        c.model_copy(update={"node_id": node_name_to_uuid_map[c.node_id]})
         for c in list_comp_tasks
     ]
 
     # resolved the expected output
-    resolved_expected_pipeline_details = expected_pipeline_details_output.copy(
+    resolved_expected_pipeline_details = expected_pipeline_details_output.model_copy(
         update={
             "adjacency_list": {
                 NodeID(node_name_to_uuid_map[node_a]): [
diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py
index ff55e862abb..fdb3b7d5a64 100644
--- a/services/director-v2/tests/unit/with_dbs/conftest.py
+++ b/services/director-v2/tests/unit/with_dbs/conftest.py
@@ -245,9 +245,14 @@ def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster:
             for gid, rights in cluster_kwargs["access_rights"].items():
                 conn.execute(
                     pg_insert(cluster_to_groups)
-                    .values(cluster_id=created_cluster.id, gid=gid, **rights.dict())
+                    .values(
+                        cluster_id=created_cluster.id,
+                        gid=gid,
+                        **rights.model_dump(),
+                    )
                     .on_conflict_do_update(
-                        index_elements=["gid", "cluster_id"], set_=rights.dict()
+                        index_elements=["gid", "cluster_id"],
+                        set_=rights.model_dump(),
                     )
                 )
         access_rights_in_db = {}
diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
index 8e4efa04c2c..719d2f38c6c 100644
--- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
+++ b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
@@ -576,7 +576,7 @@ async def test_container_create_outputs_dirs(
     assert mock_event_filter_enqueue.call_count == 0
 
     json_outputs_labels = {
-        k: v.dict(by_alias=True) for k, v in mock_outputs_labels.items()
+        k: v.model_dump(by_alias=True) for k, v in mock_outputs_labels.items()
     }
     response = await test_client.post(
         f"/{API_VTAG}/containers/ports/outputs/dirs",
diff --git a/services/invitations/tests/unit/api/test_api_dependencies.py b/services/invitations/tests/unit/api/test_api_dependencies.py
index 6104a57236c..38e94a52d74 100644
--- a/services/invitations/tests/unit/api/test_api_dependencies.py
+++ b/services/invitations/tests/unit/api/test_api_dependencies.py
@@ -18,7 +18,7 @@ def test_invalid_http_basic_auth(
 ):
     response = client.post(
         f"/{API_VTAG}/invitations",
-        json=invitation_data.dict(),
+        json=invitation_data.model_dump(),
         auth=invalid_basic_auth,
     )
     assert response.status_code == status.HTTP_401_UNAUTHORIZED, f"{response.json()=}"
diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py
index 0ca095f1915..0c4bf15c7a8 100644
--- a/services/invitations/tests/unit/test_cli.py
+++ b/services/invitations/tests/unit/test_cli.py
@@ -46,7 +46,7 @@ def test_invite_user_and_check_invitation(
     }
 
     expected = {
-        **invitation_data.dict(exclude={"product"}),
+        **invitation_data.model_dump(exclude={"product"}),
         "product": environs["INVITATIONS_DEFAULT_PRODUCT"],
     }
 
diff --git a/services/osparc-gateway-server/src/osparc_gateway_server/backend/osparc.py b/services/osparc-gateway-server/src/osparc_gateway_server/backend/osparc.py
index c6e5c0b58f3..f905dfc83a4 100644
--- a/services/osparc-gateway-server/src/osparc_gateway_server/backend/osparc.py
+++ b/services/osparc-gateway-server/src/osparc_gateway_server/backend/osparc.py
@@ -67,7 +67,7 @@ async def do_setup(self) -> None:
         assert isinstance(self.log, logging.Logger)  # nosec
         self.log.info(
             "osparc-gateway-server application settings:\n%s",
-            self.settings.json(indent=2),
+            self.settings.model_dump_json(indent=2),
         )
 
         if self.settings.SC_BOOT_MODE in [BootModeEnum.DEBUG]:
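# Reviewer note (not part of the patch): the hunks above rename `.dict(...)` to
# `.model_dump(...)`; keyword arguments such as `by_alias` and `exclude_unset` keep
# their v1 meaning. Small illustrative sketch (the model and alias are made up):
from pydantic import BaseModel, Field


class _Rights(BaseModel):
    read: bool = True
    write: bool = Field(default=False, alias="canWrite")


rights = _Rights()
assert rights.model_dump() == {"read": True, "write": False}
assert rights.model_dump(by_alias=True) == {"read": True, "canWrite": False}
assert rights.model_dump(exclude_unset=True) == {}  # nothing was set explicitly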
diff --git a/services/payments/scripts/example_payment_gateway.py b/services/payments/scripts/example_payment_gateway.py
index bc1e93bc59f..f3e3b64627b 100755
--- a/services/payments/scripts/example_payment_gateway.py
+++ b/services/payments/scripts/example_payment_gateway.py
@@ -429,7 +429,7 @@ def create_app():
     override_fastapi_openapi_method(app)
 
     app.state.settings = Settings.create_from_envs()
-    logging.info(app.state.settings.json(indent=2))
+    logging.info(app.state.settings.model_dump_json(indent=2))
 
     for factory in (
         create_payment_router,
diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py
index b854c915d41..34af4fa2268 100644
--- a/services/payments/src/simcore_service_payments/services/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py
@@ -70,7 +70,7 @@ def from_http_status_error(
     def get_detailed_message(self) -> str:
         err_json = "null"
         if model := getattr(self, "model", None):
-            err_json = model.json(indent=1)
+            err_json = model.model_dump_json(indent=1)
 
         curl_cmd = "null"
         if http_status_error := getattr(self, "http_status_error", None):
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py
index e71b614c411..fefb9df5dd7 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py
@@ -42,5 +42,5 @@ def evaluate(ctx: typer.Context) -> None:
     assert ctx  # nosec
     settings = MinimalApplicationSettings.create_from_envs()
     err_console.print(
-        f"[yellow]running with configuration:\n{settings.json()}[/yellow]"
+        f"[yellow]running with configuration:\n{settings.model_dump_json()}[/yellow]"
    )
diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_api.py b/services/web/server/src/simcore_service_webserver/api_keys/_api.py
index 9bbe56f7c6f..6cdc15e2f24 100644
--- a/services/web/server/src/simcore_service_webserver/api_keys/_api.py
+++ b/services/web/server/src/simcore_service_webserver/api_keys/_api.py
@@ -93,7 +93,7 @@ async def get_or_create_api_key(
             api_key=api_key,
             api_secret=api_secret,
         )
-    return ApiKeyGet.construct(
+    return ApiKeyGet.model_construct(
         display_name=row.display_name, api_key=row.api_key, api_secret=row.api_secret
     )
 
diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api_units.py b/services/web/server/src/simcore_service_webserver/catalog/_api_units.py
index a8558e674ec..65e435f6886 100644
--- a/services/web/server/src/simcore_service_webserver/catalog/_api_units.py
+++ b/services/web/server/src/simcore_service_webserver/catalog/_api_units.py
@@ -57,10 +57,10 @@ async def replace_service_input_outputs(
 
     # replace if above is successful
     for input_key, new_input in zip(service["inputs"], new_inputs, strict=True):
-        service["inputs"][input_key] = new_input.dict(**export_options)
+        service["inputs"][input_key] = new_input.model_dump(**export_options)
 
     for output_key, new_output in zip(service["outputs"], new_outputs, strict=True):
-        service["outputs"][output_key] = new_output.dict(**export_options)
+        service["outputs"][output_key] = new_output.model_dump(**export_options)
 
 
 def can_connect(
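# Reviewer note (not part of the patch): `.json(...)` becomes `.model_dump_json(...)`
# in the hunks above; `indent` is still supported. Illustrative sketch with a made-up
# settings-like model (not the real application Settings class):
from pydantic import BaseModel


class _AppSettings(BaseModel):
    log_level: str = "INFO"
    port: int = 8080


print(_AppSettings().model_dump_json(indent=2))
# {
#   "log_level": "INFO",
#   "port": 8080
# }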
diff --git a/services/web/server/src/simcore_service_webserver/catalog/_models.py b/services/web/server/src/simcore_service_webserver/catalog/_models.py
index 2432e98bc96..af137ba11d8 100644
--- a/services/web/server/src/simcore_service_webserver/catalog/_models.py
+++ b/services/web/server/src/simcore_service_webserver/catalog/_models.py
@@ -86,7 +86,7 @@ async def from_catalog_service_api_model(
 
         if ureg and (unit_html := get_html_formatted_unit(port, ureg)):
             # we know data is ok since it was validated above
-            return ServiceInputGet.construct(
+            return ServiceInputGet.model_construct(
                 key_id=input_key,
                 unit_long=unit_html.long,
                 unit_short=unit_html.short,
@@ -123,7 +123,7 @@ async def from_catalog_service_api_model(
         unit_html: UnitHtmlFormat | None
         if ureg and (unit_html := get_html_formatted_unit(port, ureg)):
             # we know data is ok since it was validated above
-            return ServiceOutputGet.construct(
+            return ServiceOutputGet.model_construct(
                 key_id=output_key,
                 unit_long=unit_html.long,
                 unit_short=unit_html.short,
diff --git a/services/web/server/src/simcore_service_webserver/groups/_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_handlers.py
index 428769b725d..fac761aaf25 100644
--- a/services/web/server/src/simcore_service_webserver/groups/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/groups/_handlers.py
@@ -166,7 +166,7 @@ async def update_group(request: web.Request):
     req_ctx = _GroupsRequestContext.model_validate(request)
     path_params = parse_request_path_parameters_as(_GroupPathParams, request)
     update: GroupUpdate = await parse_request_body_as(GroupUpdate, request)
-    new_group_values = update.dict(exclude_unset=True)
+    new_group_values = update.model_dump(exclude_unset=True)
 
     updated_group = await api.update_user_group(
         request.app, req_ctx.user_id, path_params.gid, new_group_values
@@ -270,7 +270,7 @@ async def update_group_user(request: web.Request):
         user_id=req_ctx.user_id,
         gid=path_params.gid,
         the_user_id_in_group=path_params.uid,
-        access_rights=update.access_rights.dict(),
+        access_rights=update.access_rights.model_dump(),
     )
     assert GroupUserGet.model_validate(user) is not None  # nosec
     return envelope_json_response(user)
diff --git a/services/web/server/src/simcore_service_webserver/groups/api.py b/services/web/server/src/simcore_service_webserver/groups/api.py
index 37cb859a89f..503eee73839 100644
--- a/services/web/server/src/simcore_service_webserver/groups/api.py
+++ b/services/web/server/src/simcore_service_webserver/groups/api.py
@@ -32,7 +32,7 @@ async def list_all_user_groups(app: web.Application, user_id: UserID) -> list[Gr
     async with get_database_engine(app).acquire() as conn:
         groups_db = await _db.get_all_user_groups(conn, user_id=user_id)
 
-    return [Group.construct(**group.model_dump()) for group in groups_db]
+    return [Group.model_construct(**group.model_dump()) for group in groups_db]
 
 
 async def get_user_group(
@@ -199,5 +199,5 @@ async def get_group_from_gid(app: web.Application, gid: GroupID) -> Group | None
         group_db = await _db.get_group_from_gid(conn, gid=gid)
 
     if group_db:
-        return Group.construct(**group_db.model_dump())
+        return Group.model_construct(**group_db.model_dump())
     return None
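# Reviewer note (not part of the patch): `.construct(...)` becomes `.model_construct(...)`
# in the hunks above. As in v1, it builds the instance WITHOUT validation, which is why
# it is only used on data that was already validated (e.g. rows read back from the
# database). Illustrative sketch with a made-up model:
from pydantic import BaseModel


class _Group(BaseModel):
    gid: int
    label: str


trusted_row = {"gid": 1, "label": "osparc"}
group = _Group.model_construct(**trusted_row)  # no validation performed
assert group.gid == 1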
diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py b/services/web/server/src/simcore_service_webserver/login/_registration.py
index 1a66b5ba4e5..6471757d183 100644
--- a/services/web/server/src/simcore_service_webserver/login/_registration.py
+++ b/services/web/server/src/simcore_service_webserver/login/_registration.py
@@ -192,7 +192,7 @@ async def create_invitation_token(
     return await db.create_confirmation(
         user_id=user_id,
         action=ConfirmationAction.INVITATION.name,
-        data=data_model.json(),
+        data=data_model.model_dump_json(),
     )
 
 
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
index d86be96058d..6876c63718d 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
@@ -380,7 +380,9 @@ async def _get_default_pricing_and_hardware_info(
 _MACHINE_TOTAL_RAM_SAFE_MARGIN_RATIO: Final[
     float
 ] = 0.1  # NOTE: machines always have less available RAM than advertised
-_SIDECARS_OPS_SAFE_RAM_MARGIN: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("1GiB")
+_SIDECARS_OPS_SAFE_RAM_MARGIN: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
+    "1GiB"
+)
 _CPUS_SAFE_MARGIN: Final[float] = 1.4
 _MIN_NUM_CPUS: Final[float] = 0.5
 
@@ -840,7 +842,7 @@ async def start_project_node(
     workbench = project.get("workbench", {})
     if not workbench.get(f"{node_id}"):
         raise NodeNotFoundError(project_uuid=f"{project_id}", node_uuid=f"{node_id}")
-    node_details = Node.construct(**workbench[f"{node_id}"])
+    node_details = Node.model_construct(**workbench[f"{node_id}"])
 
     await _start_dynamic_service(
         request,
@@ -1511,7 +1513,11 @@ async def is_service_deprecated(
         app, user_id, service_key, service_version, product_name
     )
     if deprecation_date := service.get("deprecated"):
-        deprecation_date_bool: bool = datetime.datetime.now(datetime.UTC) > datetime.datetime.fromisoformat(deprecation_date).replace(tzinfo=datetime.UTC)
+        deprecation_date_bool: bool = datetime.datetime.now(
+            datetime.UTC
+        ) > datetime.datetime.fromisoformat(deprecation_date).replace(
+            tzinfo=datetime.UTC
+        )
 
         return deprecation_date_bool
     return False
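# Reviewer note (not part of the patch): the hunks above use the two v2 validation
# entry points seen throughout this PR: `Model.model_validate()` for BaseModel
# subclasses and `TypeAdapter` for arbitrary annotated types such as ByteSize.
# Illustrative sketch (the query model is made up; ByteSize is real pydantic API):
from pydantic import BaseModel, ByteSize, TypeAdapter


class _QueryParams(BaseModel):
    user_id: int


params = _QueryParams.model_validate({"user_id": "42"})  # coerces and validates
assert params.user_id == 42

one_gib: ByteSize = TypeAdapter(ByteSize).validate_python("1GiB")
assert int(one_gib) == 2**30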
diff --git a/services/web/server/src/simcore_service_webserver/socketio/models.py b/services/web/server/src/simcore_service_webserver/socketio/models.py
index 37bb942298b..63f071b2ab8 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/models.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/models.py
@@ -12,7 +12,7 @@
 from models_library.socketio import SocketMessageDict
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
-from pydantic import ConfigDict, BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
 
 
 class WebSocketMessageBase(BaseModel):
@@ -26,6 +26,7 @@ def get_event_type(cls) -> str:
 
     @abstractmethod
     def to_socket_dict(self) -> SocketMessageDict: ...
 
+    model_config = ConfigDict(frozen=True)
 
 
@@ -58,7 +59,7 @@ class WebSocketProjectProgress(
     def from_rabbit_message(
         cls, message: ProgressRabbitMessageProject
     ) -> "WebSocketProjectProgress":
-        return cls.construct(
+        return cls.model_construct(
            user_id=message.user_id,
            project_id=message.project_id,
            progress_type=message.progress_type,
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
index 647a523f23c..18bd6e96e8f 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
@@ -12,7 +12,7 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from models_library.services import ServiceKey, ServiceVersion
-from pydantic import field_validator, ConfigDict, BaseModel, ValidationError
+from pydantic import BaseModel, ConfigDict, ValidationError, field_validator
 from servicelib.aiohttp import status
 from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as
 from servicelib.aiohttp.typing_extension import Handler
@@ -81,7 +81,7 @@ def _create_service_info_from(service: ValidService) -> ServiceInfo:
     )
     if service.thumbnail:
         values_map["thumbnail"] = service.thumbnail
-    return ServiceInfo.construct(_fields_set=set(values_map.keys()), **values_map)
+    return ServiceInfo.model_construct(_fields_set=set(values_map.keys()), **values_map)
 
 
 def _handle_errors_with_error_page(handler: Handler):
@@ -176,7 +176,8 @@ class ServiceAndFileParams(FileQueryParams, ServiceParams):
                 {"$ref": "#/definitions/FileParams"},
                 {"$ref": "#/definitions/ServiceParams"},
             ]
-        })
+        }
+    )
 
 
 class ViewerQueryParams(BaseModel):
diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens.py b/services/web/server/src/simcore_service_webserver/users/_tokens.py
index 3453309d200..6b4e58c8443 100644
--- a/services/web/server/src/simcore_service_webserver/users/_tokens.py
+++ b/services/web/server/src/simcore_service_webserver/users/_tokens.py
@@ -34,7 +34,7 @@ async def list_tokens(app: web.Application, user_id: UserID) -> list[ThirdPartyT
         async for row in conn.execute(
             sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id)
         ):
-            user_tokens.append(ThirdPartyToken.construct(**row["token_data"]))
+            user_tokens.append(ThirdPartyToken.model_construct(**row["token_data"]))
         return user_tokens
 
@@ -48,7 +48,7 @@ async def get_token(
             )
         )
         if row := await result.first():
-            return ThirdPartyToken.construct(**row["token_data"])
+            return ThirdPartyToken.model_construct(**row["token_data"])
         raise TokenNotFoundError(service_id=service_id)
 
@@ -78,7 +78,7 @@ async def update_token(
         assert resp.rowcount == 1  # nosec
         updated_token = await resp.fetchone()
         assert updated_token  # nosec
-        return ThirdPartyToken.construct(**updated_token["token_data"])
+        return ThirdPartyToken.model_construct(**updated_token["token_data"])
 
 
 async def delete_token(app: web.Application, user_id: UserID, service_id: str) -> None:
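# Reviewer note (not part of the patch): the socketio models above move from the v1
# `class Config` style to `model_config = ConfigDict(frozen=True)`. Illustrative
# sketch of the v2 form with a made-up model; the v1 equivalent was roughly
# `class Config: allow_mutation = False`.
from pydantic import BaseModel, ConfigDict, ValidationError


class _Message(BaseModel):
    model_config = ConfigDict(frozen=True)

    event: str


msg = _Message(event="progress")
try:
    msg.event = "other"  # frozen models raise on mutation
except ValidationError:
    pass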
diff --git a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
index edef380dc86..e75aee0866f 100644
--- a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py
@@ -246,7 +246,7 @@ async def test_update_cluster(
     url = client.app.router["update_cluster"].url_for(cluster_id=f"{25}")
     rsp = await client.patch(
         f"{url}",
-        json=json.loads(cluster_patch.json(**_PATCH_EXPORT)),
+        json=json.loads(cluster_patch.model_dump_json(**_PATCH_EXPORT)),
     )
     data, error = await assert_status(rsp, expected.ok)
     if not error:
@@ -497,7 +497,9 @@ async def test_ping_cluster_with_error(
     )
     assert client.app
     url = client.app.router["ping_cluster"].url_for()
-    rsp = await client.post(f"{url}", json=json.loads(cluster_ping.json(by_alias=True)))
+    rsp = await client.post(
+        f"{url}", json=json.loads(cluster_ping.model_dump_json(by_alias=True))
+    )
     data, error = await assert_status(rsp, expected_http_error)
     assert not data
     assert error
diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
index 1572f862421..862a0db06e8 100644
--- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
+++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py
@@ -262,7 +262,7 @@ async def _mock_catalog_get(*args, **kwarg):
     assert node.inputs
     node.inputs["linspace_stop"] = 4
 
-    _new_project_data = new_project.dict(**REQUEST_MODEL_POLICY)
+    _new_project_data = new_project.model_dump(**REQUEST_MODEL_POLICY)
     _new_project_data.pop("state")
 
     await db.replace_project(
         json_loads(json_dumps(_new_project_data)),
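# Reviewer note (not part of the patch): the test hunks above keep the v1-era
# `json.loads(model.model_dump_json(...))` round-trip to build a JSON-ready payload.
# In pydantic v2 the same result can also be obtained with `model_dump(mode="json")`;
# both are compared below with a made-up model, purely as an observation:
import json
from datetime import datetime

from pydantic import BaseModel


class _Probe(BaseModel):
    name: str = "cluster"
    created: datetime = datetime(2024, 1, 1)


probe = _Probe()
# both produce JSON-compatible values (e.g. datetime -> ISO string)
assert json.loads(probe.model_dump_json()) == probe.model_dump(mode="json")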