From a56e919816f4da8eec582572d959ba3e3565f52e Mon Sep 17 00:00:00 2001
From: Max Marrone
Date: Wed, 18 Dec 2024 17:36:01 -0500
Subject: [PATCH] refactor: Rename Pydantic v1 methods to their v2 counterparts (#17123)

---
 api/pytest.ini | 11 +-
 .../calibration_storage/deck_configuration.py | 4 +-
 .../calibration_storage/file_operators.py | 4 +-
 .../calibration_storage/ot2/tip_length.py | 8 +-
 api/src/opentrons/execute.py | 4 +-
 .../emulation/module_server/client.py | 2 +-
 .../emulation/module_server/server.py | 8 +-
 .../instruments/ot2/instrument_calibration.py | 3 +-
 .../instruments/ot2/pipette.py | 10 +-
 .../instruments/ot3/gripper.py | 4 +-
 .../instruments/ot3/pipette.py | 8 +-
 .../hardware_control/ot3_calibration.py | 2 +-
 .../hardware_control/robot_calibration.py | 2 +-
 .../protocol_api/core/engine/labware.py | 8 +-
 .../protocol_api/core/engine/protocol.py | 2 +-
 .../commands/absorbance_reader/read.py | 10 +-
 .../commands/calibration/calibrate_gripper.py | 4 +-
 .../commands/calibration/calibrate_pipette.py | 4 +-
 .../commands/configure_nozzle_layout.py | 8 +-
 .../protocol_engine/commands/custom.py | 2 +-
 .../commands/movement_common.py | 8 +-
 .../protocol_engine/commands/touch_tip.py | 2 +-
 .../errors/error_occurrence.py | 2 +-
 .../execution/command_executor.py | 2 +-
 .../opentrons/protocol_engine/notes/notes.py | 2 +-
 .../resources/labware_data_provider.py | 2 +-
 .../resources/module_data_provider.py | 2 +-
 .../protocol_engine/slot_standardization.py | 18 +-
 .../protocol_engine/state/geometry.py | 2 +-
 .../protocol_engine/state/labware.py | 10 +-
 .../protocol_engine/state/modules.py | 2 +-
 .../opentrons/protocol_engine/state/state.py | 2 +-
 api/src/opentrons/protocol_engine/types.py | 2 +-
 .../extract_labware_definitions.py | 2 +-
 .../protocol_reader/file_format_validator.py | 10 +-
 .../protocol_runner/json_translator.py | 16 +-
 .../protocol_runner/legacy_command_mapper.py | 124 +++++++++---
 .../python_protocol_wrappers.py | 2 +-
 api/src/opentrons/simulate.py | 4 +-
 .../test_file_operators.py | 2 +-
 .../test_tip_length_ot2.py | 2 +-
 .../hardware_control/test_gripper.py | 4 +-
 .../commands/test_air_gap_in_place.py | 4 +-
 .../protocol_engine/commands/test_aspirate.py | 8 +-
 .../commands/test_aspirate_in_place.py | 4 +-
 .../protocol_engine/commands/test_blow_out.py | 4 +-
 .../commands/test_blow_out_in_place.py | 2 +-
 .../protocol_engine/commands/test_dispense.py | 10 +-
 .../commands/test_dispense_in_place.py | 4 +-
 .../protocol_engine/commands/test_drop_tip.py | 8 +-
 .../commands/test_drop_tip_in_place.py | 2 +-
 .../commands/test_liquid_probe.py | 4 +-
 .../commands/test_load_liquid_class.py | 2 +-
 .../commands/test_move_relative.py | 2 +-
 .../commands/test_move_to_addressable_area.py | 2 +-
 ...t_move_to_addressable_area_for_drop_tip.py | 2 +-
 .../commands/test_move_to_coordinates.py | 2 +-
 .../commands/test_move_to_well.py | 2 +-
 .../commands/test_pick_up_tip.py | 4 +-
 .../commands/test_prepare_to_aspirate.py | 2 +-
 .../opentrons/protocol_engine/conftest.py | 30 +++--
 .../resources/test_labware_data_provider.py | 6 +-
 .../state/test_command_history.py | 6 +-
 .../smoke_tests/test_legacy_command_mapper.py | 66 +++++-----
 .../smoke_tests/test_legacy_custom_labware.py | 2 +-
 .../test_legacy_module_commands.py | 12 +-
 .../smoke_tests/test_protocol_runner.py | 32 ++---
 .../protocol_runner/test_json_translator.py | 28 ++--
 .../test_legacy_command_mapper.py | 28 ++--
 .../protocol_runner/test_run_orchestrator.py | 6 +-
 .../protocols/models/test_json_protocol.py | 4 +-
 hardware/pytest.ini | 6 +
 robot-server/pytest.ini | 8 +-
 .../robot_server/client_data/router.py | 8 +-
 robot-server/robot_server/commands/router.py | 10 +-
 .../robot_server/data_files/router.py | 18 +-
 .../deck_configuration/defaults.py | 52 ++++----
 .../robot_server/deck_configuration/router.py | 6 +-
 .../robot_server/deck_configuration/store.py | 8 +-
 .../error_recovery/settings/router.py | 4 +-
 .../robot_server/errors/error_responses.py | 2 +-
 .../robot_server/errors/global_errors.py | 2 +-
 .../robot_server/instruments/router.py | 18 +-
 .../robot_server/labware_offsets/models.py | 7 +-
 .../robot_server/labware_offsets/router.py | 14 +-
 .../maintenance_run_data_manager.py | 4 +-
 .../maintenance_runs/router/base_router.py | 10 +-
 .../router/commands_router.py | 14 +-
 .../maintenance_runs/router/labware_router.py | 6 +-
 robot-server/robot_server/modules/router.py | 4 +-
 .../persistence/_migrations/up_to_2.py | 4 +-
 .../persistence/_migrations/up_to_3.py | 5 +-
 .../robot_server/persistence/pydantic.py | 6 +-
 .../robot_server/protocols/analysis_store.py | 10 +-
 robot-server/robot_server/protocols/router.py | 40 +++---
 .../robot_server/robot/control/router.py | 8 +-
 .../runs/router/actions_router.py | 2 +-
 .../robot_server/runs/router/base_router.py | 26 ++--
 .../runs/router/commands_router.py | 16 +-
 .../router/error_recovery_policy_router.py | 6 +-
 .../runs/router/labware_router.py | 8 +-
 .../robot_server/runs/run_data_manager.py | 6 +-
 robot-server/robot_server/service/errors.py | 2 +-
 .../robot_server/service/json_api/response.py | 24 +++-
 .../service/legacy/routers/networking.py | 4 +-
 .../notifications/notification_client.py | 8 +-
 .../command_execution/callable_executor.py | 2 +-
 .../robot_server/service/session/router.py | 2 +-
 .../robot_server/subsystems/router.py | 26 ++--
 robot-server/tests/instruments/test_router.py | 10 +-
 .../router/test_labware_router.py | 2 +-
 .../maintenance_runs/test_engine_store.py | 2 +-
 .../tests/modules/test_module_data_mapper.py | 8 +-
 robot-server/tests/modules/test_router.py | 4 +-
 .../tests/persistence/test_pydantic.py | 4 +-
 .../tests/protocols/test_protocol_analyzer.py | 2 +-
 .../tests/protocols/test_protocols_router.py | 2 +-
 .../tests/runs/router/test_base_router.py | 4 +-
 .../tests/runs/router/test_labware_router.py | 4 +-
 .../tests/runs/test_error_recovery_mapping.py | 8 +-
 .../tests/runs/test_run_data_manager.py | 2 +-
 .../tests/runs/test_run_orchestrator_store.py | 2 +-
 robot-server/tests/runs/test_run_store.py | 6 +-
 .../tests/service/json_api/test_request.py | 18 +-
 .../service/json_api/test_resource_links.py | 6 +-
 .../tests/service/json_api/test_response.py | 2 +-
 .../service/session/models/test_command.py | 4 +-
 robot-server/tests/subsystems/test_router.py | 24 ++--
 server-utils/pytest.ini | 6 +
 .../opentrons_shared_data/gripper/__init__.py | 2 +-
 .../liquid_classes/__init__.py | 7 +-
 .../pipette/load_data.py | 10 +-
 .../pipette/mutable_configurations.py | 8 +-
 .../pipette/scripts/build_json_script.py | 16 +-
 .../scripts/update_configuration_files.py | 8 +-
 shared-data/python/pytest.ini | 6 +
 .../python/tests/gripper/test_definition.py | 6 +-
 .../python/tests/labware/test_validations.py | 4 +-
 .../python/tests/liquid_classes/test_load.py | 4 +-
 .../python/tests/pipette/test_load_data.py | 2 +-
 .../pipette/test_mutable_configurations.py | 4 +-
 .../tests/pipette/test_pipette_definition.py | 8 +-
 .../tests/pipette/test_validate_schema.py | 2 +-
 .../tests/protocol/test_protocol_schema_v6.py | 2 +-
 .../tests/protocol/test_protocol_schema_v7.py | 2 +-
.../tests/protocol/test_protocol_schema_v8.py | 2 +- system-server/pytest.ini | 6 + .../system_server/settings/settings.py | 2 +- 148 files changed, 659 insertions(+), 586 deletions(-) diff --git a/api/pytest.ini b/api/pytest.ini index 61288b3f3c1..78115d41057 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -6,8 +6,13 @@ markers = addopts = --color=yes --strict-markers asyncio_mode = auto -# TODO this should be looked into being removed upon updating the Decoy library. The purpose of this warning is to -# catch missing attributes, but it raises for any property referenced in a test which accounts for about ~250 warnings -# which aren't serving any useful purpose and obscure other warnings. filterwarnings = + # TODO this should be looked into being removed upon updating the Decoy library. The purpose of this warning is to + # catch missing attributes, but it raises for any property referenced in a test which accounts for about ~250 warnings + # which aren't serving any useful purpose and obscure other warnings. ignore::decoy.warnings.MissingSpecAttributeWarning + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. ``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 + diff --git a/api/src/opentrons/calibration_storage/deck_configuration.py b/api/src/opentrons/calibration_storage/deck_configuration.py index 5c97ab37f25..857c2c22d3f 100644 --- a/api/src/opentrons/calibration_storage/deck_configuration.py +++ b/api/src/opentrons/calibration_storage/deck_configuration.py @@ -24,9 +24,9 @@ def serialize_deck_configuration( cutout_fixture_placements: List[CutoutFixturePlacement], last_modified: datetime ) -> bytes: """Serialize a deck configuration for storing on the filesystem.""" - data = _DeckConfigurationModel.construct( + data = _DeckConfigurationModel.model_construct( cutoutFixtures=[ - _CutoutFixturePlacementModel.construct( + _CutoutFixturePlacementModel.model_construct( cutoutId=e.cutout_id, cutoutFixtureId=e.cutout_fixture_id, opentronsModuleSerialNumber=e.opentrons_module_serial_number, diff --git a/api/src/opentrons/calibration_storage/file_operators.py b/api/src/opentrons/calibration_storage/file_operators.py index ddc046fd828..bf80a034d54 100644 --- a/api/src/opentrons/calibration_storage/file_operators.py +++ b/api/src/opentrons/calibration_storage/file_operators.py @@ -103,7 +103,7 @@ def save_to_file( directory_path.mkdir(parents=True, exist_ok=True) file_path = directory_path / f"{file_name}.json" json_data = ( - data.json() + data.model_dump_json() if isinstance(data, pydantic.BaseModel) else json.dumps(data, cls=encoder) ) @@ -112,7 +112,7 @@ def save_to_file( def serialize_pydantic_model(data: pydantic.BaseModel) -> bytes: """Safely serialize data from a Pydantic model into a form suitable for storing on disk.""" - return data.json(by_alias=True).encode("utf-8") + return data.model_dump_json(by_alias=True).encode("utf-8") _ModelT = typing.TypeVar("_ModelT", bound=pydantic.BaseModel) diff --git a/api/src/opentrons/calibration_storage/ot2/tip_length.py b/api/src/opentrons/calibration_storage/ot2/tip_length.py index a63472c9cea..979916bd85e 100644 --- a/api/src/opentrons/calibration_storage/ot2/tip_length.py +++ b/api/src/opentrons/calibration_storage/ot2/tip_length.py @@ -31,7 +31,7 @@ def _convert_tip_length_model_to_dict( # add encoders when converting to a dict. 
dict_of_tip_lengths = {} for key, item in to_dict.items(): - dict_of_tip_lengths[key] = json.loads(item.json()) + dict_of_tip_lengths[key] = json.loads(item.model_dump_json()) return dict_of_tip_lengths @@ -176,12 +176,14 @@ def delete_tip_length_calibration( io.save_to_file(tip_length_dir, pipette_id, dict_of_tip_lengths) else: io.delete_file(tip_length_dir / f"{pipette_id}.json") - elif tiprack_hash and any(tiprack_hash in v.dict() for v in tip_lengths.values()): + elif tiprack_hash and any( + tiprack_hash in v.model_dump() for v in tip_lengths.values() + ): # NOTE this is for backwards compatibilty only # TODO delete this check once the tip_length DELETE router # no longer depends on a tiprack hash for k, v in tip_lengths.items(): - if tiprack_hash in v.dict(): + if tiprack_hash in v.model_dump(): tip_lengths.pop(k) if tip_lengths: dict_of_tip_lengths = _convert_tip_length_model_to_dict(tip_lengths) diff --git a/api/src/opentrons/execute.py b/api/src/opentrons/execute.py index a9b3562d82b..998d6bc6597 100644 --- a/api/src/opentrons/execute.py +++ b/api/src/opentrons/execute.py @@ -560,7 +560,9 @@ def _create_live_context_pe( # Non-async would use call_soon_threadsafe(), which makes the waiting harder. async def add_all_extra_labware() -> None: for labware_definition_dict in extra_labware.values(): - labware_definition = LabwareDefinition.parse_obj(labware_definition_dict) + labware_definition = LabwareDefinition.model_validate( + labware_definition_dict + ) pe.add_labware_definition(labware_definition) # Add extra_labware to ProtocolEngine, being careful not to modify ProtocolEngine from this diff --git a/api/src/opentrons/hardware_control/emulation/module_server/client.py b/api/src/opentrons/hardware_control/emulation/module_server/client.py index 4108fe76069..5adcde0f267 100644 --- a/api/src/opentrons/hardware_control/emulation/module_server/client.py +++ b/api/src/opentrons/hardware_control/emulation/module_server/client.py @@ -66,7 +66,7 @@ async def read(self) -> Message: """Read a message from the module server.""" try: b = await self._reader.readuntil(MessageDelimiter) - m: Message = Message.parse_raw(b) + m: Message = Message.model_validate_json(b) return m except LimitOverrunError as e: raise ModuleServerClientError(str(e)) diff --git a/api/src/opentrons/hardware_control/emulation/module_server/server.py b/api/src/opentrons/hardware_control/emulation/module_server/server.py index 5a3d696eb7b..36878c342e3 100644 --- a/api/src/opentrons/hardware_control/emulation/module_server/server.py +++ b/api/src/opentrons/hardware_control/emulation/module_server/server.py @@ -53,7 +53,9 @@ def on_server_connected( self._connections[identifier] = connection for c in self._clients: c.write( - Message(status="connected", connections=[connection]).json().encode() + Message(status="connected", connections=[connection]) + .model_dump_json() + .encode() ) c.write(b"\n") @@ -72,7 +74,7 @@ def on_server_disconnected(self, identifier: str) -> None: for c in self._clients: c.write( Message(status="disconnected", connections=[connection]) - .json() + .model_dump_json() .encode() ) c.write(MessageDelimiter) @@ -95,7 +97,7 @@ async def _handle_connection( # A client connected. Send a dump of all connected modules. 
m = Message(status="dump", connections=list(self._connections.values())) - writer.write(m.json().encode()) + writer.write(m.model_dump_json().encode()) writer.write(MessageDelimiter) self._clients.add(writer) diff --git a/api/src/opentrons/hardware_control/instruments/ot2/instrument_calibration.py b/api/src/opentrons/hardware_control/instruments/ot2/instrument_calibration.py index e093763dcd1..b3b82b22421 100644 --- a/api/src/opentrons/hardware_control/instruments/ot2/instrument_calibration.py +++ b/api/src/opentrons/hardware_control/instruments/ot2/instrument_calibration.py @@ -123,7 +123,8 @@ def load_tip_length_for_pipette( ) -> TipLengthCalibration: if isinstance(tiprack, LabwareDefinition): tiprack = typing.cast( - "TypeDictLabwareDef", tiprack.dict(exclude_none=True, exclude_unset=True) + "TypeDictLabwareDef", + tiprack.model_dump(exclude_none=True, exclude_unset=True), ) tip_length_data = calibration_storage.load_tip_length_calibration( diff --git a/api/src/opentrons/hardware_control/instruments/ot2/pipette.py b/api/src/opentrons/hardware_control/instruments/ot2/pipette.py index 2d63342cf19..0881999d435 100644 --- a/api/src/opentrons/hardware_control/instruments/ot2/pipette.py +++ b/api/src/opentrons/hardware_control/instruments/ot2/pipette.py @@ -96,7 +96,7 @@ def __init__( use_old_aspiration_functions: bool = False, ) -> None: self._config = config - self._config_as_dict = config.dict() + self._config_as_dict = config.model_dump() self._pipette_offset = pipette_offset_cal self._pipette_type = self._config.pipette_type self._pipette_version = self._config.version @@ -273,7 +273,7 @@ def update_config_item( self._config, elements, liquid_class ) # Update the cached dict representation - self._config_as_dict = self._config.dict() + self._config_as_dict = self._config.model_dump() def reload_configurations(self) -> None: self._config = load_pipette_data.load_definition( @@ -281,7 +281,7 @@ def reload_configurations(self) -> None: self._pipette_model.pipette_channels, self._pipette_model.pipette_version, ) - self._config_as_dict = self._config.dict() + self._config_as_dict = self._config.model_dump() def reset_state(self) -> None: self._current_volume = 0.0 @@ -656,8 +656,8 @@ def _reload_and_check_skip( # Same config, good enough return attached_instr, True else: - newdict = new_config.dict() - olddict = attached_instr.config.dict() + newdict = new_config.model_dump() + olddict = attached_instr.config.model_dump() changed: Set[str] = set() for k in newdict.keys(): if newdict[k] != olddict[k]: diff --git a/api/src/opentrons/hardware_control/instruments/ot3/gripper.py b/api/src/opentrons/hardware_control/instruments/ot3/gripper.py index ba49ea7d5e7..bd70547ee45 100644 --- a/api/src/opentrons/hardware_control/instruments/ot3/gripper.py +++ b/api/src/opentrons/hardware_control/instruments/ot3/gripper.py @@ -318,8 +318,8 @@ def _reload_gripper( # Same config, good enough return attached_instr, True else: - newdict = new_config.dict() - olddict = attached_instr.config.dict() + newdict = new_config.model_dump() + olddict = attached_instr.config.model_dump() changed: Set[str] = set() for k in newdict.keys(): if newdict[k] != olddict[k]: diff --git a/api/src/opentrons/hardware_control/instruments/ot3/pipette.py b/api/src/opentrons/hardware_control/instruments/ot3/pipette.py index b9355874906..6098b88b964 100644 --- a/api/src/opentrons/hardware_control/instruments/ot3/pipette.py +++ b/api/src/opentrons/hardware_control/instruments/ot3/pipette.py @@ -79,7 +79,7 @@ def __init__( 
use_old_aspiration_functions: bool = False, ) -> None: self._config = config - self._config_as_dict = config.dict() + self._config_as_dict = config.model_dump() self._plunger_motor_current = config.plunger_motor_configurations self._pick_up_configurations = config.pick_up_tip_configurations self._plunger_homing_configurations = config.plunger_homing_configurations @@ -251,7 +251,7 @@ def reload_configurations(self) -> None: self._pipette_model.pipette_channels, self._pipette_model.pipette_version, ) - self._config_as_dict = self._config.dict() + self._config_as_dict = self._config.model_dump() def reset_state(self) -> None: self._current_volume = 0.0 @@ -770,8 +770,8 @@ def _reload_and_check_skip( # Same config, good enough return attached_instr, True else: - newdict = new_config.dict() - olddict = attached_instr.config.dict() + newdict = new_config.model_dump() + olddict = attached_instr.config.model_dump() changed: Set[str] = set() for k in newdict.keys(): if newdict[k] != olddict[k]: diff --git a/api/src/opentrons/hardware_control/ot3_calibration.py b/api/src/opentrons/hardware_control/ot3_calibration.py index b0ebcd027ce..9303add23d6 100644 --- a/api/src/opentrons/hardware_control/ot3_calibration.py +++ b/api/src/opentrons/hardware_control/ot3_calibration.py @@ -968,7 +968,7 @@ def load_attitude_matrix(to_default: bool = True) -> DeckCalibration: return DeckCalibration( attitude=apply_machine_transform(calibration_data.attitude), source=calibration_data.source, - status=types.CalibrationStatus(**calibration_data.status.dict()), + status=types.CalibrationStatus(**calibration_data.status.model_dump()), belt_attitude=calibration_data.attitude, last_modified=calibration_data.lastModified, pipette_calibrated_with=calibration_data.pipetteCalibratedWith, diff --git a/api/src/opentrons/hardware_control/robot_calibration.py b/api/src/opentrons/hardware_control/robot_calibration.py index 270344fff2f..8ecf6b67be6 100644 --- a/api/src/opentrons/hardware_control/robot_calibration.py +++ b/api/src/opentrons/hardware_control/robot_calibration.py @@ -154,7 +154,7 @@ def load_attitude_matrix() -> DeckCalibration: return DeckCalibration( attitude=calibration_data.attitude, source=calibration_data.source, - status=types.CalibrationStatus(**calibration_data.status.dict()), + status=types.CalibrationStatus(**calibration_data.status.model_dump()), last_modified=calibration_data.last_modified, pipette_calibrated_with=calibration_data.pipette_calibrated_with, tiprack=calibration_data.tiprack, diff --git a/api/src/opentrons/protocol_api/core/engine/labware.py b/api/src/opentrons/protocol_api/core/engine/labware.py index 4d868bd30ac..d462401927f 100644 --- a/api/src/opentrons/protocol_api/core/engine/labware.py +++ b/api/src/opentrons/protocol_api/core/engine/labware.py @@ -92,12 +92,14 @@ def get_name(self) -> str: def get_definition(self) -> LabwareDefinitionDict: """Get the labware's definition as a plain dictionary.""" - return cast(LabwareDefinitionDict, self._definition.dict(exclude_none=True)) + return cast( + LabwareDefinitionDict, self._definition.model_dump(exclude_none=True) + ) def get_parameters(self) -> LabwareParametersDict: return cast( LabwareParametersDict, - self._definition.parameters.dict(exclude_none=True), + self._definition.parameters.model_dump(exclude_none=True), ) def get_quirks(self) -> List[str]: @@ -118,7 +120,7 @@ def set_calibration(self, delta: Point) -> None: details={"kind": "labware-not-in-slot"}, ) - request = LabwareOffsetCreate.construct( + request = 
LabwareOffsetCreate.model_construct( definitionUri=self.get_uri(), location=offset_location, vector=LabwareOffsetVector(x=delta.x, y=delta.y, z=delta.z), diff --git a/api/src/opentrons/protocol_api/core/engine/protocol.py b/api/src/opentrons/protocol_api/core/engine/protocol.py index cf0a8e1dfd7..bfc808c3091 100644 --- a/api/src/opentrons/protocol_api/core/engine/protocol.py +++ b/api/src/opentrons/protocol_api/core/engine/protocol.py @@ -193,7 +193,7 @@ def add_labware_definition( ) -> LabwareLoadParams: """Add a labware definition to the set of loadable definitions.""" uri = self._engine_client.add_labware_definition( - LabwareDefinition.parse_obj(definition) + LabwareDefinition.model_validate(definition) ) return LabwareLoadParams.from_uri(uri) diff --git a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py index c557028c283..b06a2527cc8 100644 --- a/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py +++ b/api/src/opentrons/protocol_engine/commands/absorbance_reader/read.py @@ -124,7 +124,9 @@ async def execute( # noqa: C901 ) asbsorbance_result[wavelength] = converted_values transform_results.append( - ReadData.construct(wavelength=wavelength, data=converted_values) + ReadData.model_construct( + wavelength=wavelength, data=converted_values + ) ) # Handle the virtual module case for data creation (all zeroes) elif self._state_view.config.use_virtual_modules: @@ -138,7 +140,9 @@ async def execute( # noqa: C901 ) asbsorbance_result[wavelength] = converted_values transform_results.append( - ReadData.construct(wavelength=wavelength, data=converted_values) + ReadData.model_construct( + wavelength=wavelength, data=converted_values + ) ) else: raise CannotPerformModuleAction( @@ -153,7 +157,7 @@ async def execute( # noqa: C901 file_ids: list[str] = [] if params.fileName is not None: # Create the Plate Reader Transform - plate_read_result = PlateReaderData.construct( + plate_read_result = PlateReaderData.model_construct( read_results=transform_results, reference_wavelength=abs_reader_substate.reference_wavelength, start_time=start_time, diff --git a/api/src/opentrons/protocol_engine/commands/calibration/calibrate_gripper.py b/api/src/opentrons/protocol_engine/commands/calibration/calibrate_gripper.py index 2dac135c4f5..25ab19e2cd4 100644 --- a/api/src/opentrons/protocol_engine/commands/calibration/calibrate_gripper.py +++ b/api/src/opentrons/protocol_engine/commands/calibration/calibrate_gripper.py @@ -125,8 +125,8 @@ async def execute( calibration_data = result return SuccessData( - public=CalibrateGripperResult.construct( - jawOffset=Vec3f.construct( + public=CalibrateGripperResult.model_construct( + jawOffset=Vec3f.model_construct( x=probe_offset.x, y=probe_offset.y, z=probe_offset.z ), savedCalibration=calibration_data, diff --git a/api/src/opentrons/protocol_engine/commands/calibration/calibrate_pipette.py b/api/src/opentrons/protocol_engine/commands/calibration/calibrate_pipette.py index 70daeecb6d6..cb0eb93876c 100644 --- a/api/src/opentrons/protocol_engine/commands/calibration/calibrate_pipette.py +++ b/api/src/opentrons/protocol_engine/commands/calibration/calibrate_pipette.py @@ -65,8 +65,8 @@ async def execute( await ot3_api.save_instrument_offset(mount=ot3_mount, delta=pipette_offset) return SuccessData( - public=CalibratePipetteResult.construct( - pipetteOffset=InstrumentOffsetVector.construct( + public=CalibratePipetteResult.model_construct( + 
pipetteOffset=InstrumentOffsetVector.model_construct( x=pipette_offset.x, y=pipette_offset.y, z=pipette_offset.z ) ), diff --git a/api/src/opentrons/protocol_engine/commands/configure_nozzle_layout.py b/api/src/opentrons/protocol_engine/commands/configure_nozzle_layout.py index db06491ab14..072307a0609 100644 --- a/api/src/opentrons/protocol_engine/commands/configure_nozzle_layout.py +++ b/api/src/opentrons/protocol_engine/commands/configure_nozzle_layout.py @@ -61,9 +61,11 @@ async def execute( self, params: ConfigureNozzleLayoutParams ) -> SuccessData[ConfigureNozzleLayoutResult]: """Check that requested pipette can support the requested nozzle layout.""" - primary_nozzle = params.configurationParams.dict().get("primaryNozzle") - front_right_nozzle = params.configurationParams.dict().get("frontRightNozzle") - back_left_nozzle = params.configurationParams.dict().get("backLeftNozzle") + primary_nozzle = params.configurationParams.model_dump().get("primaryNozzle") + front_right_nozzle = params.configurationParams.model_dump().get( + "frontRightNozzle" + ) + back_left_nozzle = params.configurationParams.model_dump().get("backLeftNozzle") nozzle_params = await self._tip_handler.available_for_nozzle_layout( pipette_id=params.pipetteId, style=params.configurationParams.style, diff --git a/api/src/opentrons/protocol_engine/commands/custom.py b/api/src/opentrons/protocol_engine/commands/custom.py index 3190385f2d2..b15b5cdb8d3 100644 --- a/api/src/opentrons/protocol_engine/commands/custom.py +++ b/api/src/opentrons/protocol_engine/commands/custom.py @@ -44,7 +44,7 @@ class CustomImplementation( async def execute(self, params: CustomParams) -> SuccessData[CustomResult]: """A custom command does nothing when executed directly.""" return SuccessData( - public=CustomResult.construct(), + public=CustomResult.model_construct(), ) diff --git a/api/src/opentrons/protocol_engine/commands/movement_common.py b/api/src/opentrons/protocol_engine/commands/movement_common.py index 786b59921b2..babf70b29d9 100644 --- a/api/src/opentrons/protocol_engine/commands/movement_common.py +++ b/api/src/opentrons/protocol_engine/commands/movement_common.py @@ -182,7 +182,7 @@ async def move_to_well( state_update=StateUpdate().clear_all_pipette_locations(), ) else: - deck_point = DeckPoint.construct(x=position.x, y=position.y, z=position.z) + deck_point = DeckPoint.model_construct(x=position.x, y=position.y, z=position.z) return SuccessData( public=DestinationPositionResult( position=deck_point, @@ -222,7 +222,7 @@ async def move_relative( state_update=StateUpdate().clear_all_pipette_locations(), ) else: - deck_point = DeckPoint.construct(x=position.x, y=position.y, z=position.z) + deck_point = DeckPoint.model_construct(x=position.x, y=position.y, z=position.z) return SuccessData( public=DestinationPositionResult( position=deck_point, @@ -277,7 +277,7 @@ async def move_to_addressable_area( .set_addressable_area_used(addressable_area_name=addressable_area_name), ) else: - deck_point = DeckPoint.construct(x=x, y=y, z=z) + deck_point = DeckPoint.model_construct(x=x, y=y, z=z) return SuccessData( public=DestinationPositionResult(position=deck_point), state_update=StateUpdate() @@ -324,7 +324,7 @@ async def move_to_coordinates( state_update=StateUpdate().clear_all_pipette_locations(), ) else: - deck_point = DeckPoint.construct(x=x, y=y, z=z) + deck_point = DeckPoint.model_construct(x=x, y=y, z=z) return SuccessData( public=DestinationPositionResult(position=DeckPoint(x=x, y=y, z=z)), diff --git 
a/api/src/opentrons/protocol_engine/commands/touch_tip.py b/api/src/opentrons/protocol_engine/commands/touch_tip.py index 4ccced4b3d3..d4591bf1d27 100644 --- a/api/src/opentrons/protocol_engine/commands/touch_tip.py +++ b/api/src/opentrons/protocol_engine/commands/touch_tip.py @@ -154,7 +154,7 @@ async def execute( waypoints=touch_waypoints, speed=touch_speed, ) - final_deck_point = DeckPoint.construct( + final_deck_point = DeckPoint.model_construct( x=final_point.x, y=final_point.y, z=final_point.z ) state_update = center_result.state_update.set_pipette_location( diff --git a/api/src/opentrons/protocol_engine/errors/error_occurrence.py b/api/src/opentrons/protocol_engine/errors/error_occurrence.py index 34f3e1d2ac7..002596d0172 100644 --- a/api/src/opentrons/protocol_engine/errors/error_occurrence.py +++ b/api/src/opentrons/protocol_engine/errors/error_occurrence.py @@ -29,7 +29,7 @@ def from_failed( wrappedErrors = [ cls.from_failed(id, createdAt, err) for err in error.wrapping ] - return cls.construct( + return cls.model_construct( id=id, createdAt=createdAt, errorType=type(error).__name__, diff --git a/api/src/opentrons/protocol_engine/execution/command_executor.py b/api/src/opentrons/protocol_engine/execution/command_executor.py index b6c686e0b11..47184d94ef2 100644 --- a/api/src/opentrons/protocol_engine/execution/command_executor.py +++ b/api/src/opentrons/protocol_engine/execution/command_executor.py @@ -188,7 +188,7 @@ async def execute(self, command_id: str) -> None: "completedAt": self._model_utils.get_timestamp(), "notes": note_tracker.get_notes(), } - succeeded_command = running_command.copy(update=update) + succeeded_command = running_command.model_copy(update=update) self._action_dispatcher.dispatch( SucceedCommandAction( command=succeeded_command, diff --git a/api/src/opentrons/protocol_engine/notes/notes.py b/api/src/opentrons/protocol_engine/notes/notes.py index 8c349d167cd..2ec71d90b55 100644 --- a/api/src/opentrons/protocol_engine/notes/notes.py +++ b/api/src/opentrons/protocol_engine/notes/notes.py @@ -35,7 +35,7 @@ def make_error_recovery_debug_note(type: "ErrorRecoveryType") -> CommandNote: This is intended to be read by developers and support people, not computers. """ message = f"Handling this command failure with {type.name}." 
- return CommandNote.construct( + return CommandNote.model_construct( noteKind="debugErrorRecovery", shortMessage=message, longMessage=message, diff --git a/api/src/opentrons/protocol_engine/resources/labware_data_provider.py b/api/src/opentrons/protocol_engine/resources/labware_data_provider.py index 0b08720d4e9..8d5cdfc7899 100644 --- a/api/src/opentrons/protocol_engine/resources/labware_data_provider.py +++ b/api/src/opentrons/protocol_engine/resources/labware_data_provider.py @@ -44,7 +44,7 @@ async def get_labware_definition( def _get_labware_definition_sync( load_name: str, namespace: str, version: int ) -> LabwareDefinition: - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( get_labware_definition(load_name, namespace, version) ) diff --git a/api/src/opentrons/protocol_engine/resources/module_data_provider.py b/api/src/opentrons/protocol_engine/resources/module_data_provider.py index a12b85ee5b3..3ee7b5d6bd9 100644 --- a/api/src/opentrons/protocol_engine/resources/module_data_provider.py +++ b/api/src/opentrons/protocol_engine/resources/module_data_provider.py @@ -22,7 +22,7 @@ class ModuleDataProvider: def get_definition(model: ModuleModel) -> ModuleDefinition: """Get the module definition.""" data = load_definition(model_or_loadname=model.value, version="3") - return ModuleDefinition.parse_obj(data) + return ModuleDefinition.model_validate(data) @staticmethod def load_module_calibrations() -> Dict[str, ModuleOffsetData]: diff --git a/api/src/opentrons/protocol_engine/slot_standardization.py b/api/src/opentrons/protocol_engine/slot_standardization.py index b600258bbf0..d940517eebe 100644 --- a/api/src/opentrons/protocol_engine/slot_standardization.py +++ b/api/src/opentrons/protocol_engine/slot_standardization.py @@ -35,9 +35,9 @@ def standardize_labware_offset( original: LabwareOffsetCreate, robot_type: RobotType ) -> LabwareOffsetCreate: """Convert the deck slot in the given `LabwareOffsetCreate` to match the given robot type.""" - return original.copy( + return original.model_copy( update={ - "location": original.location.copy( + "location": original.location.model_copy( update={ "slotName": original.location.slotName.to_equivalent_for_robot_type( robot_type @@ -70,40 +70,40 @@ def standardize_command( def _standardize_load_labware( original: commands.LoadLabwareCreate, robot_type: RobotType ) -> commands.LoadLabwareCreate: - params = original.params.copy( + params = original.params.model_copy( update={ "location": _standardize_labware_location( original.params.location, robot_type ) } ) - return original.copy(update={"params": params}) + return original.model_copy(update={"params": params}) def _standardize_load_module( original: commands.LoadModuleCreate, robot_type: RobotType ) -> commands.LoadModuleCreate: - params = original.params.copy( + params = original.params.model_copy( update={ "location": _standardize_deck_slot_location( original.params.location, robot_type ) } ) - return original.copy(update={"params": params}) + return original.model_copy(update={"params": params}) def _standardize_move_labware( original: commands.MoveLabwareCreate, robot_type: RobotType ) -> commands.MoveLabwareCreate: - params = original.params.copy( + params = original.params.model_copy( update={ "newLocation": _standardize_labware_location( original.params.newLocation, robot_type ) } ) - return original.copy(update={"params": params}) + return original.model_copy(update={"params": params}) _standardize_command_functions: Dict[ @@ -135,6 +135,6 @@ def 
_standardize_labware_location( def _standardize_deck_slot_location( original: DeckSlotLocation, robot_type: RobotType ) -> DeckSlotLocation: - return original.copy( + return original.model_copy( update={"slotName": original.slotName.to_equivalent_for_robot_type(robot_type)} ) diff --git a/api/src/opentrons/protocol_engine/state/geometry.py b/api/src/opentrons/protocol_engine/state/geometry.py index ed915530b90..e0d9cb1afa1 100644 --- a/api/src/opentrons/protocol_engine/state/geometry.py +++ b/api/src/opentrons/protocol_engine/state/geometry.py @@ -489,7 +489,7 @@ def get_well_position( well_depth=well_depth, operation_volume=operation_volume, ) - offset = offset.copy(update={"z": offset.z + offset_adjustment}) + offset = offset.model_copy(update={"z": offset.z + offset_adjustment}) self.validate_well_position( well_location=well_location, z_offset=offset.z, pipette_id=pipette_id ) diff --git a/api/src/opentrons/protocol_engine/state/labware.py b/api/src/opentrons/protocol_engine/state/labware.py index d81d5bfa756..3f00ad14de7 100644 --- a/api/src/opentrons/protocol_engine/state/labware.py +++ b/api/src/opentrons/protocol_engine/state/labware.py @@ -131,7 +131,7 @@ def __init__( for fixed_labware in deck_fixed_labware } labware_by_id = { - fixed_labware.labware_id: LoadedLabware.construct( + fixed_labware.labware_id: LoadedLabware.model_construct( id=fixed_labware.labware_id, location=fixed_labware.location, loadName=fixed_labware.definition.parameters.loadName, @@ -159,7 +159,7 @@ def handle_action(self, action: Action) -> None: self._set_labware_location(state_update) if isinstance(action, AddLabwareOffsetAction): - labware_offset = LabwareOffset.construct( + labware_offset = LabwareOffset.model_construct( id=action.labware_offset_id, createdAt=action.created_at, definitionUri=action.request.definitionUri, @@ -212,7 +212,7 @@ def _add_loaded_labware(self, state_update: update_types.StateUpdate) -> None: self._state.labware_by_id[ loaded_labware_update.labware_id - ] = LoadedLabware.construct( + ] = LoadedLabware.model_construct( id=loaded_labware_update.labware_id, location=location, loadName=loaded_labware_update.definition.parameters.loadName, @@ -998,12 +998,12 @@ def get_child_gripper_offsets( return None else: return LabwareMovementOffsetData( - pickUpOffset=LabwareOffsetVector.construct( + pickUpOffset=LabwareOffsetVector.model_construct( x=parsed_offsets[offset_key].pickUpOffset.x, y=parsed_offsets[offset_key].pickUpOffset.y, z=parsed_offsets[offset_key].pickUpOffset.z, ), - dropOffset=LabwareOffsetVector.construct( + dropOffset=LabwareOffsetVector.model_construct( x=parsed_offsets[offset_key].dropOffset.x, y=parsed_offsets[offset_key].dropOffset.y, z=parsed_offsets[offset_key].dropOffset.z, diff --git a/api/src/opentrons/protocol_engine/state/modules.py b/api/src/opentrons/protocol_engine/state/modules.py index ebf503c51fb..a0b22f14fcb 100644 --- a/api/src/opentrons/protocol_engine/state/modules.py +++ b/api/src/opentrons/protocol_engine/state/modules.py @@ -654,7 +654,7 @@ def get(self, module_id: str) -> LoadedModule: DeckSlotLocation(slotName=slot_name) if slot_name is not None else None ) - return LoadedModule.construct( + return LoadedModule.model_construct( id=module_id, location=location, model=attached_module.definition.model, diff --git a/api/src/opentrons/protocol_engine/state/state.py b/api/src/opentrons/protocol_engine/state/state.py index 58e977cc2f4..5ff12b739f3 100644 --- a/api/src/opentrons/protocol_engine/state/state.py +++ 
b/api/src/opentrons/protocol_engine/state/state.py @@ -143,7 +143,7 @@ def get_summary(self) -> StateSummary: """Get protocol run data.""" error = self._commands.get_error() # TODO maybe add summary here for AA - return StateSummary.construct( + return StateSummary.model_construct( status=self._commands.get_status(), errors=[] if error is None else [error], pipettes=self._pipettes.get_all(), diff --git a/api/src/opentrons/protocol_engine/types.py b/api/src/opentrons/protocol_engine/types.py index 2a4ff4bd726..b1388d58212 100644 --- a/api/src/opentrons/protocol_engine/types.py +++ b/api/src/opentrons/protocol_engine/types.py @@ -878,7 +878,7 @@ def dict_to_tuple(d: dict[str, Any]) -> tuple[tuple[str, Any], ...]: for field_name, value in d.items() ) - return hash(dict_to_tuple(self.dict())) + return hash(dict_to_tuple(self.model_dump())) class LiquidClassRecordWithId(LiquidClassRecord, frozen=True): diff --git a/api/src/opentrons/protocol_reader/extract_labware_definitions.py b/api/src/opentrons/protocol_reader/extract_labware_definitions.py index 6bf8946404b..88d7e256a07 100644 --- a/api/src/opentrons/protocol_reader/extract_labware_definitions.py +++ b/api/src/opentrons/protocol_reader/extract_labware_definitions.py @@ -55,7 +55,7 @@ def extract_sync(path: Path) -> List[LabwareDefinition]: # which require this labwareDefinitions key. unvalidated_definitions = json_contents["labwareDefinitions"].values() validated_definitions = [ - LabwareDefinition.parse_obj(u) for u in unvalidated_definitions + LabwareDefinition.model_validate(u) for u in unvalidated_definitions ] return validated_definitions diff --git a/api/src/opentrons/protocol_reader/file_format_validator.py b/api/src/opentrons/protocol_reader/file_format_validator.py index df119ac3ffa..17969fc70fe 100644 --- a/api/src/opentrons/protocol_reader/file_format_validator.py +++ b/api/src/opentrons/protocol_reader/file_format_validator.py @@ -60,7 +60,7 @@ async def validate(files: Iterable[IdentifiedFile]) -> None: async def _validate_labware_definition(info: IdentifiedLabwareDefinition) -> None: def validate_sync() -> None: try: - LabwareDefinition.parse_obj(info.unvalidated_json) + LabwareDefinition.model_validate(info.unvalidated_json) except PydanticValidationError as e: raise FileFormatValidationError( message=f"{info.original_file.name} could not be read as a labware definition.", @@ -133,17 +133,17 @@ async def _validate_json_protocol(info: IdentifiedJsonMain) -> None: def validate_sync() -> None: if info.schema_version == 8: try: - JsonProtocolV8.parse_obj(info.unvalidated_json) + JsonProtocolV8.model_validate(info.unvalidated_json) except PydanticValidationError as pve: _handle_v8_json_protocol_validation_error(info, pve) else: try: if info.schema_version == 7: - JsonProtocolV7.parse_obj(info.unvalidated_json) + JsonProtocolV7.model_validate(info.unvalidated_json) elif info.schema_version == 6: - JsonProtocolV6.parse_obj(info.unvalidated_json) + JsonProtocolV6.model_validate(info.unvalidated_json) else: - JsonProtocolUpToV5.parse_obj(info.unvalidated_json) + JsonProtocolUpToV5.model_validate(info.unvalidated_json) except PydanticValidationError as e: raise FileFormatValidationError._generic_json_failure(info, e) from e diff --git a/api/src/opentrons/protocol_runner/json_translator.py b/api/src/opentrons/protocol_runner/json_translator.py index f75b0d51348..f20bb3464d8 100644 --- a/api/src/opentrons/protocol_runner/json_translator.py +++ b/api/src/opentrons/protocol_runner/json_translator.py @@ -63,7 +63,7 @@ def 
_translate_labware_command( namespace=protocol.labwareDefinitions[definition_id].namespace, loadName=protocol.labwareDefinitions[definition_id].parameters.loadName, location=LabwareLocationAdapter.validate_python( - location.dict() if isinstance(location, Location) else location + location.model_dump() if isinstance(location, Location) else location ), ), key=command.key, @@ -91,7 +91,7 @@ def _translate_v7_labware_command( namespace=command.params.namespace, loadName=command.params.loadName, location=LabwareLocationAdapter.validate_python( - location.dict() if isinstance(location, Location) else location + location.model_dump() if isinstance(location, Location) else location ), ), key=command.key, @@ -114,8 +114,8 @@ def _translate_module_command( translated_obj = pe_commands.LoadModuleCreate( params=pe_commands.LoadModuleParams( model=ModuleModel(modules[module_id].model), - location=DeckSlotLocation.parse_obj( - location.dict() if isinstance(location, Location) else location + location=DeckSlotLocation.model_validate( + location.model_dump() if isinstance(location, Location) else location ), moduleId=command.params.moduleId, ), @@ -136,8 +136,8 @@ def _translate_v7_module_command( translated_obj = pe_commands.LoadModuleCreate( params=pe_commands.LoadModuleParams( model=ModuleModel(command.params.model), - location=DeckSlotLocation.parse_obj( - location.dict() if isinstance(location, Location) else location + location=DeckSlotLocation.model_validate( + location.model_dump() if isinstance(location, Location) else location ), moduleId=command.params.moduleId, ), @@ -191,7 +191,7 @@ def _translate_simple_command( protocol_schema_v8.Command, ], ) -> pe_commands.CommandCreate: - dict_command = command.dict(exclude_none=True) + dict_command = command.model_dump(exclude_none=True) # map deprecated `delay` commands to `waitForResume` / `waitForDuration` if dict_command["commandType"] == "delay": @@ -305,7 +305,7 @@ def translate_command_annotations( else: command_annotations: List[CommandAnnotation] = [ CommandAnnotationAdapter.validate_python( - command_annotation.dict(), + command_annotation.model_dump(), ) for command_annotation in protocol.commandAnnotations ] diff --git a/api/src/opentrons/protocol_runner/legacy_command_mapper.py b/api/src/opentrons/protocol_runner/legacy_command_mapper.py index 1dbf99e6e6e..f7d50e539d8 100644 --- a/api/src/opentrons/protocol_runner/legacy_command_mapper.py +++ b/api/src/opentrons/protocol_runner/legacy_command_mapper.py @@ -177,9 +177,9 @@ def map_command( # noqa: C901 completed_command: pe_commands.Command if command_error is None: if isinstance(running_command, pe_commands.PickUpTip): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - "result": pe_commands.PickUpTipResult.construct( + "result": pe_commands.PickUpTipResult.model_construct( tipVolume=command["payload"]["location"].max_volume, # type: ignore[typeddict-item] tipLength=command["payload"]["instrument"].hw_pipette[ # type: ignore[typeddict-item] "tip_length" @@ -192,9 +192,9 @@ def map_command( # noqa: C901 } ) elif isinstance(running_command, pe_commands.DropTip): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - "result": pe_commands.DropTipResult.construct( + "result": pe_commands.DropTipResult.model_construct( position=pe_types.DeckPoint(x=0, y=0, z=0) ), "status": pe_commands.CommandStatus.SUCCEEDED, @@ -203,9 +203,9 @@ def map_command( # noqa: C901 } ) elif 
isinstance(running_command, pe_commands.Aspirate): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - # Don't .construct() result, because we want to validate + # Don't .model_construct() result, because we want to validate # volume. "result": pe_commands.AspirateResult( volume=running_command.params.volume, @@ -217,9 +217,9 @@ def map_command( # noqa: C901 } ) elif isinstance(running_command, pe_commands.Dispense): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - # Don't .construct() result, because we want to validate + # Don't .model_construct() result, because we want to validate # volume. "result": pe_commands.DispenseResult( volume=running_command.params.volume, @@ -231,9 +231,9 @@ def map_command( # noqa: C901 } ) elif isinstance(running_command, pe_commands.BlowOut): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - "result": pe_commands.BlowOutResult.construct( + "result": pe_commands.BlowOutResult.model_construct( position=pe_types.DeckPoint(x=0, y=0, z=0) ), "status": pe_commands.CommandStatus.SUCCEEDED, @@ -242,18 +242,18 @@ def map_command( # noqa: C901 } ) elif isinstance(running_command, pe_commands.Comment): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - "result": pe_commands.CommentResult.construct(), + "result": pe_commands.CommentResult.model_construct(), "status": pe_commands.CommandStatus.SUCCEEDED, "completedAt": now, "notes": [], } ) elif isinstance(running_command, pe_commands.Custom): - completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ - "result": pe_commands.CustomResult.construct(), + "result": pe_commands.CustomResult.model_construct(), "status": pe_commands.CommandStatus.SUCCEEDED, "completedAt": now, "notes": [], @@ -263,7 +263,7 @@ def map_command( # noqa: C901 # TODO(mm, 2024-06-13): This looks potentially wrong. # We're creating a `SUCCEEDED` command that does not have a `result`, # which is not normally possible. 
- completed_command = running_command.copy( + completed_command = running_command.model_copy( update={ "status": pe_commands.CommandStatus.SUCCEEDED, "completedAt": now, @@ -333,51 +333,51 @@ def _build_initial_command( elif command["name"] == legacy_command_types.BLOW_OUT: return self._build_blow_out(command=command, command_id=command_id, now=now) elif command["name"] == legacy_command_types.PAUSE: - wait_for_resume_running = pe_commands.WaitForResume.construct( + wait_for_resume_running = pe_commands.WaitForResume.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=pe_commands.WaitForResumeParams.construct( + params=pe_commands.WaitForResumeParams.model_construct( message=command["payload"]["userMessage"], ), ) wait_for_resume_create: pe_commands.CommandCreate = ( - pe_commands.WaitForResumeCreate.construct( + pe_commands.WaitForResumeCreate.model_construct( key=wait_for_resume_running.key, params=wait_for_resume_running.params, ) ) return wait_for_resume_create, wait_for_resume_running elif command["name"] == legacy_command_types.COMMENT: - comment_running = pe_commands.Comment.construct( + comment_running = pe_commands.Comment.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=pe_commands.CommentParams.construct( + params=pe_commands.CommentParams.model_construct( message=command["payload"]["text"], ), ) - comment_create = pe_commands.CommentCreate.construct( + comment_create = pe_commands.CommentCreate.model_construct( key=comment_running.key, params=comment_running.params ) return comment_create, comment_running else: - custom_running = pe_commands.Custom.construct( + custom_running = pe_commands.Custom.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=LegacyCommandParams.construct( + params=LegacyCommandParams.model_construct( legacyCommandType=command["name"], legacyCommandText=command["payload"]["text"], ), ) - custom_create = pe_commands.CustomCreate.construct( + custom_create = pe_commands.CustomCreate.model_construct( key=custom_running.key, params=custom_running.params, ) @@ -398,19 +398,19 @@ def _build_drop_tip( labware_id = self._labware_id_by_slot[slot] pipette_id = self._pipette_id_by_mount[mount] - running = pe_commands.DropTip.construct( + running = pe_commands.DropTip.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=pe_commands.DropTipParams.construct( + params=pe_commands.DropTipParams.model_construct( pipetteId=pipette_id, labwareId=labware_id, wellName=well_name, ), ) - create = pe_commands.DropTipCreate.construct( + create = pe_commands.DropTipCreate.model_construct( key=running.key, params=running.params, ) @@ -432,19 +432,19 @@ def _build_pick_up_tip( labware_id = self._labware_id_by_slot[slot] pipette_id = self._pipette_id_by_mount[mount] - running = pe_commands.PickUpTip.construct( + running = pe_commands.PickUpTip.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=pe_commands.PickUpTipParams.construct( + params=pe_commands.PickUpTipParams.model_construct( pipetteId=pipette_id, labwareId=labware_id, wellName=well_name, ), ) - create = pe_commands.PickUpTipCreate.construct( + create = pe_commands.PickUpTipCreate.model_construct( key=running.key, params=running.params ) return 
create, running @@ -484,31 +484,31 @@ def _build_liquid_handling( # TODO(mm, 2024-03-22): I don't think this has been true since # https://github.com/Opentrons/opentrons/pull/14211. Can we just use # aspirate and dispense commands now? - move_to_well_running = pe_commands.MoveToWell.construct( + move_to_well_running = pe_commands.MoveToWell.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=pe_commands.MoveToWellParams.construct( + params=pe_commands.MoveToWellParams.model_construct( pipetteId=pipette_id, labwareId=labware_id, wellName=well_name, ), ) - move_to_well_create = pe_commands.MoveToWellCreate.construct( + move_to_well_create = pe_commands.MoveToWellCreate.model_construct( key=move_to_well_running.key, params=move_to_well_running.params ) return move_to_well_create, move_to_well_running elif command["name"] == legacy_command_types.ASPIRATE: flow_rate = command["payload"]["rate"] * pipette.flow_rate.aspirate - aspirate_running = pe_commands.Aspirate.construct( + aspirate_running = pe_commands.Aspirate.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - # Don't .construct() params, because we want to validate + # Don't .model_construct() params, because we want to validate # volume and flowRate. params=pe_commands.AspirateParams( pipetteId=pipette_id, @@ -518,13 +518,13 @@ def _build_liquid_handling( flowRate=flow_rate, ), ) - aspirate_create = pe_commands.AspirateCreate.construct( + aspirate_create = pe_commands.AspirateCreate.model_construct( key=aspirate_running.key, params=aspirate_running.params ) return aspirate_create, aspirate_running else: flow_rate = command["payload"]["rate"] * pipette.flow_rate.dispense - dispense_running = pe_commands.Dispense.construct( + dispense_running = pe_commands.Dispense.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, @@ -540,24 +540,24 @@ def _build_liquid_handling( flowRate=flow_rate, ), ) - dispense_create = pe_commands.DispenseCreate.construct( + dispense_create = pe_commands.DispenseCreate.model_construct( key=dispense_running.key, params=dispense_running.params ) return dispense_create, dispense_running else: - running = pe_commands.Custom.construct( + running = pe_commands.Custom.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=LegacyCommandParams.construct( + params=LegacyCommandParams.model_construct( legacyCommandType=command["name"], legacyCommandText=command["payload"]["text"], ), ) - create = pe_commands.CustomCreate.construct( + create = pe_commands.CustomCreate.model_construct( key=running.key, params=running.params ) return create, running @@ -586,13 +586,13 @@ def _build_blow_out( well_name = well.well_name pipette_id = self._pipette_id_by_mount[mount] - blow_out_running = pe_commands.BlowOut.construct( + blow_out_running = pe_commands.BlowOut.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - # Don't .construct() params, because we want to validate flowRate. + # Don't .model_construct() params, because we want to validate flowRate. 
params=pe_commands.BlowOutParams( pipetteId=pipette_id, labwareId=labware_id, @@ -600,7 +600,7 @@ def _build_blow_out( flowRate=flow_rate, ), ) - blow_out_create = pe_commands.BlowOutCreate.construct( + blow_out_create = pe_commands.BlowOutCreate.model_construct( key=blow_out_running.key, params=blow_out_running.params ) return blow_out_create, blow_out_running @@ -608,18 +608,18 @@ def _build_blow_out( # TODO:(jr, 15.08.2022): blow_out commands with no specified labware get filtered # into custom. Refactor this in followup legacy command mapping else: - custom_running = pe_commands.Custom.construct( + custom_running = pe_commands.Custom.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.RUNNING, createdAt=now, startedAt=now, - params=LegacyCommandParams.construct( + params=LegacyCommandParams.model_construct( legacyCommandType=command["name"], legacyCommandText=command["payload"]["text"], ), ) - custom_create = pe_commands.CustomCreate.construct( + custom_create = pe_commands.CustomCreate.model_construct( key=custom_running.key, params=custom_running.params ) return custom_create, custom_running @@ -633,23 +633,23 @@ def _map_labware_load( slot = labware_load_info.deck_slot location: pe_types.LabwareLocation if labware_load_info.on_module: - location = pe_types.ModuleLocation.construct( + location = pe_types.ModuleLocation.model_construct( moduleId=self._module_id_by_slot[slot] ) else: - location = pe_types.DeckSlotLocation.construct(slotName=slot) + location = pe_types.DeckSlotLocation.model_construct(slotName=slot) command_id = f"commands.LOAD_LABWARE-{count}" labware_id = f"labware-{count}" - succeeded_command = pe_commands.LoadLabware.construct( + succeeded_command = pe_commands.LoadLabware.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.SUCCEEDED, createdAt=now, startedAt=now, completedAt=now, - params=pe_commands.LoadLabwareParams.construct( + params=pe_commands.LoadLabwareParams.model_construct( location=location, loadName=labware_load_info.labware_load_name, namespace=labware_load_info.labware_namespace, @@ -657,9 +657,9 @@ def _map_labware_load( displayName=labware_load_info.labware_display_name, ), notes=[], - result=pe_commands.LoadLabwareResult.construct( + result=pe_commands.LoadLabwareResult.model_construct( labwareId=labware_id, - definition=LabwareDefinition.parse_obj( + definition=LabwareDefinition.model_validate( labware_load_info.labware_definition ), offsetId=labware_load_info.offset_id, @@ -668,7 +668,7 @@ def _map_labware_load( queue_action = pe_actions.QueueCommandAction( command_id=succeeded_command.id, created_at=succeeded_command.createdAt, - request=pe_commands.LoadLabwareCreate.construct( + request=pe_commands.LoadLabwareCreate.model_construct( key=succeeded_command.key, params=succeeded_command.params ), request_hash=None, @@ -716,19 +716,19 @@ def _map_instrument_load( pipette_id = f"pipette-{count}" mount = MountType(str(instrument_load_info.mount).lower()) - succeeded_command = pe_commands.LoadPipette.construct( + succeeded_command = pe_commands.LoadPipette.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.SUCCEEDED, createdAt=now, startedAt=now, completedAt=now, - params=pe_commands.LoadPipetteParams.construct( + params=pe_commands.LoadPipetteParams.model_construct( pipetteName=PipetteNameType(instrument_load_info.instrument_load_name), mount=mount, ), notes=[], - result=pe_commands.LoadPipetteResult.construct(pipetteId=pipette_id), + 
result=pe_commands.LoadPipetteResult.model_construct(pipetteId=pipette_id), ) serial = instrument_load_info.pipette_dict.get("pipette_id", None) or "" state_update = StateUpdate() @@ -751,7 +751,7 @@ def _map_instrument_load( queue_action = pe_actions.QueueCommandAction( command_id=succeeded_command.id, created_at=succeeded_command.createdAt, - request=pe_commands.LoadPipetteCreate.construct( + request=pe_commands.LoadPipetteCreate.model_construct( key=succeeded_command.key, params=succeeded_command.params ), request_hash=None, @@ -793,14 +793,14 @@ def _map_module_load( loaded_model ) or self._module_data_provider.get_definition(loaded_model) - succeeded_command = pe_commands.LoadModule.construct( + succeeded_command = pe_commands.LoadModule.model_construct( id=command_id, key=command_id, status=pe_commands.CommandStatus.SUCCEEDED, createdAt=now, startedAt=now, completedAt=now, - params=pe_commands.LoadModuleParams.construct( + params=pe_commands.LoadModuleParams.model_construct( model=requested_model, location=pe_types.DeckSlotLocation( slotName=module_load_info.deck_slot, @@ -808,7 +808,7 @@ def _map_module_load( moduleId=module_id, ), notes=[], - result=pe_commands.LoadModuleResult.construct( + result=pe_commands.LoadModuleResult.model_construct( moduleId=module_id, serialNumber=module_load_info.module_serial, definition=loaded_definition, @@ -818,7 +818,7 @@ def _map_module_load( queue_action = pe_actions.QueueCommandAction( command_id=succeeded_command.id, created_at=succeeded_command.createdAt, - request=pe_commands.LoadModuleCreate.construct( + request=pe_commands.LoadModuleCreate.model_construct( key=succeeded_command.key, params=succeeded_command.params ), request_hash=None, diff --git a/api/src/opentrons/protocol_runner/python_protocol_wrappers.py b/api/src/opentrons/protocol_runner/python_protocol_wrappers.py index f20012f1dfe..ce063013878 100644 --- a/api/src/opentrons/protocol_runner/python_protocol_wrappers.py +++ b/api/src/opentrons/protocol_runner/python_protocol_wrappers.py @@ -66,7 +66,7 @@ def read( namespace=lw.namespace, load_name=lw.parameters.loadName, version=lw.version, - ): cast(LabwareDefinitionTypedDict, lw.dict(exclude_none=True)) + ): cast(LabwareDefinitionTypedDict, lw.model_dump(exclude_none=True)) for lw in labware_definitions } data_file_paths = [ diff --git a/api/src/opentrons/simulate.py b/api/src/opentrons/simulate.py index e565bab83e0..bed24c68731 100644 --- a/api/src/opentrons/simulate.py +++ b/api/src/opentrons/simulate.py @@ -829,7 +829,9 @@ def _create_live_context_pe( # Non-async would use call_soon_threadsafe(), which makes the waiting harder. async def add_all_extra_labware() -> None: for labware_definition_dict in extra_labware.values(): - labware_definition = LabwareDefinition.parse_obj(labware_definition_dict) + labware_definition = LabwareDefinition.model_validate( + labware_definition_dict + ) pe.add_labware_definition(labware_definition) # Add extra_labware to ProtocolEngine, being careful not to modify ProtocolEngine from this diff --git a/api/tests/opentrons/calibration_storage/test_file_operators.py b/api/tests/opentrons/calibration_storage/test_file_operators.py index 5a95f225fe3..ec25a2279c1 100644 --- a/api/tests/opentrons/calibration_storage/test_file_operators.py +++ b/api/tests/opentrons/calibration_storage/test_file_operators.py @@ -84,7 +84,7 @@ def test_deserialize_pydantic_model_valid() -> None: serialized = b'{"integer_field": 123, "! 
aliased field !": "abc"}' assert io.deserialize_pydantic_model( serialized, DummyModel - ) == DummyModel.construct(integer_field=123, aliased_field="abc") + ) == DummyModel.model_construct(integer_field=123, aliased_field="abc") def test_deserialize_pydantic_model_invalid_as_json() -> None: diff --git a/api/tests/opentrons/calibration_storage/test_tip_length_ot2.py b/api/tests/opentrons/calibration_storage/test_tip_length_ot2.py index df503241d75..51096866b5d 100644 --- a/api/tests/opentrons/calibration_storage/test_tip_length_ot2.py +++ b/api/tests/opentrons/calibration_storage/test_tip_length_ot2.py @@ -46,7 +46,7 @@ def starting_calibration_data( "tipLength": 27, "lastModified": inside_data.lastModified.isoformat(), "source": inside_data.source, - "status": inside_data.status.dict(), + "status": inside_data.status.model_dump(), "uri": "dummy_namespace/minimal_labware_def/1", } } diff --git a/api/tests/opentrons/hardware_control/test_gripper.py b/api/tests/opentrons/hardware_control/test_gripper.py index 7fc54791e33..fdce4f27d3d 100644 --- a/api/tests/opentrons/hardware_control/test_gripper.py +++ b/api/tests/opentrons/hardware_control/test_gripper.py @@ -104,9 +104,9 @@ def test_reload_instrument_cal_ot3_conf_changed( "fakeid123", jaw_max_offset=15, ) - new_conf = fake_gripper_conf.copy( + new_conf = fake_gripper_conf.model_copy( update={ - "grip_force_profile": fake_gripper_conf.grip_force_profile.copy( + "grip_force_profile": fake_gripper_conf.grip_force_profile.model_copy( update={"default_grip_force": 1} ) }, diff --git a/api/tests/opentrons/protocol_engine/commands/test_air_gap_in_place.py b/api/tests/opentrons/protocol_engine/commands/test_air_gap_in_place.py index 5d66a845dcc..b9d110fd9c2 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_air_gap_in_place.py +++ b/api/tests/opentrons/protocol_engine/commands/test_air_gap_in_place.py @@ -265,7 +265,7 @@ async def test_overpressure_error( if isinstance(location, CurrentWell): assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -275,7 +275,7 @@ async def test_overpressure_error( ) else: assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_aspirate.py b/api/tests/opentrons/protocol_engine/commands/test_aspirate.py index 8e50d1825ae..4a8adbcdc76 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_aspirate.py +++ b/api/tests/opentrons/protocol_engine/commands/test_aspirate.py @@ -411,7 +411,7 @@ async def test_overpressure_error( result = await subject.execute(params) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -581,7 +581,7 @@ async def test_stall_during_final_movement( result = await subject.execute(params) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -639,7 +639,7 @@ async def test_stall_during_preparation( result = await subject.execute(params) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + 
public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate( @@ -715,7 +715,7 @@ async def test_overpressure_during_preparation( result = await subject.execute(params) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_aspirate_in_place.py b/api/tests/opentrons/protocol_engine/commands/test_aspirate_in_place.py index 48dba2e0c3e..5a7ca3ee940 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_aspirate_in_place.py +++ b/api/tests/opentrons/protocol_engine/commands/test_aspirate_in_place.py @@ -304,7 +304,7 @@ async def test_overpressure_error( if isinstance(location, CurrentWell): assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -323,7 +323,7 @@ async def test_overpressure_error( ) else: assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_blow_out.py b/api/tests/opentrons/protocol_engine/commands/test_blow_out.py index c06b62ace97..7549141be5b 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_blow_out.py +++ b/api/tests/opentrons/protocol_engine/commands/test_blow_out.py @@ -158,7 +158,7 @@ async def test_overpressure_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -233,7 +233,7 @@ async def test_stall_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_blow_out_in_place.py b/api/tests/opentrons/protocol_engine/commands/test_blow_out_in_place.py index 97e8e8c0851..50bee696c5a 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_blow_out_in_place.py +++ b/api/tests/opentrons/protocol_engine/commands/test_blow_out_in_place.py @@ -106,7 +106,7 @@ async def test_overpressure_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_dispense.py b/api/tests/opentrons/protocol_engine/commands/test_dispense.py index e0e18307b69..5b60b61d4df 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_dispense.py +++ b/api/tests/opentrons/protocol_engine/commands/test_dispense.py @@ -119,7 +119,7 @@ async def test_dispense_implementation( labware_id="labware-id-abc123", well_name="A3", ), - new_deck_point=DeckPoint.construct(x=1, y=2, z=3), + new_deck_point=DeckPoint.model_construct(x=1, y=2, z=3), ), liquid_operated=update_types.LiquidOperatedUpdate( labware_id="labware-id-abc123", @@ -203,7 +203,7 @@ async def 
test_overpressure_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -216,7 +216,7 @@ async def test_overpressure_error( labware_id="labware-id", well_name="well-name", ), - new_deck_point=DeckPoint.construct(x=1, y=2, z=3), + new_deck_point=DeckPoint.model_construct(x=1, y=2, z=3), ), liquid_operated=update_types.LiquidOperatedUpdate( labware_id="labware-id", @@ -234,7 +234,7 @@ async def test_overpressure_error( labware_id="labware-id", well_name="well-name", ), - new_deck_point=DeckPoint.construct(x=1, y=2, z=3), + new_deck_point=DeckPoint.model_construct(x=1, y=2, z=3), ), ), ) @@ -288,7 +288,7 @@ async def test_stall_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_dispense_in_place.py b/api/tests/opentrons/protocol_engine/commands/test_dispense_in_place.py index bc39fba4a00..e9c715223de 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_dispense_in_place.py +++ b/api/tests/opentrons/protocol_engine/commands/test_dispense_in_place.py @@ -207,7 +207,7 @@ async def test_overpressure_error( if isinstance(location, CurrentWell): assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -226,7 +226,7 @@ async def test_overpressure_error( ) else: assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_drop_tip.py b/api/tests/opentrons/protocol_engine/commands/test_drop_tip.py index 038ea12255b..430fa8dff32 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_drop_tip.py +++ b/api/tests/opentrons/protocol_engine/commands/test_drop_tip.py @@ -60,7 +60,7 @@ def mock_model_utils(decoy: Decoy) -> ModelUtils: def test_drop_tip_params_defaults() -> None: """A drop tip should use a `WellOrigin.DROP_TIP` by default.""" - default_params = DropTipParams.parse_obj( + default_params = DropTipParams.model_validate( {"pipetteId": "abc", "labwareId": "def", "wellName": "ghj"} ) @@ -71,7 +71,7 @@ def test_drop_tip_params_defaults() -> None: def test_drop_tip_params_default_origin() -> None: """A drop tip should drop a `WellOrigin.DROP_TIP` by default even if an offset is given.""" - default_params = DropTipParams.parse_obj( + default_params = DropTipParams.model_validate( { "pipetteId": "abc", "labwareId": "def", @@ -303,7 +303,7 @@ async def test_tip_attached_error( result = await subject.execute(params) assert result == DefinedErrorData( - public=TipPhysicallyAttachedError.construct( + public=TipPhysicallyAttachedError.model_construct( id="error-id", createdAt=datetime(year=1, month=2, day=3), wrappedErrors=[matchers.Anything()], @@ -396,7 +396,7 @@ async def test_stall_error( result = await subject.execute(params) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id="error-id", createdAt=datetime(year=1, month=2, day=3), 
wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_drop_tip_in_place.py b/api/tests/opentrons/protocol_engine/commands/test_drop_tip_in_place.py index 5565ffea88c..8c4716cf380 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_drop_tip_in_place.py +++ b/api/tests/opentrons/protocol_engine/commands/test_drop_tip_in_place.py @@ -104,7 +104,7 @@ async def test_tip_attached_error( result = await subject.execute(params) assert result == DefinedErrorData( - public=TipPhysicallyAttachedError.construct( + public=TipPhysicallyAttachedError.model_construct( id="error-id", createdAt=datetime(year=1, month=2, day=3), wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/commands/test_liquid_probe.py b/api/tests/opentrons/protocol_engine/commands/test_liquid_probe.py index 34b979901aa..c9661512aaa 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_liquid_probe.py +++ b/api/tests/opentrons/protocol_engine/commands/test_liquid_probe.py @@ -350,7 +350,7 @@ async def test_liquid_not_found_error( ) if isinstance(subject, LiquidProbeImplementation): assert result == DefinedErrorData( - public=LiquidNotFoundError.construct( + public=LiquidNotFoundError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], @@ -726,7 +726,7 @@ async def test_liquid_probe_stall( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate(pipette_location=update_types.CLEAR), diff --git a/api/tests/opentrons/protocol_engine/commands/test_load_liquid_class.py b/api/tests/opentrons/protocol_engine/commands/test_load_liquid_class.py index 041a7b2f8ca..54de10f3bc2 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_load_liquid_class.py +++ b/api/tests/opentrons/protocol_engine/commands/test_load_liquid_class.py @@ -152,7 +152,7 @@ async def test_load_liquid_class_conflicting_definition_for_id( liquid_class_record ) - new_liquid_class_record = liquid_class_record.copy(deep=True) + new_liquid_class_record = liquid_class_record.model_copy(deep=True) new_liquid_class_record.aspirate.offset.x += 123 # make it different params = LoadLiquidClassParams( liquidClassId="liquid-class-1", liquidClassRecord=new_liquid_class_record diff --git a/api/tests/opentrons/protocol_engine/commands/test_move_relative.py b/api/tests/opentrons/protocol_engine/commands/test_move_relative.py index 1e2d98ebf21..7a993c16d35 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_move_relative.py +++ b/api/tests/opentrons/protocol_engine/commands/test_move_relative.py @@ -85,7 +85,7 @@ async def test_move_relative_stalls( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=test_id, createdAt=timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate(pipette_location=update_types.CLEAR), diff --git a/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area.py b/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area.py index 9142f792252..0570d91c8bc 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area.py +++ b/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area.py 
@@ -209,7 +209,7 @@ async def test_move_to_addressable_area_implementation_handles_stalls( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=test_id, createdAt=timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate( diff --git a/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area_for_drop_tip.py b/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area_for_drop_tip.py index b6ee2097458..e90bb7271f7 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area_for_drop_tip.py +++ b/api/tests/opentrons/protocol_engine/commands/test_move_to_addressable_area_for_drop_tip.py @@ -133,7 +133,7 @@ async def test_move_to_addressable_area_for_drop_tip_handles_stalls( result = await subject.execute(data) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=test_id, createdAt=timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate( diff --git a/api/tests/opentrons/protocol_engine/commands/test_move_to_coordinates.py b/api/tests/opentrons/protocol_engine/commands/test_move_to_coordinates.py index 85afb189988..3c9fc10bb1c 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_move_to_coordinates.py +++ b/api/tests/opentrons/protocol_engine/commands/test_move_to_coordinates.py @@ -97,7 +97,7 @@ async def test_move_to_coordinates_stall( result = await subject.execute(params=params) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=test_id, createdAt=timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate(pipette_location=update_types.CLEAR), diff --git a/api/tests/opentrons/protocol_engine/commands/test_move_to_well.py b/api/tests/opentrons/protocol_engine/commands/test_move_to_well.py index db91abd5a41..56a2691bbee 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_move_to_well.py +++ b/api/tests/opentrons/protocol_engine/commands/test_move_to_well.py @@ -158,7 +158,7 @@ async def test_move_to_well_stall_defined_error( result = await subject.execute(data) assert isinstance(result, DefinedErrorData) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate(pipette_location=update_types.CLEAR), diff --git a/api/tests/opentrons/protocol_engine/commands/test_pick_up_tip.py b/api/tests/opentrons/protocol_engine/commands/test_pick_up_tip.py index 07170e08288..d4c53ea5992 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_pick_up_tip.py +++ b/api/tests/opentrons/protocol_engine/commands/test_pick_up_tip.py @@ -167,7 +167,7 @@ async def test_tip_physically_missing_error( ) assert result == DefinedErrorData( - public=TipPhysicallyMissingError.construct( + public=TipPhysicallyMissingError.model_construct( id=error_id, createdAt=error_created_at, wrappedErrors=[matchers.Anything()] ), state_update=update_types.StateUpdate( @@ -255,7 +255,7 @@ async def test_stall_error( ) assert result == DefinedErrorData( - public=StallOrCollisionError.construct( + public=StallOrCollisionError.model_construct( id=error_id, createdAt=error_created_at, wrappedErrors=[matchers.Anything()] ), 
state_update=update_types.StateUpdate( diff --git a/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py b/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py index f9eded1ffa0..5e77529f646 100644 --- a/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py +++ b/api/tests/opentrons/protocol_engine/commands/test_prepare_to_aspirate.py @@ -91,7 +91,7 @@ async def test_overpressure_error( result = await subject.execute(data) assert result == DefinedErrorData( - public=OverpressureError.construct( + public=OverpressureError.model_construct( id=error_id, createdAt=error_timestamp, wrappedErrors=[matchers.Anything()], diff --git a/api/tests/opentrons/protocol_engine/conftest.py b/api/tests/opentrons/protocol_engine/conftest.py index 88a166524ba..48ce28e7a98 100644 --- a/api/tests/opentrons/protocol_engine/conftest.py +++ b/api/tests/opentrons/protocol_engine/conftest.py @@ -78,7 +78,7 @@ def ot3_standard_deck_def() -> DeckDefinitionV5: @pytest.fixture(scope="session") def ot2_fixed_trash_def() -> LabwareDefinition: """Get the definition of the OT-2 standard fixed trash.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_1_trash_1100ml_fixed", 1) ) @@ -86,7 +86,7 @@ def ot2_fixed_trash_def() -> LabwareDefinition: @pytest.fixture(scope="session") def ot2_short_fixed_trash_def() -> LabwareDefinition: """Get the definition of the OT-2 short fixed trash.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_1_trash_850ml_fixed", 1) ) @@ -94,7 +94,7 @@ def ot2_short_fixed_trash_def() -> LabwareDefinition: @pytest.fixture(scope="session") def ot3_fixed_trash_def() -> LabwareDefinition: """Get the definition of the OT-3 fixed trash.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_1_trash_3200ml_fixed", 1) ) @@ -102,7 +102,7 @@ def ot3_fixed_trash_def() -> LabwareDefinition: @pytest.fixture(scope="session") def ot3_absorbance_reader_lid() -> LabwareDefinition: """Get the definition of the OT-3 plate reader lid.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_flex_lid_absorbance_plate_reader_module", 1) ) @@ -110,7 +110,7 @@ def ot3_absorbance_reader_lid() -> LabwareDefinition: @pytest.fixture(scope="session") def well_plate_def() -> LabwareDefinition: """Get the definition of a 96 well plate.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("corning_96_wellplate_360ul_flat", 2) ) @@ -118,7 +118,7 @@ def well_plate_def() -> LabwareDefinition: @pytest.fixture(scope="session") def flex_50uL_tiprack() -> LabwareDefinition: """Get the definition of a Flex 50uL tiprack.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_flex_96_filtertiprack_50ul", 1) ) @@ -126,7 +126,7 @@ def flex_50uL_tiprack() -> LabwareDefinition: @pytest.fixture(scope="session") def adapter_plate_def() -> LabwareDefinition: """Get the definition of a h/s adapter plate.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_universal_flat_adapter", 1) ) @@ -134,25 +134,31 @@ def adapter_plate_def() -> LabwareDefinition: @pytest.fixture(scope="session") def reservoir_def() -> LabwareDefinition: """Get the definition of single-row reservoir.""" - return 
LabwareDefinition.parse_obj(load_definition("nest_12_reservoir_15ml", 1)) + return LabwareDefinition.model_validate( + load_definition("nest_12_reservoir_15ml", 1) + ) @pytest.fixture(scope="session") def tip_rack_def() -> LabwareDefinition: """Get the definition of Opentrons 300 uL tip rack.""" - return LabwareDefinition.parse_obj(load_definition("opentrons_96_tiprack_300ul", 1)) + return LabwareDefinition.model_validate( + load_definition("opentrons_96_tiprack_300ul", 1) + ) @pytest.fixture(scope="session") def adapter_def() -> LabwareDefinition: """Get the definition of Opentrons 96 PCR adapter.""" - return LabwareDefinition.parse_obj(load_definition("opentrons_96_pcr_adapter", 1)) + return LabwareDefinition.model_validate( + load_definition("opentrons_96_pcr_adapter", 1) + ) @pytest.fixture(scope="session") def falcon_tuberack_def() -> LabwareDefinition: """Get the definition of the 6-well Falcon tuberack.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("opentrons_6_tuberack_falcon_50ml_conical", 1) ) @@ -160,7 +166,7 @@ def falcon_tuberack_def() -> LabwareDefinition: @pytest.fixture(scope="session") def magdeck_well_plate_def() -> LabwareDefinition: """Get the definition of a well place compatible with magdeck.""" - return LabwareDefinition.parse_obj( + return LabwareDefinition.model_validate( load_definition("nest_96_wellplate_100ul_pcr_full_skirt", 1) ) diff --git a/api/tests/opentrons/protocol_engine/resources/test_labware_data_provider.py b/api/tests/opentrons/protocol_engine/resources/test_labware_data_provider.py index 92718c70d89..a666e7a697d 100644 --- a/api/tests/opentrons/protocol_engine/resources/test_labware_data_provider.py +++ b/api/tests/opentrons/protocol_engine/resources/test_labware_data_provider.py @@ -22,7 +22,7 @@ async def test_labware_data_gets_standard_definition() -> None: version=1, ) - assert result == LabwareDefinition.parse_obj(expected) + assert result == LabwareDefinition.model_validate(expected) async def test_labware_hash_match() -> None: @@ -38,9 +38,9 @@ async def test_labware_hash_match() -> None: version=1, ) - labware_model = LabwareDefinition.parse_obj(labware_dict) + labware_model = LabwareDefinition.model_validate(labware_dict) labware_model_dict = cast( - LabwareDefDict, labware_model.dict(exclude_none=True, exclude_unset=True) + LabwareDefDict, labware_model.model_dump(exclude_none=True, exclude_unset=True) ) assert hash_labware_def(labware_dict) == hash_labware_def(labware_model_dict) diff --git a/api/tests/opentrons/protocol_engine/state/test_command_history.py b/api/tests/opentrons/protocol_engine/state/test_command_history.py index 14eaa2a42f3..fabf17e26d1 100644 --- a/api/tests/opentrons/protocol_engine/state/test_command_history.py +++ b/api/tests/opentrons/protocol_engine/state/test_command_history.py @@ -202,13 +202,13 @@ def test_set_fixit_running_command_id(command_history: CommandHistory) -> None: """It should set the ID of the currently running fixit command.""" command_entry = create_queued_command() command_history.append_queued_command(command_entry) - running_command = command_entry.copy( + running_command = command_entry.model_copy( update={ "status": CommandStatus.RUNNING, } ) command_history.set_command_running(running_command) - finished_command = command_entry.copy( + finished_command = command_entry.model_copy( update={ "status": CommandStatus.SUCCEEDED, } @@ -218,7 +218,7 @@ def test_set_fixit_running_command_id(command_history: CommandHistory) -> None: 
command_id="fixit-id", intent=CommandIntent.FIXIT ) command_history.append_queued_command(fixit_command_entry) - fixit_running_command = fixit_command_entry.copy( + fixit_running_command = fixit_command_entry.model_copy( update={ "status": CommandStatus.RUNNING, } diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py index d4b59513be7..0cc542c4971 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_command_mapper.py @@ -159,7 +159,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: assert len(commands_result) == 32 - assert commands_result[0] == commands.Home.construct( + assert commands_result[0] == commands.Home.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -170,7 +170,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.HomeResult(), ) - assert commands_result[1] == commands.LoadLabware.construct( + assert commands_result[1] == commands.LoadLabware.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -186,7 +186,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=tiprack_1_result_captor, ) - assert commands_result[2] == commands.LoadLabware.construct( + assert commands_result[2] == commands.LoadLabware.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -202,7 +202,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=tiprack_2_result_captor, ) - assert commands_result[3] == commands.LoadModule.construct( + assert commands_result[3] == commands.LoadModule.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -217,7 +217,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=module_1_result_captor, ) - assert commands_result[4] == commands.LoadLabware.construct( + assert commands_result[4] == commands.LoadLabware.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -233,7 +233,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=well_plate_1_result_captor, ) - assert commands_result[5] == commands.LoadLabware.construct( + assert commands_result[5] == commands.LoadLabware.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -250,7 +250,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: result=module_plate_1_result_captor, ) - assert commands_result[6] == commands.LoadPipette.construct( + assert commands_result[6] == commands.LoadPipette.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -264,7 +264,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: result=pipette_left_result_captor, ) - assert commands_result[7] == commands.LoadPipette.construct( + assert commands_result[7] == commands.LoadPipette.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -285,7 +285,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: pipette_left_id = 
pipette_left_result_captor.value.pipetteId pipette_right_id = pipette_right_result_captor.value.pipetteId - assert commands_result[8] == commands.PickUpTip.construct( + assert commands_result[8] == commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -302,7 +302,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: tipVolume=300.0, tipLength=51.83, position=DeckPoint(x=0, y=0, z=0) ), ) - assert commands_result[9] == commands.PickUpTip.construct( + assert commands_result[9] == commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -320,7 +320,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ), ) - assert commands_result[10] == commands.DropTip.construct( + assert commands_result[10] == commands.DropTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -336,7 +336,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: result=commands.DropTipResult(position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[11] == commands.PickUpTip.construct( + assert commands_result[11] == commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -353,7 +353,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: tipVolume=300.0, tipLength=51.83, position=DeckPoint(x=0, y=0, z=0) ), ) - assert commands_result[12] == commands.Aspirate.construct( + assert commands_result[12] == commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -370,7 +370,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.AspirateResult(volume=40, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[13] == commands.Dispense.construct( + assert commands_result[13] == commands.Dispense.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -387,7 +387,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.DispenseResult(volume=35, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[14] == commands.Aspirate.construct( + assert commands_result[14] == commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -404,7 +404,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.AspirateResult(volume=40, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[15] == commands.Dispense.construct( + assert commands_result[15] == commands.Dispense.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -421,7 +421,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.DispenseResult(volume=35, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[16] == commands.BlowOut.construct( + assert commands_result[16] == commands.BlowOut.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -437,7 +437,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.BlowOutResult(position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[17] == 
commands.Aspirate.construct( + assert commands_result[17] == commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -454,7 +454,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.AspirateResult(volume=50, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[18] == commands.Dispense.construct( + assert commands_result[18] == commands.Dispense.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -471,7 +471,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.DispenseResult(volume=50, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[19] == commands.BlowOut.construct( + assert commands_result[19] == commands.BlowOut.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -487,7 +487,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.BlowOutResult(position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[20] == commands.Aspirate.construct( + assert commands_result[20] == commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -504,7 +504,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.AspirateResult(volume=300, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[21] == commands.Dispense.construct( + assert commands_result[21] == commands.Dispense.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -521,7 +521,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.DispenseResult(volume=300, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[22] == commands.BlowOut.construct( + assert commands_result[22] == commands.BlowOut.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -538,7 +538,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: result=commands.BlowOutResult(position=DeckPoint(x=0, y=0, z=0)), ) # TODO:(jr, 15.08.2022): this should map to move_to when move_to is mapped in a followup ticket RSS-62 - assert commands_result[23] == commands.Custom.construct( + assert commands_result[23] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -554,7 +554,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ) # TODO:(jr, 15.08.2022): aspirate commands with no labware get filtered # into custom. Refactor this in followup legacy command mapping - assert commands_result[24] == commands.Custom.construct( + assert commands_result[24] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -570,7 +570,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ) # TODO:(jr, 15.08.2022): dispense commands with no labware get filtered # into custom. 
Refactor this in followup legacy command mapping - assert commands_result[25] == commands.Custom.construct( + assert commands_result[25] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -586,7 +586,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ) # TODO:(jr, 15.08.2022): blow_out commands with no labware get filtered # into custom. Refactor this in followup legacy command mapping - assert commands_result[26] == commands.Custom.construct( + assert commands_result[26] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -600,7 +600,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.CustomResult(), ) - assert commands_result[27] == commands.Aspirate.construct( + assert commands_result[27] == commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -617,7 +617,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.AspirateResult(volume=50, position=DeckPoint(x=0, y=0, z=0)), ) - assert commands_result[28] == commands.Dispense.construct( + assert commands_result[28] == commands.Dispense.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -636,7 +636,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ) # TODO:(jr, 15.08.2022): aspirate commands with no labware get filtered # into custom. Refactor this in followup legacy command mapping - assert commands_result[29] == commands.Custom.construct( + assert commands_result[29] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -652,7 +652,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: ) # TODO:(jr, 15.08.2022): dispense commands with no labware get filtered # into custom. 
Refactor this in followup legacy command mapping - assert commands_result[30] == commands.Custom.construct( + assert commands_result[30] == commands.Custom.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -666,7 +666,7 @@ async def test_big_protocol_commands(big_protocol_file: Path) -> None: notes=[], result=commands.CustomResult(), ) - assert commands_result[31] == commands.DropTip.construct( + assert commands_result[31] == commands.DropTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -812,7 +812,7 @@ def run(protocol): ) result_commands = await simulate_and_get_commands(path) [initial_home, comment] = result_commands - assert comment == commands.Comment.construct( + assert comment == commands.Comment.model_construct( status=commands.CommandStatus.SUCCEEDED, params=commands.CommentParams(message="oy."), notes=[], diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py index 7ed54b17ebe..dcc95593c38 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_custom_labware.py @@ -58,7 +58,7 @@ async def test_legacy_custom_labware(custom_labware_protocol_files: List[Path]) ) result = await subject.run(deck_configuration=[], protocol_source=protocol_source) - expected_labware = LoadedLabware.construct( + expected_labware = LoadedLabware.model_construct( id=matchers.Anything(), location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), loadName="fixture_96_plate", diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py index e27c23faa27..5650312b5f6 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_legacy_module_commands.py @@ -75,7 +75,7 @@ async def test_runner_with_modules_in_legacy_python( thermocycler_result_captor = matchers.Captor() heater_shaker_result_captor = matchers.Captor() - assert commands_result[0] == commands.Home.construct( + assert commands_result[0] == commands.Home.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -86,7 +86,7 @@ async def test_runner_with_modules_in_legacy_python( notes=[], result=commands.HomeResult(), ) - assert commands_result[1] == commands.LoadLabware.construct( + assert commands_result[1] == commands.LoadLabware.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -98,7 +98,7 @@ async def test_runner_with_modules_in_legacy_python( result=matchers.Anything(), ) - assert commands_result[2] == commands.LoadModule.construct( + assert commands_result[2] == commands.LoadModule.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -110,7 +110,7 @@ async def test_runner_with_modules_in_legacy_python( result=temp_module_result_captor, ) - assert commands_result[3] == commands.LoadModule.construct( + assert commands_result[3] == commands.LoadModule.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -122,7 +122,7 @@ async def test_runner_with_modules_in_legacy_python( result=mag_module_result_captor, ) - assert 
commands_result[4] == commands.LoadModule.construct( + assert commands_result[4] == commands.LoadModule.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -134,7 +134,7 @@ async def test_runner_with_modules_in_legacy_python( result=thermocycler_result_captor, ) - assert commands_result[5] == commands.LoadModule.construct( + assert commands_result[5] == commands.LoadModule.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, diff --git a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py index 1a8da30bd76..5db66e55eb2 100644 --- a/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py +++ b/api/tests/opentrons/protocol_runner/smoke_tests/test_protocol_runner.py @@ -58,13 +58,13 @@ async def test_runner_with_python( pipette_id_captor = matchers.Captor() labware_id_captor = matchers.Captor() - expected_pipette = LoadedPipette.construct( + expected_pipette = LoadedPipette.model_construct( id=pipette_id_captor, pipetteName=PipetteNameType.P300_SINGLE, mount=MountType.LEFT, ) - expected_labware = LoadedLabware.construct( + expected_labware = LoadedLabware.model_construct( id=labware_id_captor, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), loadName="opentrons_96_tiprack_300ul", @@ -75,7 +75,7 @@ async def test_runner_with_python( offsetId=None, ) - expected_module = LoadedModule.construct( + expected_module = LoadedModule.model_construct( id=matchers.IsA(str), model=ModuleModel.TEMPERATURE_MODULE_V1, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_3), @@ -86,7 +86,7 @@ async def test_runner_with_python( assert expected_labware in labware_result assert expected_module in modules_result - expected_command = commands.PickUpTip.construct( + expected_command = commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -148,7 +148,7 @@ async def test_runner_with_json(json_protocol_file: Path) -> None: assert expected_pipette in pipettes_result assert expected_labware in labware_result - expected_command = commands.PickUpTip.construct( + expected_command = commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -196,13 +196,13 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N pipette_id_captor = matchers.Captor() labware_id_captor = matchers.Captor() - expected_pipette = LoadedPipette.construct( + expected_pipette = LoadedPipette.model_construct( id=pipette_id_captor, pipetteName=PipetteNameType.P300_SINGLE, mount=MountType.LEFT, ) - expected_labware = LoadedLabware.construct( + expected_labware = LoadedLabware.model_construct( id=labware_id_captor, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), loadName="opentrons_96_tiprack_300ul", @@ -215,7 +215,7 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N assert expected_pipette in pipettes_result assert expected_labware in labware_result - expected_command = commands.PickUpTip.construct( + expected_command = commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -260,13 +260,13 @@ async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: pipette_id_captor = matchers.Captor() labware_id_captor = matchers.Captor() - 
expected_pipette = LoadedPipette.construct( + expected_pipette = LoadedPipette.model_construct( id=pipette_id_captor, pipetteName=PipetteNameType.P300_SINGLE, mount=MountType.LEFT, ) - expected_labware = LoadedLabware.construct( + expected_labware = LoadedLabware.model_construct( id=labware_id_captor, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), loadName="opentrons_96_tiprack_300ul", @@ -280,7 +280,7 @@ async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None: assert expected_pipette in pipettes_result assert expected_labware in labware_result - expected_command = commands.PickUpTip.construct( + expected_command = commands.PickUpTip.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, @@ -327,13 +327,13 @@ async def test_runner_with_python_and_run_time_parameters( tiprack_id_captor = matchers.Captor() reservoir_id_captor = matchers.Captor() - expected_pipette = LoadedPipette.construct( + expected_pipette = LoadedPipette.model_construct( id=pipette_id_captor, pipetteName=PipetteNameType.P300_SINGLE, mount=MountType.LEFT, ) - expected_tiprack = LoadedLabware.construct( + expected_tiprack = LoadedLabware.model_construct( id=tiprack_id_captor, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), loadName="opentrons_96_tiprack_300ul", @@ -344,7 +344,7 @@ async def test_runner_with_python_and_run_time_parameters( offsetId=None, ) - expected_reservoir = LoadedLabware.construct( + expected_reservoir = LoadedLabware.model_construct( id=reservoir_id_captor, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_2), loadName="nest_1_reservoir_195ml", @@ -361,14 +361,14 @@ async def test_runner_with_python_and_run_time_parameters( assert result.state_summary.status == EngineStatus.SUCCEEDED - expected_command = commands.Aspirate.construct( + expected_command = commands.Aspirate.model_construct( id=matchers.IsA(str), key=matchers.IsA(str), status=commands.CommandStatus.SUCCEEDED, createdAt=matchers.IsA(datetime), startedAt=matchers.IsA(datetime), completedAt=matchers.IsA(datetime), - params=commands.AspirateParams.construct( + params=commands.AspirateParams.model_construct( labwareId=reservoir_id_captor.value, wellName=matchers.IsA(str), wellLocation=matchers.Anything(), diff --git a/api/tests/opentrons/protocol_runner/test_json_translator.py b/api/tests/opentrons/protocol_runner/test_json_translator.py index b9abbf4e655..b48c18f95c9 100644 --- a/api/tests/opentrons/protocol_runner/test_json_translator.py +++ b/api/tests/opentrons/protocol_runner/test_json_translator.py @@ -194,7 +194,7 @@ wellName="A1", ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="dropTip", params={ "pipetteId": "pipette-id-1", @@ -231,7 +231,7 @@ wellName="A1", ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="pickUpTip", params={ "pipetteId": "pipette-id-1", @@ -273,7 +273,7 @@ ), ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="touchTip", params={ "pipetteId": "pipette-id-1", @@ -308,7 +308,7 @@ pipetteId="pipette-id-1", mount="left", pipetteName="p10_single" ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="loadPipette", params={ "pipetteId": "pipette-id-1", @@ -340,7 +340,7 @@ location=Location(slotName="3"), ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( 
commandType="loadModule", params={ "moduleId": "module-id-1", @@ -375,7 +375,7 @@ displayName="Trash", ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="loadLabware", params={ "labwareId": "labware-id-2", @@ -424,7 +424,7 @@ flowRate=1.23, ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="blowout", params={ "pipetteId": "pipette-id-1", @@ -459,7 +459,7 @@ commandType="delay", params=protocol_schema_v7.Params(waitForResume=True, message="hello world"), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="delay", params={"waitForResume": True, "message": "hello world"}, ), @@ -476,7 +476,7 @@ commandType="delay", params=protocol_schema_v7.Params(seconds=12.34, message="hello world"), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="delay", params={"seconds": 12.34, "message": "hello world"}, ), @@ -496,7 +496,7 @@ commandType="waitForResume", params=protocol_schema_v7.Params(message="hello world"), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="waitForResume", params={"message": "hello world"}, ), @@ -513,7 +513,7 @@ commandType="waitForDuration", params=protocol_schema_v7.Params(seconds=12.34, message="hello world"), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="waitForDuration", params={"seconds": 12.34, "message": "hello world"}, ), @@ -543,7 +543,7 @@ forceDirect=True, ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="moveToCoordinates", params={ "pipetteId": "pipette-id-1", @@ -596,7 +596,7 @@ ], ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="thermocycler/runProfile", params={ "moduleId": "module-id-2", @@ -647,7 +647,7 @@ volumeByWell={"A1": 32, "B2": 50}, ), ), - protocol_schema_v8.Command.construct( + protocol_schema_v8.Command.model_construct( commandType="loadLiquid", key=None, params={ diff --git a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py index 42c589ba7d3..a91066c01f8 100644 --- a/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py +++ b/api/tests/opentrons/protocol_runner/test_legacy_command_mapper.py @@ -117,7 +117,7 @@ def test_map_after_command() -> None: assert result == [ pe_actions.SucceedCommandAction( - command=pe_commands.Comment.construct( + command=pe_commands.Comment.model_construct( id="command.COMMENT-0", key="command.COMMENT-0", status=pe_commands.CommandStatus.SUCCEEDED, @@ -240,7 +240,7 @@ def test_command_stack() -> None: command_id="command.COMMENT-1", started_at=matchers.IsA(datetime) ), pe_actions.SucceedCommandAction( - command=pe_commands.Comment.construct( + command=pe_commands.Comment.model_construct( id="command.COMMENT-0", key="command.COMMENT-0", status=pe_commands.CommandStatus.SUCCEEDED, @@ -302,7 +302,7 @@ def test_map_labware_load(minimal_labware_def: LabwareDefinition) -> None: started_at=matchers.IsA(datetime), ) expected_succeed = pe_actions.SucceedCommandAction( - command=pe_commands.LoadLabware.construct( + command=pe_commands.LoadLabware.model_construct( id=expected_id_and_key, key=expected_id_and_key, params=expected_params, @@ -310,7 +310,7 @@ def test_map_labware_load(minimal_labware_def: 
LabwareDefinition) -> None: createdAt=matchers.IsA(datetime), startedAt=matchers.IsA(datetime), completedAt=matchers.IsA(datetime), - result=pe_commands.LoadLabwareResult.construct( + result=pe_commands.LoadLabwareResult.model_construct( labwareId=matchers.IsA(str), # Trusting that the exact fields within in the labware definition # get passed through correctly. @@ -352,7 +352,7 @@ def test_map_instrument_load(decoy: Decoy) -> None: ).then_return(pipette_config) expected_id_and_key = "commands.LOAD_PIPETTE-0" - expected_params = pe_commands.LoadPipetteParams.construct( + expected_params = pe_commands.LoadPipetteParams.model_construct( pipetteName=PipetteNameType.P1000_SINGLE_GEN2, mount=MountType.LEFT ) expected_queue = pe_actions.QueueCommandAction( @@ -367,7 +367,7 @@ def test_map_instrument_load(decoy: Decoy) -> None: command_id=expected_id_and_key, started_at=matchers.IsA(datetime) ) expected_succeed = pe_actions.SucceedCommandAction( - command=pe_commands.LoadPipette.construct( + command=pe_commands.LoadPipette.model_construct( id=expected_id_and_key, key=expected_id_and_key, status=pe_commands.CommandStatus.SUCCEEDED, @@ -410,7 +410,7 @@ def test_map_module_load( module_data_provider: ModuleDataProvider, ) -> None: """It should correctly map a module load.""" - test_definition = ModuleDefinition.parse_obj(minimal_module_def) + test_definition = ModuleDefinition.model_validate(minimal_module_def) input = LegacyModuleLoadInfo( requested_model=TemperatureModuleModel.TEMPERATURE_V1, loaded_model=TemperatureModuleModel.TEMPERATURE_V2, @@ -423,7 +423,7 @@ def test_map_module_load( ).then_return(test_definition) expected_id_and_key = "commands.LOAD_MODULE-0" - expected_params = pe_commands.LoadModuleParams.construct( + expected_params = pe_commands.LoadModuleParams.model_construct( model=ModuleModel.TEMPERATURE_MODULE_V1, location=DeckSlotLocation(slotName=DeckSlotName.SLOT_1), moduleId=matchers.IsA(str), @@ -440,7 +440,7 @@ def test_map_module_load( command_id=expected_id_and_key, started_at=matchers.IsA(datetime) ) expected_succeed = pe_actions.SucceedCommandAction( - command=pe_commands.LoadModule.construct( + command=pe_commands.LoadModule.model_construct( id=expected_id_and_key, key=expected_id_and_key, status=pe_commands.CommandStatus.SUCCEEDED, @@ -448,7 +448,7 @@ def test_map_module_load( startedAt=matchers.IsA(datetime), completedAt=matchers.IsA(datetime), params=expected_params, - result=pe_commands.LoadModuleResult.construct( + result=pe_commands.LoadModuleResult.model_construct( moduleId=matchers.IsA(str), serialNumber="module-serial", definition=test_definition, @@ -481,7 +481,7 @@ def test_map_module_labware_load(minimal_labware_def: LabwareDefinition) -> None ) expected_id_and_key = "commands.LOAD_LABWARE-0" - expected_params = pe_commands.LoadLabwareParams.construct( + expected_params = pe_commands.LoadLabwareParams.model_construct( location=ModuleLocation(moduleId="module-123"), namespace="some_namespace", loadName="some_load_name", @@ -503,7 +503,7 @@ def test_map_module_labware_load(minimal_labware_def: LabwareDefinition) -> None started_at=matchers.IsA(datetime), ) expected_succeed = pe_actions.SucceedCommandAction( - command=pe_commands.LoadLabware.construct( + command=pe_commands.LoadLabware.model_construct( id=expected_id_and_key, key=expected_id_and_key, params=expected_params, @@ -511,7 +511,7 @@ def test_map_module_labware_load(minimal_labware_def: LabwareDefinition) -> None createdAt=matchers.IsA(datetime), startedAt=matchers.IsA(datetime), 
completedAt=matchers.IsA(datetime), - result=pe_commands.LoadLabwareResult.construct( + result=pe_commands.LoadLabwareResult.model_construct( labwareId=matchers.IsA(str), # Trusting that the exact fields within in the labware definition # get passed through correctly. @@ -578,7 +578,7 @@ def test_map_pause() -> None: started_at=matchers.IsA(datetime), ), pe_actions.SucceedCommandAction( - command=pe_commands.WaitForResume.construct( + command=pe_commands.WaitForResume.model_construct( id="command.PAUSE-0", key="command.PAUSE-0", status=pe_commands.CommandStatus.SUCCEEDED, diff --git a/api/tests/opentrons/protocol_runner/test_run_orchestrator.py b/api/tests/opentrons/protocol_runner/test_run_orchestrator.py index c2cea3e0e7e..b7281953f22 100644 --- a/api/tests/opentrons/protocol_runner/test_run_orchestrator.py +++ b/api/tests/opentrons/protocol_runner/test_run_orchestrator.py @@ -256,11 +256,11 @@ async def test_add_command_and_wait_for_interval( verify_calls: int, ) -> None: """Should add a command a wait for it to complete.""" - load_command = pe_commands.HomeCreate.construct( - params=pe_commands.HomeParams.construct() + load_command = pe_commands.HomeCreate.model_construct( + params=pe_commands.HomeParams.model_construct() ) added_command = pe_commands.Home( - params=pe_commands.HomeParams.construct(), + params=pe_commands.HomeParams.model_construct(), id="test-123", createdAt=datetime(year=2024, month=1, day=1), key="123", diff --git a/api/tests/opentrons/protocols/models/test_json_protocol.py b/api/tests/opentrons/protocols/models/test_json_protocol.py index 696524ac84a..afb2770f21a 100644 --- a/api/tests/opentrons/protocols/models/test_json_protocol.py +++ b/api/tests/opentrons/protocols/models/test_json_protocol.py @@ -25,7 +25,7 @@ def test_json_protocol_model( ) # Create the model - d = json_protocol.Model.parse_obj(fx) + d = json_protocol.Model.model_validate(fx) # Compare the dict created by pydantic to the loaded json - assert d.dict(exclude_unset=True, by_alias=True) == fx + assert d.model_dump(exclude_unset=True, by_alias=True) == fx diff --git a/hardware/pytest.ini b/hardware/pytest.ini index 9337cd62ee1..2c36b03cf37 100644 --- a/hardware/pytest.ini +++ b/hardware/pytest.ini @@ -5,3 +5,9 @@ markers = requires_emulator: mark test as requiring emulator can_filter_func: can message filtering function asyncio_mode = auto + +filterwarnings = + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. ``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 diff --git a/robot-server/pytest.ini b/robot-server/pytest.ini index 51ed89fd0d2..210861cb6ca 100644 --- a/robot-server/pytest.ini +++ b/robot-server/pytest.ini @@ -5,7 +5,11 @@ markers = addopts = --color=yes --strict-markers asyncio_mode = auto -# Don't allow any new code that uses features removed in SQLAlchemy 2.0. -# We should remove this when we upgrade to SQLAlchemy 2.0. filterwarnings = + # Don't allow any new code that uses features removed in SQLAlchemy 2.0. + # We should remove this when we upgrade to SQLAlchemy 2.0. error::sqlalchemy.exc.RemovedIn20Warning + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. 
``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 diff --git a/robot-server/robot_server/client_data/router.py b/robot-server/robot_server/client_data/router.py index 3a619c2111c..4a2850cb8c1 100644 --- a/robot-server/robot_server/client_data/router.py +++ b/robot-server/robot_server/client_data/router.py @@ -73,7 +73,7 @@ async def put_client_data( # noqa: D103 ) -> SimpleBody[ClientData]: store.put(key, request_body.data) client_data_publisher.publish_client_data(key) - return SimpleBody.construct(data=store.get(key)) + return SimpleBody.model_construct(data=store.get(key)) @router.get( @@ -92,7 +92,7 @@ async def get_client_data( # noqa: D103 store: Annotated[ClientDataStore, fastapi.Depends(get_client_data_store)], ) -> SimpleBody[ClientData]: try: - return SimpleBody.construct(data=store.get(key)) + return SimpleBody.model_construct(data=store.get(key)) except KeyError as e: raise ClientDataKeyDoesNotExist.from_exc(e).as_error( fastapi.status.HTTP_404_NOT_FOUND @@ -125,7 +125,7 @@ async def delete_client_data( # noqa: D103 ) from e else: client_data_publisher.publish_client_data(key) - return SimpleEmptyBody.construct() + return SimpleEmptyBody.model_construct() @router.delete( @@ -143,4 +143,4 @@ async def delete_all_client_data( # noqa: D103 store.delete_all() for deleted_key in keys_that_will_be_deleted: client_data_publisher.publish_client_data(deleted_key) - return SimpleEmptyBody.construct() + return SimpleEmptyBody.model_construct() diff --git a/robot-server/robot_server/commands/router.py b/robot-server/robot_server/commands/router.py index e4d2d4a9f13..ce4db3fd515 100644 --- a/robot-server/robot_server/commands/router.py +++ b/robot-server/robot_server/commands/router.py @@ -109,7 +109,9 @@ async def create_command( Comes from a query parameter in the URL. orchestrator: The `RunOrchestrator` handling engine for command to be enqueued. 
""" - command_create = request_body.data.copy(update={"intent": CommandIntent.SETUP}) + command_create = request_body.data.model_copy( + update={"intent": CommandIntent.SETUP} + ) command = await orchestrator.add_command_and_wait_for_interval( command=command_create, wait_until_complete=waitUntilComplete, timeout=timeout ) @@ -117,7 +119,7 @@ async def create_command( response_data = cast(StatelessCommand, orchestrator.get_command(command.id)) return await PydanticResponse.create( - content=SimpleBody.construct(data=response_data), + content=SimpleBody.model_construct(data=response_data), status_code=status.HTTP_201_CREATED, ) @@ -168,7 +170,7 @@ async def get_commands_list( meta = MultiBodyMeta(cursor=cmd_slice.cursor, totalLength=cmd_slice.total_length) return await PydanticResponse.create( - content=SimpleMultiBody.construct(data=commands, meta=meta), + content=SimpleMultiBody.model_construct(data=commands, meta=meta), status_code=status.HTTP_200_OK, ) @@ -204,6 +206,6 @@ async def get_command( raise CommandNotFound.from_exc(e).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=cast(StatelessCommand, command)), + content=SimpleBody.model_construct(data=cast(StatelessCommand, command)), status_code=status.HTTP_200_OK, ) diff --git a/robot-server/robot_server/data_files/router.py b/robot-server/robot_server/data_files/router.py index cf4ba9fa649..f9c61afb77a 100644 --- a/robot-server/robot_server/data_files/router.py +++ b/robot-server/robot_server/data_files/router.py @@ -138,8 +138,8 @@ async def upload_data_file( existing_file_info = data_files_store.get_file_info_by_hash(file_hash) if existing_file_info: return await PydanticResponse.create( - content=SimpleBody.construct( - data=DataFile.construct( + content=SimpleBody.model_construct( + data=DataFile.model_construct( id=existing_file_info.id, name=existing_file_info.name, createdAt=existing_file_info.created_at, @@ -162,8 +162,8 @@ async def upload_data_file( ) await data_files_store.insert(file_info) return await PydanticResponse.create( - content=SimpleBody.construct( - data=DataFile.construct( + content=SimpleBody.model_construct( + data=DataFile.model_construct( id=file_info.id, name=file_info.name, createdAt=created_at, @@ -199,8 +199,8 @@ async def get_data_file_info_by_id( raise FileIdNotFound(detail=str(e)).as_error(status.HTTP_404_NOT_FOUND) return await PydanticResponse.create( - content=SimpleBody.construct( - data=DataFile.construct( + content=SimpleBody.model_construct( + data=DataFile.model_construct( id=resource.id, name=resource.name, createdAt=resource.created_at, @@ -264,9 +264,9 @@ async def get_all_data_files( meta = MultiBodyMeta(cursor=0, totalLength=len(data_files)) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=[ - DataFile.construct( + DataFile.model_construct( id=data_file_info.id, name=data_file_info.name, createdAt=data_file_info.created_at, @@ -307,6 +307,6 @@ async def delete_file_by_id( raise DataFileInUse(detail=str(e)).as_error(status.HTTP_409_CONFLICT) from e return await PydanticResponse.create( - content=SimpleEmptyBody.construct(), + content=SimpleEmptyBody.model_construct(), status_code=status.HTTP_200_OK, ) diff --git a/robot-server/robot_server/deck_configuration/defaults.py b/robot-server/robot_server/deck_configuration/defaults.py index 3ed9a5ed395..fce59673771 100644 --- a/robot-server/robot_server/deck_configuration/defaults.py +++ 
b/robot-server/robot_server/deck_configuration/defaults.py @@ -4,64 +4,64 @@ from . import models -_for_flex = models.DeckConfigurationRequest.construct( +_for_flex = models.DeckConfigurationRequest.model_construct( cutoutFixtures=[ - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutA1", cutoutFixtureId="singleLeftSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutB1", cutoutFixtureId="singleLeftSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutC1", cutoutFixtureId="singleLeftSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutD1", cutoutFixtureId="singleLeftSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutA2", cutoutFixtureId="singleCenterSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutB2", cutoutFixtureId="singleCenterSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutC2", cutoutFixtureId="singleCenterSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutD2", cutoutFixtureId="singleCenterSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutA3", cutoutFixtureId="trashBinAdapter", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutB3", cutoutFixtureId="singleRightSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutC3", cutoutFixtureId="singleRightSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutoutD3", cutoutFixtureId="singleRightSlot", opentronsModuleSerialNumber=None, @@ -70,64 +70,64 @@ ) -_for_ot2 = models.DeckConfigurationRequest.construct( +_for_ot2 = models.DeckConfigurationRequest.model_construct( cutoutFixtures=[ - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout1", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout2", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout3", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout4", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout5", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout6", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout7", cutoutFixtureId="singleStandardSlot", 
opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout8", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout9", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout10", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout11", cutoutFixtureId="singleStandardSlot", opentronsModuleSerialNumber=None, ), - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutId="cutout12", cutoutFixtureId="fixedTrashSlot", opentronsModuleSerialNumber=None, diff --git a/robot-server/robot_server/deck_configuration/router.py b/robot-server/robot_server/deck_configuration/router.py index cfb31c9f030..77ad38a3975 100644 --- a/robot-server/robot_server/deck_configuration/router.py +++ b/robot-server/robot_server/deck_configuration/router.py @@ -78,12 +78,12 @@ async def put_deck_configuration( # noqa: D103 if len(validation_errors) == 0: success_data = await store.set(request=request_body.data, last_modified_at=now) return await PydanticResponse.create( - content=SimpleBody.construct(data=success_data) + content=SimpleBody.model_construct(data=success_data) ) else: error_data = validation_mapping.map_out(validation_errors) return await PydanticResponse.create( - content=ErrorBody.construct(errors=error_data), + content=ErrorBody.model_construct(errors=error_data), status_code=HTTP_422_UNPROCESSABLE_ENTITY, ) @@ -111,5 +111,5 @@ async def get_deck_configuration( # noqa: D103 ], ) -> PydanticResponse[SimpleBody[models.DeckConfigurationResponse]]: return await PydanticResponse.create( - content=SimpleBody.construct(data=await store.get()) + content=SimpleBody.model_construct(data=await store.get()) ) diff --git a/robot-server/robot_server/deck_configuration/store.py b/robot-server/robot_server/deck_configuration/store.py index 159013e3504..4a383dd0a3d 100644 --- a/robot-server/robot_server/deck_configuration/store.py +++ b/robot-server/robot_server/deck_configuration/store.py @@ -132,7 +132,7 @@ async def get_for_cli(deck_type: DeckType, path: Path) -> bytes: return serialize_deck_configuration(from_storage[0], from_storage[1]) else: default_as_http_response = _get_default(deck_type) - default_as_http_request = models.DeckConfigurationRequest.construct( + default_as_http_request = models.DeckConfigurationRequest.model_construct( cutoutFixtures=default_as_http_response.cutoutFixtures ) storable_default = _http_types_to_storage_types( @@ -162,21 +162,21 @@ def _storage_types_to_http_types( ) -> models.DeckConfigurationResponse: storage_cutout_fixtures, last_modified_at = storage_val http_cutout_fixtures = [ - models.CutoutFixture.construct( + models.CutoutFixture.model_construct( cutoutFixtureId=storage_element.cutout_fixture_id, cutoutId=storage_element.cutout_id, opentronsModuleSerialNumber=storage_element.opentrons_module_serial_number, ) for storage_element in storage_cutout_fixtures ] - return models.DeckConfigurationResponse.construct( + return models.DeckConfigurationResponse.model_construct( cutoutFixtures=http_cutout_fixtures, lastModifiedAt=last_modified_at, ) def _get_default(deck_type: DeckType) -> models.DeckConfigurationResponse: - return 
models.DeckConfigurationResponse.construct( + return models.DeckConfigurationResponse.model_construct( cutoutFixtures=defaults.for_deck_definition(deck_type.value).cutoutFixtures, lastModifiedAt=None, ) diff --git a/robot-server/robot_server/error_recovery/settings/router.py b/robot-server/robot_server/error_recovery/settings/router.py index 4fdfeee5498..27dde185f0b 100644 --- a/robot-server/robot_server/error_recovery/settings/router.py +++ b/robot-server/robot_server/error_recovery/settings/router.py @@ -62,5 +62,7 @@ async def _get_current_response( ) -> PydanticResponse[SimpleBody[ResponseData]]: is_enabled = store.get_is_enabled() return await PydanticResponse.create( - SimpleBody.construct(data=ResponseData.construct(enabled=is_enabled)) + SimpleBody.model_construct( + data=ResponseData.model_construct(enabled=is_enabled) + ) ) diff --git a/robot-server/robot_server/errors/error_responses.py b/robot-server/robot_server/errors/error_responses.py index 73f8a97f445..82752660692 100644 --- a/robot-server/robot_server/errors/error_responses.py +++ b/robot-server/robot_server/errors/error_responses.py @@ -25,7 +25,7 @@ class BaseErrorBody(BaseResponseBody): def as_error(self, status_code: int) -> ApiError: """Serialize the response as an API error to raise in a handler.""" - return ApiError(status_code=status_code, content=self.dict()) + return ApiError(status_code=status_code, content=self.model_dump()) class ErrorSource(BaseModel): diff --git a/robot-server/robot_server/errors/global_errors.py b/robot-server/robot_server/errors/global_errors.py index 73e460854ba..5f7d73eb234 100644 --- a/robot-server/robot_server/errors/global_errors.py +++ b/robot-server/robot_server/errors/global_errors.py @@ -53,7 +53,7 @@ def from_exc( ) -> "FirmwareUpdateRequired": """Build a FirmwareUpdateRequired from a specific exception. 
Preserves metadata.""" parent_inst = ErrorDetails.from_exc(exc, **supplemental_kwargs) - inst = FirmwareUpdateRequired(**parent_inst.dict()) + inst = FirmwareUpdateRequired(**parent_inst.model_dump()) if not inst.meta: inst.meta = {"update_url": "/subsystems/update"} else: diff --git a/robot-server/robot_server/instruments/router.py b/robot-server/robot_server/instruments/router.py index a9a3e3bbed3..e059876eb37 100644 --- a/robot-server/robot_server/instruments/router.py +++ b/robot-server/robot_server/instruments/router.py @@ -63,7 +63,7 @@ def _pipette_dict_to_pipette_res( """Convert PipetteDict to Pipette response model.""" if pipette_dict: calibration_data = pipette_offset - return Pipette.construct( + return Pipette.model_construct( firmwareVersion=str(fw_version) if fw_version else None, ok=True, mount=MountType.from_hw_mount(mount).value, @@ -75,7 +75,7 @@ def _pipette_dict_to_pipette_res( channels=pipette_dict["channels"], min_volume=pipette_dict["min_volume"], max_volume=pipette_dict["max_volume"], - calibratedOffset=InstrumentCalibrationData.construct( + calibratedOffset=InstrumentCalibrationData.model_construct( offset=Vec3f( x=calibration_data.offset.x, y=calibration_data.offset.y, @@ -84,9 +84,9 @@ def _pipette_dict_to_pipette_res( source=calibration_data.source, last_modified=calibration_data.last_modified, reasonability_check_failures=[ - InconsistentCalibrationFailure.construct( + InconsistentCalibrationFailure.model_construct( offsets={ - k.name: Vec3f.construct(x=v.x, y=v.y, z=v.z) + k.name: Vec3f.model_construct(x=v.x, y=v.y, z=v.z) for k, v in failure.offsets.items() }, limit=failure.limit, @@ -97,7 +97,7 @@ def _pipette_dict_to_pipette_res( if calibration_data else None, ), - state=PipetteState.parse_obj(pipette_state) if pipette_state else None, + state=PipetteState.model_validate(pipette_state) if pipette_state else None, ) @@ -106,7 +106,7 @@ def _gripper_dict_to_gripper_res( ) -> Gripper: """Convert GripperDict to Gripper response model.""" calibration_data = gripper_dict["calibration_offset"] - return Gripper.construct( + return Gripper.model_construct( firmwareVersion=str(fw_version) if fw_version else None, ok=True, mount=MountType.EXTENSION.value, @@ -115,7 +115,7 @@ def _gripper_dict_to_gripper_res( subsystem=SubSystem.from_hw(HWSubSystem.of_mount(OT3Mount.GRIPPER)), data=GripperData( jawState=gripper_dict["state"].name.lower(), - calibratedOffset=InstrumentCalibrationData.construct( + calibratedOffset=InstrumentCalibrationData.model_construct( offset=Vec3f( x=calibration_data.offset.x, y=calibration_data.offset.y, @@ -219,7 +219,7 @@ async def _get_attached_instruments_ot3( await hardware.cache_instruments(skip_if_would_block=True) response_data = await _get_instrument_data(hardware) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=response_data, meta=MultiBodyMeta(cursor=0, totalLength=len(response_data)), ), @@ -243,7 +243,7 @@ async def _get_attached_instruments_ot2( if pipette_dict ] return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=response_data, meta=MultiBodyMeta(cursor=0, totalLength=len(response_data)), ), diff --git a/robot-server/robot_server/labware_offsets/models.py b/robot-server/robot_server/labware_offsets/models.py index 8c6dd5760f6..fcc3d2f2200 100644 --- a/robot-server/robot_server/labware_offsets/models.py +++ b/robot-server/robot_server/labware_offsets/models.py @@ -1,8 +1,7 @@ 
"""Request/response models for the `/labwareOffsets` endpoints.""" -from typing import Literal, Type -from typing_extensions import Self +from typing import Literal from robot_server.errors.error_responses import ErrorDetails @@ -14,6 +13,6 @@ class LabwareOffsetNotFound(ErrorDetails): title: str = "Labware Offset Not Found" @classmethod - def build(cls: Type[Self], bad_offset_id: str) -> Self: + def build(cls, bad_offset_id: str) -> "LabwareOffsetNotFound": """Return an error with a standard message.""" - return cls.construct(detail=f'No offset found with ID "{bad_offset_id}".') + return cls.model_construct(detail=f'No offset found with ID "{bad_offset_id}".') diff --git a/robot-server/robot_server/labware_offsets/router.py b/robot-server/robot_server/labware_offsets/router.py index fb017fc1457..241f5b31505 100644 --- a/robot-server/robot_server/labware_offsets/router.py +++ b/robot-server/robot_server/labware_offsets/router.py @@ -47,7 +47,7 @@ async def post_labware_offset( # noqa: D103 new_offset_created_at: Annotated[datetime, fastapi.Depends(get_current_time)], request_body: Annotated[RequestModel[LabwareOffsetCreate], fastapi.Body()], ) -> PydanticResponse[SimpleBody[LabwareOffset]]: - new_offset = LabwareOffset.construct( + new_offset = LabwareOffset.model_construct( id=new_offset_id, createdAt=new_offset_created_at, definitionUri=request_body.data.definitionUri, @@ -56,7 +56,7 @@ async def post_labware_offset( # noqa: D103 ) store.add(new_offset) return await PydanticResponse.create( - content=SimpleBody.construct(data=new_offset), + content=SimpleBody.model_construct(data=new_offset), status_code=201, ) @@ -142,14 +142,14 @@ async def get_labware_offsets( # noqa: D103 location_module_model_filter=location_module_model, ) - meta = MultiBodyMeta.construct( + meta = MultiBodyMeta.model_construct( # todo(mm, 2024-12-06): Update this when pagination is supported. 
cursor=0, totalLength=len(result_data), ) return await PydanticResponse.create( - SimpleMultiBody[LabwareOffset].construct( + SimpleMultiBody[LabwareOffset].model_construct( data=result_data, meta=meta, ) @@ -174,7 +174,9 @@ async def delete_labware_offset( # noqa: D103 except LabwareOffsetNotFoundError as e: raise LabwareOffsetNotFound.build(bad_offset_id=e.bad_offset_id).as_error(404) else: - return await PydanticResponse.create(SimpleBody.construct(data=deleted_offset)) + return await PydanticResponse.create( + SimpleBody.model_construct(data=deleted_offset) + ) @PydanticResponse.wrap_route( @@ -186,4 +188,4 @@ async def delete_all_labware_offsets( # noqa: D103 store: Annotated[LabwareOffsetStore, fastapi.Depends(get_labware_offset_store)] ) -> PydanticResponse[SimpleEmptyBody]: store.delete_all() - return await PydanticResponse.create(SimpleEmptyBody.construct()) + return await PydanticResponse.create(SimpleEmptyBody.model_construct()) diff --git a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py index c3346e33351..dfc76945f81 100644 --- a/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py +++ b/robot-server/robot_server/maintenance_runs/maintenance_run_data_manager.py @@ -24,7 +24,7 @@ def _build_run( created_at: datetime, state_summary: Optional[StateSummary], ) -> MaintenanceRun: - state_summary = state_summary or StateSummary.construct( + state_summary = state_summary or StateSummary.model_construct( status=EngineStatus.IDLE, errors=[], labware=[], @@ -37,7 +37,7 @@ def _build_run( liquidClasses=[], hasEverEnteredErrorRecovery=False, ) - return MaintenanceRun.construct( + return MaintenanceRun.model_construct( id=run_id, createdAt=created_at, status=state_summary.status, diff --git a/robot-server/robot_server/maintenance_runs/router/base_router.py b/robot-server/robot_server/maintenance_runs/router/base_router.py index 0e9abc62553..6f6abaf89b5 100644 --- a/robot-server/robot_server/maintenance_runs/router/base_router.py +++ b/robot-server/robot_server/maintenance_runs/router/base_router.py @@ -188,7 +188,7 @@ async def create_run( log.info(f'Created an empty run "{run_id}"".') return await PydanticResponse.create( - content=SimpleBody.construct(data=run_data), + content=SimpleBody.model_construct(data=run_data), status_code=status.HTTP_201_CREATED, ) @@ -221,11 +221,11 @@ async def get_current_run( data = run_data_manager.get(current_run_id) links = AllRunsLinks( - current=ResourceLink.construct(href=f"/maintenance_runs/{current_run_id}") + current=ResourceLink.model_construct(href=f"/maintenance_runs/{current_run_id}") ) return await PydanticResponse.create( - content=Body.construct(data=data, links=links), + content=Body.model_construct(data=data, links=links), status_code=status.HTTP_200_OK, ) @@ -249,7 +249,7 @@ async def get_run( run_data: Data of the run specified in the runId url parameter. 
""" return await PydanticResponse.create( - content=SimpleBody.construct(data=run_data), + content=SimpleBody.model_construct(data=run_data), status_code=status.HTTP_200_OK, ) @@ -285,6 +285,6 @@ async def remove_run( raise RunNotFound(detail=str(e)).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleEmptyBody.construct(), + content=SimpleEmptyBody.model_construct(), status_code=status.HTTP_200_OK, ) diff --git a/robot-server/robot_server/maintenance_runs/router/commands_router.py b/robot-server/robot_server/maintenance_runs/router/commands_router.py index afc5e03779b..40c9b5d782a 100644 --- a/robot-server/robot_server/maintenance_runs/router/commands_router.py +++ b/robot-server/robot_server/maintenance_runs/router/commands_router.py @@ -155,7 +155,7 @@ async def create_run_command( # TODO(mc, 2022-05-26): increment the HTTP API version so that default # behavior is to pass through `command_intent` without overriding it command_intent = pe_commands.CommandIntent.SETUP - command_create = request_body.data.copy(update={"intent": command_intent}) + command_create = request_body.data.model_copy(update={"intent": command_intent}) command = await run_orchestrator_store.add_command_and_wait_for_interval( request=command_create, wait_until_complete=waitUntilComplete, timeout=timeout ) @@ -163,7 +163,7 @@ async def create_run_command( response_data = run_orchestrator_store.get_command(command.id) return await PydanticResponse.create( - content=SimpleBody.construct(data=response_data), + content=SimpleBody.model_construct(data=response_data), status_code=status.HTTP_201_CREATED, ) @@ -228,7 +228,7 @@ async def get_run_commands( recovery_target_command = run_data_manager.get_recovery_target_command(run_id=runId) data = [ - RunCommandSummary.construct( + RunCommandSummary.model_construct( id=c.id, key=c.key, commandType=c.commandType, @@ -249,13 +249,13 @@ async def get_run_commands( totalLength=command_slice.total_length, ) - links = CommandCollectionLinks.construct( + links = CommandCollectionLinks.model_construct( current=_make_command_link(runId, current_command), currentlyRecoveringFrom=_make_command_link(runId, recovery_target_command), ) return await PydanticResponse.create( - content=MultiBody.construct(data=data, meta=meta, links=links), + content=MultiBody.model_construct(data=data, meta=meta, links=links), status_code=status.HTTP_200_OK, ) @@ -297,7 +297,7 @@ async def get_run_command( raise CommandNotFound(detail=str(e)).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=command), + content=SimpleBody.model_construct(data=command), status_code=status.HTTP_200_OK, ) @@ -306,7 +306,7 @@ def _make_command_link( run_id: str, command_pointer: Optional[CommandPointer] ) -> Optional[CommandLink]: return ( - CommandLink.construct( + CommandLink.model_construct( href=f"/maintenance_runs/{run_id}/commands/{command_pointer.command_id}", meta=CommandLinkMeta( runId=run_id, diff --git a/robot-server/robot_server/maintenance_runs/router/labware_router.py b/robot-server/robot_server/maintenance_runs/router/labware_router.py index 72fc09d911a..938cee77af7 100644 --- a/robot-server/robot_server/maintenance_runs/router/labware_router.py +++ b/robot-server/robot_server/maintenance_runs/router/labware_router.py @@ -54,7 +54,7 @@ async def add_labware_offset( log.info(f'Added labware offset "{added_offset.id}"' f' to run "{run.id}".') return await PydanticResponse.create( - 
content=SimpleBody.construct(data=added_offset), + content=SimpleBody.model_construct(data=added_offset), status_code=status.HTTP_201_CREATED, ) @@ -93,8 +93,8 @@ async def add_labware_definition( log.info(f'Added labware definition "{uri}"' f' to run "{run.id}".') return PydanticResponse( - content=SimpleBody.construct( - data=LabwareDefinitionSummary.construct(definitionUri=uri) + content=SimpleBody.model_construct( + data=LabwareDefinitionSummary.model_construct(definitionUri=uri) ), status_code=status.HTTP_201_CREATED, ) diff --git a/robot-server/robot_server/modules/router.py b/robot-server/robot_server/modules/router.py index 1f630d9bdb6..2f566eb7207 100644 --- a/robot-server/robot_server/modules/router.py +++ b/robot-server/robot_server/modules/router.py @@ -67,7 +67,7 @@ async def get_attached_modules( module_identity=module_identity, live_data=mod.live_data, usb_port=mod.usb_port, - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f( x=calibrated.offset.x, y=calibrated.offset.y, @@ -83,7 +83,7 @@ async def get_attached_modules( ) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=response_data, meta=MultiBodyMeta(cursor=0, totalLength=len(response_data)), ), diff --git a/robot-server/robot_server/persistence/_migrations/up_to_2.py b/robot-server/robot_server/persistence/_migrations/up_to_2.py index 69e9cc57875..2e6ba069b36 100644 --- a/robot-server/robot_server/persistence/_migrations/up_to_2.py +++ b/robot-server/robot_server/persistence/_migrations/up_to_2.py @@ -215,11 +215,11 @@ def _migrate_data_1_to_2(transaction: sqlalchemy.engine.Connection) -> None: f"Migrating analysis {index+1}/{len(rows_needing_migration)}, {row.id}..." ) - v1_completed_analysis = CompletedAnalysis.parse_obj( + v1_completed_analysis = CompletedAnalysis.model_validate( _legacy_pickle.loads(row.completed_analysis) ) - v2_completed_analysis_as_document = v1_completed_analysis.json( + v2_completed_analysis_as_document = v1_completed_analysis.model_dump_json( # by_alias and exclude_none should match how # FastAPI + Pydantic + our customizations serialize these objects # over the `GET /protocols/:id/analyses/:id` endpoint. 
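For reference, the hunks above and below all apply the same Pydantic v1 -> v2 method mapping that this change uses everywhere else. Below is a minimal standalone sketch of that mapping (not part of the patch itself; `Summary` is a hypothetical toy model, and Pydantic >= 2 is assumed), with each legacy v1 call shown next to its v2 replacement:

from pydantic import BaseModel


class Summary(BaseModel):
    status: str
    errors: list[str] = []


raw = {"status": "idle", "errors": []}

# v1: Summary.parse_obj(raw)           -> v2: Summary.model_validate(raw)
summary = Summary.model_validate(raw)

# v1: summary.json(exclude_none=True)  -> v2: summary.model_dump_json(exclude_none=True)
as_json = summary.model_dump_json(exclude_none=True)

# v1: summary.dict(by_alias=True)      -> v2: summary.model_dump(by_alias=True)
as_dict = summary.model_dump(by_alias=True)

# v1: summary.copy(update={...})       -> v2: summary.model_copy(update={...})
updated = summary.model_copy(update={"status": "running"})

# v1: Summary.construct(...)           -> v2: Summary.model_construct(...)
# model_construct() skips validation, so it is only appropriate for data that
# is already trusted (e.g. pre-validated response bodies).
unvalidated = Summary.model_construct(status="succeeded", errors=[])
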
diff --git a/robot-server/robot_server/persistence/_migrations/up_to_3.py b/robot-server/robot_server/persistence/_migrations/up_to_3.py index 73053d10391..b4382545dbf 100644 --- a/robot-server/robot_server/persistence/_migrations/up_to_3.py +++ b/robot-server/robot_server/persistence/_migrations/up_to_3.py @@ -23,7 +23,6 @@ from typing import List from opentrons.protocol_engine import StateSummary -import pydantic import sqlalchemy from ..pydantic import pydantic_to_json @@ -138,9 +137,7 @@ def _migrate_run_table_excluding_commands( new_state_summary = ( None if old_row.state_summary is None - else pydantic_to_json( - pydantic.parse_obj_as(StateSummary, old_state_summary) - ) + else pydantic_to_json(StateSummary.model_validate(old_state_summary)) ) dest_transaction.execute( insert_new_run, diff --git a/robot-server/robot_server/persistence/pydantic.py b/robot-server/robot_server/persistence/pydantic.py index 8c6383f0ae1..a8eb14b0916 100644 --- a/robot-server/robot_server/persistence/pydantic.py +++ b/robot-server/robot_server/persistence/pydantic.py @@ -11,7 +11,7 @@ def pydantic_to_json(obj: BaseModel) -> str: """Serialize a Pydantic object for storing in the SQL database.""" - return obj.json( + return obj.model_dump_json( # by_alias and exclude_none should match how # FastAPI + Pydantic + our customizations serialize these objects by_alias=True, @@ -21,7 +21,9 @@ def pydantic_to_json(obj: BaseModel) -> str: def pydantic_list_to_json(obj_list: Sequence[BaseModel]) -> str: """Serialize a list of Pydantic objects for storing in the SQL database.""" - return json.dumps([obj.dict(by_alias=True, exclude_none=True) for obj in obj_list]) + return json.dumps( + [obj.model_dump(by_alias=True, exclude_none=True) for obj in obj_list] + ) @overload diff --git a/robot-server/robot_server/protocols/analysis_store.py b/robot-server/robot_server/protocols/analysis_store.py index be69601466f..d03c8cabb31 100644 --- a/robot-server/robot_server/protocols/analysis_store.py +++ b/robot-server/robot_server/protocols/analysis_store.py @@ -195,7 +195,7 @@ async def update( else: result = AnalysisResult.OK - completed_analysis = CompletedAnalysis.construct( + completed_analysis = CompletedAnalysis.model_construct( id=analysis_id, result=result, robotType=robot_type, @@ -237,7 +237,7 @@ async def save_initialization_failed_analysis( errors: List[ErrorOccurrence], ) -> None: """Commit the failed analysis to store.""" - completed_analysis = CompletedAnalysis.construct( + completed_analysis = CompletedAnalysis.model_construct( id=analysis_id, result=AnalysisResult.NOT_OK, robotType=robot_type, @@ -305,7 +305,9 @@ def get_summaries_by_protocol(self, protocol_id: str) -> List[AnalysisSummary]: protocol_id=protocol_id ) completed_analysis_summaries = [ - AnalysisSummary.construct(id=analysis_id, status=AnalysisStatus.COMPLETED) + AnalysisSummary.model_construct( + id=analysis_id, status=AnalysisStatus.COMPLETED + ) for analysis_id in completed_analysis_ids ] @@ -455,7 +457,7 @@ def add( protocol_id not in self._analysis_ids_by_protocol_id ), "Protocol must not already have a pending analysis." 
- new_pending_analysis = PendingAnalysis.construct( + new_pending_analysis = PendingAnalysis.model_construct( id=analysis_id, runTimeParameters=run_time_parameters, ) diff --git a/robot-server/robot_server/protocols/router.py b/robot-server/robot_server/protocols/router.py index ff6521b70d6..28294f99c7d 100644 --- a/robot-server/robot_server/protocols/router.py +++ b/robot-server/robot_server/protocols/router.py @@ -364,13 +364,13 @@ async def _get_cached_protocol_analysis() -> PydanticResponse[ status.HTTP_503_SERVICE_UNAVAILABLE ) from error - data = Protocol.construct( + data = Protocol.model_construct( id=cached_protocol_id, createdAt=resource.created_at, protocolKind=resource.protocol_kind, protocolType=resource.source.config.protocol_type, robotType=resource.source.robot_type, - metadata=Metadata.parse_obj(resource.source.metadata), + metadata=Metadata.model_validate(resource.source.metadata), analysisSummaries=analysis_summaries, key=resource.protocol_key, files=[ @@ -385,7 +385,7 @@ async def _get_cached_protocol_analysis() -> PydanticResponse[ ) return await PydanticResponse.create( - content=SimpleBody.construct(data=data), + content=SimpleBody.model_construct(data=data), # not returning a 201 because we're not actually creating a new resource status_code=status.HTTP_200_OK, ) @@ -443,7 +443,7 @@ async def _get_cached_protocol_analysis() -> PydanticResponse[ protocolKind=protocol_kind, protocolType=source.config.protocol_type, robotType=source.robot_type, - metadata=Metadata.parse_obj(source.metadata), + metadata=Metadata.model_validate(source.metadata), analysisSummaries=analysis_summaries, key=key, files=[ProtocolFile(name=f.path.name, role=f.role) for f in source.files], @@ -452,7 +452,7 @@ async def _get_cached_protocol_analysis() -> PydanticResponse[ log.info(f'Created protocol "{protocol_id}" and started analysis "{analysis_id}".') return await PydanticResponse.create( - content=SimpleBody.construct(data=data), + content=SimpleBody.model_construct(data=data), status_code=status.HTTP_201_CREATED, ) @@ -549,13 +549,13 @@ async def get_protocols( """ protocol_resources = protocol_store.get_all() data = [ - Protocol.construct( + Protocol.model_construct( id=r.protocol_id, createdAt=r.created_at, protocolKind=r.protocol_kind, protocolType=r.source.config.protocol_type, robotType=r.source.robot_type, - metadata=Metadata.parse_obj(r.source.metadata), + metadata=Metadata.model_validate(r.source.metadata), analysisSummaries=analysis_store.get_summaries_by_protocol(r.protocol_id), key=r.protocol_key, files=[ProtocolFile(name=f.path.name, role=f.role) for f in r.source.files], @@ -566,7 +566,7 @@ async def get_protocols( meta = MultiBodyMeta(cursor=0, totalLength=len(data)) return await PydanticResponse.create( - content=SimpleMultiBody.construct(data=data, meta=meta), + content=SimpleMultiBody.model_construct(data=data, meta=meta), status_code=status.HTTP_200_OK, ) @@ -597,7 +597,7 @@ async def get_protocol_ids( meta = MultiBodyMeta(cursor=0, totalLength=len(protocol_ids)) return await PydanticResponse.create( - content=SimpleMultiBody.construct(data=protocol_ids, meta=meta) + content=SimpleMultiBody.model_construct(data=protocol_ids, meta=meta) ) @@ -630,13 +630,13 @@ async def get_protocol_by_id( analyses = analysis_store.get_summaries_by_protocol(protocol_id=protocolId) referencing_run_ids = protocol_store.get_referencing_run_ids(protocolId) - data = Protocol.construct( + data = Protocol.model_construct( id=protocolId, createdAt=resource.created_at, 
protocolKind=resource.protocol_kind, protocolType=resource.source.config.protocol_type, robotType=resource.source.robot_type, - metadata=Metadata.parse_obj(resource.source.metadata), + metadata=Metadata.model_validate(resource.source.metadata), analysisSummaries=analyses, key=resource.protocol_key, files=[ @@ -644,15 +644,15 @@ async def get_protocol_by_id( ], ) - links = ProtocolLinks.construct( + links = ProtocolLinks.model_construct( referencingRuns=[ - RunLink.construct(id=run_id, href=f"/runs/{run_id}") + RunLink.model_construct(id=run_id, href=f"/runs/{run_id}") for run_id in referencing_run_ids ] ) return await PydanticResponse.create( - content=Body.construct( + content=Body.model_construct( data=data, links=links, ), @@ -690,7 +690,7 @@ async def delete_protocol_by_id( raise ProtocolUsedByRun(detail=str(e)).as_error(status.HTTP_409_CONFLICT) from e return await PydanticResponse.create( - content=SimpleEmptyBody.construct(), + content=SimpleEmptyBody.model_construct(), status_code=status.HTTP_200_OK, ) @@ -772,7 +772,7 @@ async def create_protocol_analysis( status.HTTP_503_SERVICE_UNAVAILABLE ) from error return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=analysis_summaries, meta=MultiBodyMeta(cursor=0, totalLength=len(analysis_summaries)), ), @@ -813,7 +813,7 @@ async def get_protocol_analyses( analyses = await analysis_store.get_by_protocol(protocolId) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=analyses, meta=MultiBodyMeta(cursor=0, totalLength=len(analyses)), ) @@ -859,7 +859,9 @@ async def get_protocol_analysis_by_id( status.HTTP_404_NOT_FOUND ) from error - return await PydanticResponse.create(content=SimpleBody.construct(data=analysis)) + return await PydanticResponse.create( + content=SimpleBody.model_construct(data=analysis) + ) @protocols_router.get( @@ -951,7 +953,7 @@ async def get_protocol_data_files( data_files = await protocol_store.get_referenced_data_files(protocolId) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=data_files, meta=MultiBodyMeta(cursor=0, totalLength=len(data_files)) ) ) diff --git a/robot-server/robot_server/robot/control/router.py b/robot-server/robot_server/robot/control/router.py index 35910748115..a39e7fcab7e 100644 --- a/robot-server/robot_server/robot/control/router.py +++ b/robot-server/robot_server/robot/control/router.py @@ -29,12 +29,12 @@ async def _get_estop_status_response( estop_handler: EstopHandler, ) -> PydanticResponse[SimpleBody[EstopStatusModel]]: """Helper to generate the current Estop Status as a response model.""" - data = EstopStatusModel.construct( + data = EstopStatusModel.model_construct( status=estop_handler.get_state(), leftEstopPhysicalStatus=estop_handler.get_left_physical_status(), rightEstopPhysicalStatus=estop_handler.get_right_physical_status(), ) - return await PydanticResponse.create(content=SimpleBody.construct(data=data)) + return await PydanticResponse.create(content=SimpleBody.model_construct(data=data)) @PydanticResponse.wrap_route( @@ -91,8 +91,8 @@ async def get_door_status( door_required: Annotated[bool, Depends(get_door_switch_required)], ) -> PydanticResponse[SimpleBody[DoorStatusModel]]: return await PydanticResponse.create( - content=SimpleBody.construct( - data=DoorStatusModel.construct( + content=SimpleBody.model_construct( + data=DoorStatusModel.model_construct( 
status=DoorState.from_hw_physical_status(hardware.door_state), doorRequiredClosedForProtocol=door_required, ) diff --git a/robot-server/robot_server/runs/router/actions_router.py b/robot-server/robot_server/runs/router/actions_router.py index c7c24162cfb..80a461f3b59 100644 --- a/robot-server/robot_server/runs/router/actions_router.py +++ b/robot-server/robot_server/runs/router/actions_router.py @@ -155,6 +155,6 @@ async def create_run_action( raise RunNotFound.from_exc(e).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=action), + content=SimpleBody.model_construct(data=action), status_code=status.HTTP_201_CREATED, ) diff --git a/robot-server/robot_server/runs/router/base_router.py b/robot-server/robot_server/runs/router/base_router.py index 4fb4be2b401..c51c02de1e4 100644 --- a/robot-server/robot_server/runs/router/base_router.py +++ b/robot-server/robot_server/runs/router/base_router.py @@ -288,7 +288,7 @@ async def create_run( # noqa: C901 log.info(f'Created protocol run "{run_id}" from protocol "{protocol_id}".') return await PydanticResponse.create( - content=SimpleBody.construct(data=run_data), + content=SimpleBody.model_construct(data=run_data), status_code=status.HTTP_201_CREATED, ) @@ -328,13 +328,13 @@ async def get_runs( current_run_id = run_data_manager.current_run_id meta = MultiBodyMeta(cursor=0, totalLength=len(data)) links = AllRunsLinks( - current=ResourceLink.construct(href=f"/runs/{current_run_id}") + current=ResourceLink.model_construct(href=f"/runs/{current_run_id}") if current_run_id is not None else None ) return await PydanticResponse.create( - content=MultiBody.construct(data=data, links=links, meta=meta), + content=MultiBody.model_construct(data=data, links=links, meta=meta), status_code=status.HTTP_200_OK, ) @@ -358,7 +358,7 @@ async def get_run( run_data: Data of the run specified in the runId url parameter. 
""" return await PydanticResponse.create( - content=SimpleBody.construct(data=run_data), + content=SimpleBody.model_construct(data=run_data), status_code=status.HTTP_200_OK, ) @@ -393,7 +393,7 @@ async def remove_run( raise RunNotFound(detail=str(e)).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleEmptyBody.construct(), + content=SimpleEmptyBody.model_construct(), status_code=status.HTTP_200_OK, ) @@ -433,7 +433,7 @@ async def update_run( raise RunNotFound(detail=str(e)).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=run_data), + content=SimpleBody.model_construct(data=run_data), status_code=status.HTTP_200_OK, ) @@ -509,7 +509,7 @@ async def get_run_commands_error( ) return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=command_error_slice.commands_errors, meta=meta, ), @@ -554,7 +554,7 @@ async def get_current_state( # noqa: C901 active_nozzle_maps = run_data_manager.get_nozzle_maps(run_id=runId) nozzle_layouts = { - pipetteId: ActiveNozzleLayout.construct( + pipetteId: ActiveNozzleLayout.model_construct( startingNozzle=nozzle_map.starting_nozzle, activeNozzles=nozzle_map.active_nozzles, config=NozzleLayoutConfig(nozzle_map.configuration.value.lower()), @@ -563,7 +563,7 @@ async def get_current_state( # noqa: C901 } tip_states = { - pipette_id: TipState.construct(hasTip=has_tip) + pipette_id: TipState.model_construct(hasTip=has_tip) for pipette_id, has_tip in run_data_manager.get_tip_attached( run_id=runId ).items() @@ -625,8 +625,8 @@ async def get_current_state( # noqa: C901 break last_completed_command = run_data_manager.get_last_completed_command(run_id=runId) - links = CurrentStateLinks.construct( - lastCompleted=CommandLinkNoMeta.construct( + links = CurrentStateLinks.model_construct( + lastCompleted=CommandLinkNoMeta.model_construct( id=last_completed_command.command_id, href=f"/runs/{runId}/commands/{last_completed_command.command_id}", ) @@ -635,8 +635,8 @@ async def get_current_state( # noqa: C901 ) return await PydanticResponse.create( - content=Body.construct( - data=RunCurrentState.construct( + content=Body.model_construct( + data=RunCurrentState.model_construct( estopEngaged=estop_engaged, activeNozzleLayouts=nozzle_layouts, tipStates=tip_states, diff --git a/robot-server/robot_server/runs/router/commands_router.py b/robot-server/robot_server/runs/router/commands_router.py index 577606a1446..99828010750 100644 --- a/robot-server/robot_server/runs/router/commands_router.py +++ b/robot-server/robot_server/runs/router/commands_router.py @@ -221,7 +221,7 @@ async def create_run_command( # TODO(mc, 2022-05-26): increment the HTTP API version so that default # behavior is to pass through `command_intent` without overriding it command_intent = request_body.data.intent or pe_commands.CommandIntent.SETUP - command_create = request_body.data.copy(update={"intent": command_intent}) + command_create = request_body.data.model_copy(update={"intent": command_intent}) try: command = await run_orchestrator_store.add_command_and_wait_for_interval( @@ -241,7 +241,7 @@ async def create_run_command( response_data = run_orchestrator_store.get_command(command.id) return await PydanticResponse.create( - content=SimpleBody.construct(data=response_data), + content=SimpleBody.model_construct(data=response_data), status_code=status.HTTP_201_CREATED, ) @@ -315,7 +315,7 @@ async def get_run_commands( 
recovery_target_command = run_data_manager.get_recovery_target_command(run_id=runId) data = [ - RunCommandSummary.construct( + RunCommandSummary.model_construct( id=c.id, key=c.key, commandType=c.commandType, @@ -337,13 +337,13 @@ async def get_run_commands( totalLength=command_slice.total_length, ) - links = CommandCollectionLinks.construct( + links = CommandCollectionLinks.model_construct( current=_make_command_link(runId, current_command), currentlyRecoveringFrom=_make_command_link(runId, recovery_target_command), ) return await PydanticResponse.create( - content=MultiBody.construct(data=data, meta=meta, links=links), + content=MultiBody.model_construct(data=data, meta=meta, links=links), status_code=status.HTTP_200_OK, ) @@ -401,7 +401,7 @@ async def get_run_commands_as_pre_serialized_list( status.HTTP_503_SERVICE_UNAVAILABLE ) from e return await PydanticResponse.create( - content=SimpleMultiBody.construct( + content=SimpleMultiBody.model_construct( data=commands, meta=MultiBodyMeta(cursor=0, totalLength=len(commands)) ) ) @@ -442,7 +442,7 @@ async def get_run_command( raise CommandNotFound.from_exc(e).as_error(status.HTTP_404_NOT_FOUND) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=command), + content=SimpleBody.model_construct(data=command), status_code=status.HTTP_200_OK, ) @@ -451,7 +451,7 @@ def _make_command_link( run_id: str, command_pointer: Optional[CommandPointer] ) -> Optional[CommandLink]: return ( - CommandLink.construct( + CommandLink.model_construct( href=f"/runs/{run_id}/commands/{command_pointer.command_id}", meta=CommandLinkMeta( runId=run_id, diff --git a/robot-server/robot_server/runs/router/error_recovery_policy_router.py b/robot-server/robot_server/runs/router/error_recovery_policy_router.py index 4653d564244..a5c3ae0543d 100644 --- a/robot-server/robot_server/runs/router/error_recovery_policy_router.py +++ b/robot-server/robot_server/runs/router/error_recovery_policy_router.py @@ -59,7 +59,7 @@ async def put_error_recovery_policy( raise RunStopped(detail=str(e)).as_error(status.HTTP_409_CONFLICT) from e return await PydanticResponse.create( - content=SimpleEmptyBody.construct(), + content=SimpleEmptyBody.model_construct(), status_code=status.HTTP_200_OK, ) @@ -90,8 +90,8 @@ async def get_error_recovery_policy( raise RunStopped(detail=str(e)).as_error(status.HTTP_409_CONFLICT) from e return await PydanticResponse.create( - content=SimpleBody.construct( - data=ErrorRecoveryPolicy.construct(policyRules=rules) + content=SimpleBody.model_construct( + data=ErrorRecoveryPolicy.model_construct(policyRules=rules) ), status_code=status.HTTP_200_OK, ) diff --git a/robot-server/robot_server/runs/router/labware_router.py b/robot-server/robot_server/runs/router/labware_router.py index 5d244e5438b..2a0396b3b86 100644 --- a/robot-server/robot_server/runs/router/labware_router.py +++ b/robot-server/robot_server/runs/router/labware_router.py @@ -70,7 +70,7 @@ async def add_labware_offset( log.info(f'Added labware offset "{added_offset.id}"' f' to run "{run.id}".') return await PydanticResponse.create( - content=SimpleBody.construct(data=added_offset), + content=SimpleBody.model_construct(data=added_offset), status_code=status.HTTP_201_CREATED, ) @@ -114,8 +114,8 @@ async def add_labware_definition( log.info(f'Added labware definition "{uri}"' f' to run "{run.id}".') return PydanticResponse( - content=SimpleBody.construct( - data=LabwareDefinitionSummary.construct(definitionUri=uri) + content=SimpleBody.model_construct( + 
data=LabwareDefinitionSummary.model_construct(definitionUri=uri) ), status_code=status.HTTP_201_CREATED, ) @@ -156,6 +156,6 @@ async def get_run_loaded_labware_definitions( raise RunStopped(detail=str(e)).as_error(status.HTTP_409_CONFLICT) from e return await PydanticResponse.create( - content=SimpleBody.construct(data=labware_definitions), + content=SimpleBody.model_construct(data=labware_definitions), status_code=status.HTTP_200_OK, ) diff --git a/robot-server/robot_server/runs/run_data_manager.py b/robot-server/robot_server/runs/run_data_manager.py index 9999e040523..fa937f7cb68 100644 --- a/robot-server/robot_server/runs/run_data_manager.py +++ b/robot-server/robot_server/runs/run_data_manager.py @@ -49,7 +49,7 @@ def _build_run( # such that this default summary object is not needed if run_resource.ok and isinstance(state_summary, StateSummary): - return Run.construct( + return Run.model_construct( id=run_resource.run_id, protocolId=run_resource.protocol_id, createdAt=run_resource.created_at, @@ -72,7 +72,7 @@ def _build_run( errors: List[EnumeratedError] = [] if isinstance(state_summary, BadStateSummary): - state = StateSummary.construct( + state = StateSummary.model_construct( status=EngineStatus.STOPPED, errors=[], labware=[], @@ -109,7 +109,7 @@ def _build_run( AssertionError("Logic error in parsing invalid run.") ) - return BadRun.construct( + return BadRun.model_construct( dataError=run_loading_error, id=run_resource.run_id, protocolId=run_resource.protocol_id, diff --git a/robot-server/robot_server/service/errors.py b/robot-server/robot_server/service/errors.py index 94a8d758563..8503b767258 100644 --- a/robot-server/robot_server/service/errors.py +++ b/robot-server/robot_server/service/errors.py @@ -79,7 +79,7 @@ def __init__( *wrapped_details, ), links=links, - ).dict(exclude_none=True) + ).model_dump(exclude_none=True) super().__init__( status_code=definition.status_code, diff --git a/robot-server/robot_server/service/json_api/response.py b/robot-server/robot_server/service/json_api/response.py index 0d2500b5297..9b815e75fbf 100644 --- a/robot-server/robot_server/service/json_api/response.py +++ b/robot-server/robot_server/service/json_api/response.py @@ -41,7 +41,7 @@ class BaseResponseBody(BaseModel): """ @override - def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: + def model_dump(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: """Always exclude `None` when serializing to an object. With Pydantic v1, the OpenAPI spec described `Optional`(i.e., possibly @@ -56,13 +56,25 @@ def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: serialization behavior at this point would risk breaking things on the client. 
""" kwargs["exclude_none"] = True + return super().model_dump(*args, **kwargs) + + @override + def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: + """See notes in `model_dump()`.""" + kwargs["exclude_none"] = True return super().dict(*args, **kwargs) + @override + def model_dump_json(self, *args: Any, **kwargs: Any) -> str: + """See notes in `.model_dump()`.""" + kwargs["exclude_none"] = True + return super().model_dump_json(*args, **kwargs) + @override def json(self, *args: Any, **kwargs: Any) -> str: - """See notes in `.dict()`.""" + """See notes in `.model_dump()`.""" kwargs["exclude_none"] = True - return super().json(*args, **kwargs) + return super().model_dump_json(*args, **kwargs) class SimpleBody(BaseResponseBody, Generic[ResponseDataT]): @@ -112,8 +124,8 @@ class SimpleMultiBody(BaseResponseBody, Generic[ResponseDataT]): # non-validating classmethod is taken from the type of this member, and there we really # want the arguments to be Sequence so they can accept narrower subtypes. For instance, # if you define a function as returning SimpleMultiBody[Union[A, B]], you should really - # be able to do return SimpleMultiBody.construct([A(), A(), A()]) or even - # SimpleMultiBody[Union[A, B]].construct([A(), A(), A()]). However, because construct's + # be able to do return SimpleMultiBody.model_construct([A(), A(), A()]) or even + # SimpleMultiBody[Union[A, B]].model_construct([A(), A(), A()]). However, because construct's # params are defined based on the dataclass fields, the only way to get the arguments # to be covariant is to make data the covariant Sequence protocol. meta: MultiBodyMeta = Field( @@ -229,7 +241,7 @@ async def create( def render(self, content: ResponseBodyT) -> bytes: """Render the response body to JSON bytes.""" - return content.json().encode(self.charset) + return content.model_dump_json().encode(self.charset) # TODO(mc, 2021-12-09): remove this model diff --git a/robot-server/robot_server/service/legacy/routers/networking.py b/robot-server/robot_server/service/legacy/routers/networking.py index 3c9ea08be2c..ea3c4543ea8 100644 --- a/robot-server/robot_server/service/legacy/routers/networking.py +++ b/robot-server/robot_server/service/legacy/routers/networking.py @@ -190,7 +190,7 @@ async def post_wifi_key(key: UploadFile = File(...)): else: # We return a JSONResponse because we want the 200 status code. response.message = "Key file already present" - return JSONResponse(content=response.dict()) + return JSONResponse(content=response.model_dump()) @router.delete( @@ -274,4 +274,4 @@ async def post_wifi_disconnect(wifi_ssid: WifiNetwork): ) else: stat = status.HTTP_500_INTERNAL_SERVER_ERROR - return JSONResponse(status_code=stat, content=result.dict()) + return JSONResponse(status_code=stat, content=result.model_dump()) diff --git a/robot-server/robot_server/service/notifications/notification_client.py b/robot-server/robot_server/service/notifications/notification_client.py index 052bb272cf9..1727ee0c880 100644 --- a/robot-server/robot_server/service/notifications/notification_client.py +++ b/robot-server/robot_server/service/notifications/notification_client.py @@ -91,8 +91,8 @@ def publish_advise_refetch( Args: topic: The topic to publish the message on. 
""" - message = NotifyRefetchBody.construct() - payload = message.json() + message = NotifyRefetchBody.model_construct() + payload = message.model_dump_json() self._client.publish( topic=topic, payload=payload, @@ -109,8 +109,8 @@ def publish_advise_unsubscribe( Args: topic: The topic to publish the message on. """ - message = NotifyUnsubscribeBody.construct() - payload = message.json() + message = NotifyUnsubscribeBody.model_construct() + payload = message.model_dump_json() self._client.publish( topic=topic, payload=payload, diff --git a/robot-server/robot_server/service/session/command_execution/callable_executor.py b/robot-server/robot_server/service/session/command_execution/callable_executor.py index 39a62cbe3ae..2503ff952e3 100644 --- a/robot-server/robot_server/service/session/command_execution/callable_executor.py +++ b/robot-server/robot_server/service/session/command_execution/callable_executor.py @@ -26,7 +26,7 @@ async def execute(self, command: Command) -> CompletedCommand: with duration() as time_it: name_arg = command.request.command data = command.request.data - data_arg = data.dict() if data else {} + data_arg = data.model_dump() if data else {} await self._callable(name_arg, data_arg) diff --git a/robot-server/robot_server/service/session/router.py b/robot-server/robot_server/service/session/router.py index d950572ce8a..83f5a5578ab 100644 --- a/robot-server/robot_server/service/session/router.py +++ b/robot-server/robot_server/service/session/router.py @@ -169,7 +169,7 @@ async def session_command_execute_handler( log.debug(f"Command result: {command_result}") - return CommandResponse.construct( + return CommandResponse.model_construct( data=command_result, links=get_valid_session_links(sessionId, router) ) diff --git a/robot-server/robot_server/subsystems/router.py b/robot-server/robot_server/subsystems/router.py index e64e7390063..0ce265cc7e6 100644 --- a/robot-server/robot_server/subsystems/router.py +++ b/robot-server/robot_server/subsystems/router.py @@ -122,7 +122,7 @@ async def get_attached_subsystems( """Return all subsystems currently present on the machine.""" hardware = get_ot3_hardware(thread_manager) data = [ - PresentSubsystem.construct( + PresentSubsystem.model_construct( name=SubSystem.from_hw(subsystem_id), ok=subsystem_details.ok, current_fw_version=str(subsystem_details.current_fw_version), @@ -134,7 +134,7 @@ async def get_attached_subsystems( ] meta = MultiBodyMeta(cursor=0, totalLength=len(data)) return await PydanticResponse.create( - content=SimpleMultiBody.construct(data=data, meta=meta) + content=SimpleMultiBody.model_construct(data=data, meta=meta) ) @@ -164,8 +164,8 @@ async def get_attached_subsystem( status.HTTP_404_NOT_FOUND ) return await PydanticResponse.create( - content=SimpleBody.construct( - data=PresentSubsystem.construct( + content=SimpleBody.model_construct( + data=PresentSubsystem.model_construct( name=subsystem, ok=subsystem_status.ok, current_fw_version=str(subsystem_status.current_fw_version), @@ -197,7 +197,7 @@ async def get_subsystem_updates( """Return all currently-running firmware update process summaries.""" handles = await update_manager.all_ongoing_processes() data = [ - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=handle.process_details.update_id, subsystem=handle.process_details.subsystem, updateStatus=handle.cached_state, @@ -207,7 +207,7 @@ async def get_subsystem_updates( ] meta = MultiBodyMeta(cursor=0, totalLength=len(data)) return await PydanticResponse.create( - 
content=SimpleMultiBody.construct(data=data, meta=meta) + content=SimpleMultiBody.model_construct(data=data, meta=meta) ) @@ -238,8 +238,8 @@ async def get_subsystem_update( ) from e progress = await handle.get_progress() return await PydanticResponse.create( - content=SimpleBody.construct( - data=UpdateProgressData.construct( + content=SimpleBody.model_construct( + data=UpdateProgressData.model_construct( id=handle.process_details.update_id, createdAt=handle.process_details.created_at, subsystem=handle.process_details.subsystem, @@ -282,7 +282,7 @@ async def get_update_processes( ] meta = MultiBodyMeta(cursor=0, totalLength=len(data)) return await PydanticResponse.create( - content=SimpleMultiBody.construct(data=data, meta=meta) + content=SimpleMultiBody.model_construct(data=data, meta=meta) ) @@ -306,8 +306,8 @@ async def get_update_process( raise IDNotFound(detail=id).as_error(status.HTTP_404_NOT_FOUND) from e progress = await handle.get_progress() return await PydanticResponse.create( - content=SimpleBody.construct( - data=UpdateProgressData.construct( + content=SimpleBody.model_construct( + data=UpdateProgressData.model_construct( id=handle.process_details.update_id, subsystem=handle.process_details.subsystem, createdAt=handle.process_details.created_at, @@ -375,8 +375,8 @@ async def begin_subsystem_update( ) progress = await summary.get_progress() return await PydanticResponse.create( - content=SimpleBody.construct( - data=UpdateProgressData.construct( + content=SimpleBody.model_construct( + data=UpdateProgressData.model_construct( id=summary.process_details.update_id, createdAt=summary.process_details.created_at, subsystem=subsystem, diff --git a/robot-server/tests/instruments/test_router.py b/robot-server/tests/instruments/test_router.py index fe401828284..9989d6b0409 100644 --- a/robot-server/tests/instruments/test_router.py +++ b/robot-server/tests/instruments/test_router.py @@ -212,7 +212,7 @@ async def rehearse_instrument_retrievals(skip_if_would_block: bool = False) -> N result = await get_attached_instruments(hardware=ot3_hardware_api) assert result.content.data == [ - Pipette.construct( + Pipette.model_construct( ok=True, mount="left", instrumentType="pipette", @@ -234,7 +234,7 @@ async def rehearse_instrument_retrievals(skip_if_would_block: bool = False) -> N ), state=PipetteState(tip_detected=True), ), - Pipette.construct( + Pipette.model_construct( ok=True, mount="right", firmwareVersion="11", @@ -256,7 +256,7 @@ async def rehearse_instrument_retrievals(skip_if_would_block: bool = False) -> N ), state=PipetteState(tip_detected=False), ), - Gripper.construct( + Gripper.model_construct( ok=True, mount="extension", firmwareVersion="11", @@ -305,7 +305,7 @@ async def test_get_ot2_instruments( decoy.verify(await ot2_hardware_api.cache_instruments(), times=0) assert result2.status_code == 200 assert result2.content.data == [ - Pipette.construct( + Pipette.model_construct( ok=True, mount="right", instrumentType="pipette", @@ -351,7 +351,7 @@ async def test_get_96_channel_instruments( decoy.when(ot3_hardware_api.get_instrument_offset(OT3Mount.RIGHT)).then_return(None) assert result2.status_code == 200 assert result2.content.data == [ - Pipette.construct( + Pipette.model_construct( ok=True, mount="left", instrumentType="pipette", diff --git a/robot-server/tests/maintenance_runs/router/test_labware_router.py b/robot-server/tests/maintenance_runs/router/test_labware_router.py index 4e5ae1152f2..d23204aa2d2 100644 --- 
a/robot-server/tests/maintenance_runs/router/test_labware_router.py +++ b/robot-server/tests/maintenance_runs/router/test_labware_router.py @@ -46,7 +46,7 @@ def run() -> MaintenanceRun: @pytest.fixture() def labware_definition(minimal_labware_def: LabwareDefDict) -> LabwareDefinition: """Create a labware definition fixture.""" - return LabwareDefinition.parse_obj(minimal_labware_def) + return LabwareDefinition.model_validate(minimal_labware_def) async def test_add_labware_offset( diff --git a/robot-server/tests/maintenance_runs/test_engine_store.py b/robot-server/tests/maintenance_runs/test_engine_store.py index bf01c653df1..ed9987f5e77 100644 --- a/robot-server/tests/maintenance_runs/test_engine_store.py +++ b/robot-server/tests/maintenance_runs/test_engine_store.py @@ -109,7 +109,7 @@ async def test_create_engine_with_labware_offsets( ) assert result.labwareOffsets == [ - pe_types.LabwareOffset.construct( + pe_types.LabwareOffset.model_construct( id=matchers.IsA(str), createdAt=matchers.IsA(datetime), definitionUri="namespace/load_name/version", diff --git a/robot-server/tests/modules/test_module_data_mapper.py b/robot-server/tests/modules/test_module_data_mapper.py index 62fa54e9a49..9fef6e607f4 100644 --- a/robot-server/tests/modules/test_module_data_mapper.py +++ b/robot-server/tests/modules/test_module_data_mapper.py @@ -138,7 +138,7 @@ def test_maps_magnetic_module_data( has_available_update=True, live_data=input_data, usb_port=hardware_usb_port, - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f(x=0, y=0, z=0), ), ) @@ -213,7 +213,7 @@ def test_maps_temperature_module_data( has_available_update=True, live_data=input_data, usb_port=hardware_usb_port, - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f(x=0, y=0, z=0), ), ) @@ -321,7 +321,7 @@ def test_maps_thermocycler_module_data( has_available_update=True, live_data=input_data, usb_port=hardware_usb_port, - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f(x=0, y=0, z=0), ), ) @@ -426,7 +426,7 @@ def test_maps_heater_shaker_module_data( has_available_update=True, live_data=input_data, usb_port=hardware_usb_port, - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f(x=0, y=0, z=0), ), ) diff --git a/robot-server/tests/modules/test_router.py b/robot-server/tests/modules/test_router.py index 287041f17cf..f63f46f7c27 100644 --- a/robot-server/tests/modules/test_router.py +++ b/robot-server/tests/modules/test_router.py @@ -99,7 +99,7 @@ async def test_get_modules_maps_data_and_id( hubPort=None, path="/dev/null", ), - moduleOffset=ModuleCalibrationData.construct( + moduleOffset=ModuleCalibrationData.model_construct( offset=Vec3f(x=0, y=0, z=0), ), data=MagneticModuleData( @@ -171,7 +171,7 @@ async def test_get_modules_maps_data_and_id( port_group=PortGroup.UNKNOWN, hub_port=None, ), - module_offset=ModuleCalibrationData.construct( + module_offset=ModuleCalibrationData.model_construct( offset=Vec3f( x=calibration_offset.offset.x, y=calibration_offset.offset.y, diff --git a/robot-server/tests/persistence/test_pydantic.py b/robot-server/tests/persistence/test_pydantic.py index 4e25b8c0fca..ea4ea1887a8 100644 --- a/robot-server/tests/persistence/test_pydantic.py +++ b/robot-server/tests/persistence/test_pydantic.py @@ -13,7 +13,7 @@ class _DummyModel(BaseModel): def test_round_trip() -> 
None: """Test Python->JSON->Python round trips.""" - original = _DummyModel.construct(field="hello", aliasedField="world") + original = _DummyModel.model_construct(field="hello", aliasedField="world") after_round_trip = subject.json_to_pydantic( _DummyModel, subject.pydantic_to_json(original) ) @@ -28,7 +28,7 @@ def test_round_trip() -> None: def test_field_aliases() -> None: """The JSON should contain field aliases, not the Python attribute names.""" - original = _DummyModel.construct(field="hello", aliasedField="world") + original = _DummyModel.model_construct(field="hello", aliasedField="world") json = subject.pydantic_to_json(original) json_list = subject.pydantic_list_to_json([original]) assert '"aliasedFieldAlias"' in json diff --git a/robot-server/tests/protocols/test_protocol_analyzer.py b/robot-server/tests/protocols/test_protocol_analyzer.py index 5f581afebb4..39f0feda73b 100644 --- a/robot-server/tests/protocols/test_protocol_analyzer.py +++ b/robot-server/tests/protocols/test_protocol_analyzer.py @@ -246,7 +246,7 @@ async def test_analyze_updates_pending_on_error( raised_exception = Exception("You got me!!") - error_occurrence = pe_errors.ErrorOccurrence.construct( + error_occurrence = pe_errors.ErrorOccurrence.model_construct( id="internal-error", createdAt=datetime(year=2023, month=3, day=3), errorType="EnumeratedError", diff --git a/robot-server/tests/protocols/test_protocols_router.py b/robot-server/tests/protocols/test_protocols_router.py index 0ae2c591ebd..56005162e93 100644 --- a/robot-server/tests/protocols/test_protocols_router.py +++ b/robot-server/tests/protocols/test_protocols_router.py @@ -392,7 +392,7 @@ async def test_get_protocol_by_id( key="dummy-key-111", ) - assert result.content.links == ProtocolLinks.construct(referencingRuns=[]) + assert result.content.links == ProtocolLinks.model_construct(referencingRuns=[]) assert result.status_code == 200 diff --git a/robot-server/tests/runs/router/test_base_router.py b/robot-server/tests/runs/router/test_base_router.py index 9cf42061faa..0350bb4d0b0 100644 --- a/robot-server/tests/runs/router/test_base_router.py +++ b/robot-server/tests/runs/router/test_base_router.py @@ -777,7 +777,7 @@ async def test_get_run_commands_errors_raises_no_run( @pytest.mark.parametrize( "error_list, expected_cursor_result", - [([], 0), ([pe_errors.ErrorOccurrence.construct(id="error-id")], 1)], + [([], 0), ([pe_errors.ErrorOccurrence.model_construct(id="error-id")], 1)], ) async def test_get_run_commands_errors_defualt_cursor( decoy: Decoy, @@ -865,7 +865,7 @@ async def test_get_current_state_success( ) assert result.status_code == 200 - assert result.content.data == RunCurrentState.construct( + assert result.content.data == RunCurrentState.model_construct( estopEngaged=False, activeNozzleLayouts={ "mock-pipette-id": ActiveNozzleLayout( diff --git a/robot-server/tests/runs/router/test_labware_router.py b/robot-server/tests/runs/router/test_labware_router.py index a81c6726e6d..25f80048df6 100644 --- a/robot-server/tests/runs/router/test_labware_router.py +++ b/robot-server/tests/runs/router/test_labware_router.py @@ -93,7 +93,7 @@ async def test_add_labware_offset_not_current( run: Run, ) -> None: """It should 409 if the run is not current.""" - not_current_run = run.copy(update={"current": False}) + not_current_run = run.model_copy(update={"current": False}) labware_offset_request = pe_types.LabwareOffsetCreate( definitionUri="namespace_1/load_name_1/123", @@ -142,7 +142,7 @@ async def test_add_labware_definition_not_current( 
labware_definition: LabwareDefinition, ) -> None: """It should 409 if the run is not current.""" - not_current_run = run.copy(update={"current": False}) + not_current_run = run.model_copy(update={"current": False}) with pytest.raises(ApiError) as exc_info: await add_labware_definition( diff --git a/robot-server/tests/runs/test_error_recovery_mapping.py b/robot-server/tests/runs/test_error_recovery_mapping.py index 0212fd0b6ed..6cab88243e3 100644 --- a/robot-server/tests/runs/test_error_recovery_mapping.py +++ b/robot-server/tests/runs/test_error_recovery_mapping.py @@ -116,9 +116,9 @@ def test_create_error_recovery_policy_defined_error( @pytest.mark.parametrize("enabled", [True, False]) def test_enabled_boolean(enabled: bool) -> None: """enabled=False should override any rules and always fail the run.""" - command = LiquidProbe.construct() + command = LiquidProbe.model_construct() # type: ignore[call-arg] error_data = DefinedErrorData[LiquidNotFoundError]( - public=LiquidNotFoundError.construct() + public=LiquidNotFoundError.model_construct() # type: ignore[call-arg] ) rules = [ @@ -160,9 +160,9 @@ def test_enabled_on_flex_disabled_on_ot2( robot_type: RobotType, expect_error_recovery_to_be_enabled: bool ) -> None: """On OT-2s, the run should always fail regardless of any input rules.""" - command = LiquidProbe.construct() + command = LiquidProbe.model_construct() # type: ignore[call-arg] error_data = DefinedErrorData[LiquidNotFoundError]( - public=LiquidNotFoundError.construct() + public=LiquidNotFoundError.model_construct() # type: ignore[call-arg] ) rules = [ diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 31b58f9950a..8d139b4edff 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -967,7 +967,7 @@ def test_get_commands_errors_slice_historical_run( mock_run_store: RunStore, ) -> None: """Should get a sliced command error list from engine store.""" - expected_commands_errors_result = [ErrorOccurrence.construct(id="error-id")] + expected_commands_errors_result = [ErrorOccurrence.model_construct(id="error-id")] # type: ignore[call-arg] command_error_slice = CommandErrorSlice( cursor=1, total_length=3, commands_errors=expected_commands_errors_result diff --git a/robot-server/tests/runs/test_run_orchestrator_store.py b/robot-server/tests/runs/test_run_orchestrator_store.py index 1774215acfd..b0f8354e494 100644 --- a/robot-server/tests/runs/test_run_orchestrator_store.py +++ b/robot-server/tests/runs/test_run_orchestrator_store.py @@ -120,7 +120,7 @@ async def test_create_engine_with_labware_offsets( ) assert result.labwareOffsets == [ - pe_types.LabwareOffset.construct( + pe_types.LabwareOffset.model_construct( id=matchers.IsA(str), createdAt=matchers.IsA(datetime), definitionUri="namespace/load_name/version", diff --git a/robot-server/tests/runs/test_run_store.py b/robot-server/tests/runs/test_run_store.py index cfbe89a44e4..400e5ef6a06 100644 --- a/robot-server/tests/runs/test_run_store.py +++ b/robot-server/tests/runs/test_run_store.py @@ -122,7 +122,7 @@ def protocol_commands_errors() -> List[pe_commands.Command]: params=pe_commands.WaitForResumeParams(message="hello world"), result=pe_commands.WaitForResumeResult(), intent=pe_commands.CommandIntent.PROTOCOL, - error=ErrorOccurrence.construct( + error=ErrorOccurrence.model_construct( id="error-id", createdAt=datetime(2024, 1, 1), errorType="blah-blah", @@ -137,7 +137,7 @@ def protocol_commands_errors() 
-> List[pe_commands.Command]: params=pe_commands.WaitForResumeParams(message="hey world"), result=pe_commands.WaitForResumeResult(), intent=pe_commands.CommandIntent.PROTOCOL, - error=ErrorOccurrence.construct( + error=ErrorOccurrence.model_construct( id="error-id-2", createdAt=datetime(2024, 1, 1), errorType="blah-blah", @@ -738,7 +738,7 @@ def test_get_run_time_parameters_invalid( state_summary: StateSummary, ) -> None: """It should return an empty list if there invalid parameters.""" - bad_parameters = [pe_types.BooleanParameter.construct(foo="bar")] + bad_parameters = [pe_types.BooleanParameter.model_construct(foo="bar")] # type: ignore[call-arg] subject.insert( run_id="run-id", protocol_id=None, diff --git a/robot-server/tests/service/json_api/test_request.py b/robot-server/tests/service/json_api/test_request.py index 71cd3d0cbcc..8c2012b158b 100644 --- a/robot-server/tests/service/json_api/test_request.py +++ b/robot-server/tests/service/json_api/test_request.py @@ -9,22 +9,22 @@ def test_attributes_as_dict(): DictRequest = RequestModel[dict] obj_to_validate = {"data": {"some_data": 1}} - my_request_obj = DictRequest.parse_obj(obj_to_validate) - assert my_request_obj.dict() == {"data": {"some_data": 1}} + my_request_obj = DictRequest.model_validate(obj_to_validate) + assert my_request_obj.model_dump() == {"data": {"some_data": 1}} def test_attributes_as_item_model(): ItemRequest = RequestModel[ItemModel] obj_to_validate = {"data": {"name": "apple", "quantity": 10, "price": 1.20}} - my_request_obj = ItemRequest.parse_obj(obj_to_validate) - assert my_request_obj.dict() == obj_to_validate + my_request_obj = ItemRequest.model_validate(obj_to_validate) + assert my_request_obj.model_dump() == obj_to_validate def test_attributes_as_item_model_empty_dict(): ItemRequest = RequestModel[ItemModel] obj_to_validate: Dict[str, Any] = {"data": {}} with raises(ValidationError) as e: - ItemRequest.parse_obj(obj_to_validate) + ItemRequest.model_validate(obj_to_validate) assert e.value.errors() == [ { @@ -55,7 +55,7 @@ def test_attributes_required(): MyRequest = RequestModel[dict] obj_to_validate = {"data": None} with raises(ValidationError) as e: - MyRequest.parse_obj(obj_to_validate) + MyRequest.model_validate(obj_to_validate) assert e.value.errors() == [ { @@ -72,7 +72,7 @@ def test_data_required(): MyRequest = RequestModel[dict] obj_to_validate = {"data": None} with raises(ValidationError) as e: - MyRequest.parse_obj(obj_to_validate) + MyRequest.model_validate(obj_to_validate) assert e.value.errors() == [ { @@ -90,7 +90,7 @@ def test_request_with_id(): obj_to_validate = { "data": {"type": "item", "attributes": {}, "id": "abc123"}, } - my_request_obj = MyRequest.parse_obj(obj_to_validate) - assert my_request_obj.dict() == { + my_request_obj = MyRequest.model_validate(obj_to_validate) + assert my_request_obj.model_dump() == { "data": {"type": "item", "attributes": {}, "id": "abc123"}, } diff --git a/robot-server/tests/service/json_api/test_resource_links.py b/robot-server/tests/service/json_api/test_resource_links.py index 804fa77b837..581c018892a 100644 --- a/robot-server/tests/service/json_api/test_resource_links.py +++ b/robot-server/tests/service/json_api/test_resource_links.py @@ -14,8 +14,8 @@ def test_follows_structure(): "self": {"href": "/items/1", "meta": None}, } } - validated = ThingWithLink.parse_obj(structure_to_validate) - assert validated.dict() == structure_to_validate + validated = ThingWithLink.model_validate(structure_to_validate) + assert validated.model_dump() == 
structure_to_validate def test_must_be_self_key_with_string_value(): @@ -25,7 +25,7 @@ def test_must_be_self_key_with_string_value(): } } with raises(ValidationError) as e: - ThingWithLink.parse_obj(invalid_structure_to_validate) + ThingWithLink.model_validate(invalid_structure_to_validate) assert e.value.errors() == [ { "loc": ("links",), diff --git a/robot-server/tests/service/json_api/test_response.py b/robot-server/tests/service/json_api/test_response.py index 6952468229b..05e01f45b6c 100644 --- a/robot-server/tests/service/json_api/test_response.py +++ b/robot-server/tests/service/json_api/test_response.py @@ -126,4 +126,4 @@ class ResponseSpec(NamedTuple): @pytest.mark.parametrize(ResponseSpec._fields, RESPONSE_SPECS) def test_response_to_dict(subject: BaseModel, expected: Dict[str, Any]) -> None: - assert subject.dict() == expected + assert subject.model_dump() == expected diff --git a/robot-server/tests/service/session/models/test_command.py b/robot-server/tests/service/session/models/test_command.py index 67d99f409ac..695638262a4 100644 --- a/robot-server/tests/service/session/models/test_command.py +++ b/robot-server/tests/service/session/models/test_command.py @@ -15,7 +15,7 @@ ) def test_empty(command_def: command_definitions.CommandDefinition): """Test creation of empty command request and response.""" - request = command.CommandRequest.parse_obj( + request = command.CommandRequest.model_validate( {"data": {"command": command_def.value, "data": {}}} ) assert request.data.command == command_def @@ -57,6 +57,6 @@ def test_empty(command_def: command_definitions.CommandDefinition): def test_requires_data(command_def: command_definitions.CommandDefinition): """Test creation of command requiring data will fail with empty body.""" with pytest.raises(ValidationError): - command.CommandRequest.parse_obj( + command.CommandRequest.model_validate( {"data": {"command": command_def.value, "data": {}}} ) diff --git a/robot-server/tests/subsystems/test_router.py b/robot-server/tests/subsystems/test_router.py index a5fb9f86ca6..8b6357a0c1e 100644 --- a/robot-server/tests/subsystems/test_router.py +++ b/robot-server/tests/subsystems/test_router.py @@ -104,7 +104,7 @@ def _build_attached_subsystems( def _build_subsystem_data( subsystem: SubSystem, state: SubSystemState ) -> PresentSubsystem: - return PresentSubsystem.construct( + return PresentSubsystem.model_construct( name=subsystem, ok=state.ok, current_fw_version=str(state.current_fw_version), @@ -230,25 +230,25 @@ async def test_get_subsystem_updates_with_some( response = await get_subsystem_updates(update_manager) assert response.content.data == [ - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=x_process_details.update_id, createdAt=x_process_details.created_at, subsystem=x_process_details.subsystem, updateStatus=x_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=y_process_details.update_id, createdAt=y_process_details.created_at, subsystem=y_process_details.subsystem, updateStatus=y_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=head_process_details.update_id, createdAt=head_process_details.created_at, subsystem=head_process_details.subsystem, updateStatus=head_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=pipette_process_details.update_id, createdAt=pipette_process_details.created_at, subsystem=pipette_process_details.subsystem, @@ -284,7 +284,7 @@ async def 
test_get_subsystem_update_succeeds( await update_manager.get_ongoing_update_process_handle_by_subsystem(subsystem) ).then_return(handle) response = await get_subsystem_update(subsystem, update_manager) - assert response.content.data == UpdateProgressData.construct( + assert response.content.data == UpdateProgressData.model_construct( id=details.update_id, createdAt=details.created_at, subsystem=details.subsystem, @@ -329,7 +329,7 @@ async def test_get_subsystem_update_error( await update_manager.get_ongoing_update_process_handle_by_subsystem(subsystem) ).then_return(handle) response = await get_subsystem_update(subsystem, update_manager) - assert response.content.data == UpdateProgressData.construct( + assert response.content.data == UpdateProgressData.model_construct( id=details.update_id, createdAt=details.created_at, subsystem=details.subsystem, @@ -386,25 +386,25 @@ async def test_get_all_updates_some( ) response = await get_update_processes(update_manager) assert response.content.data == [ - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=x_process_details.update_id, createdAt=x_process_details.created_at, subsystem=x_process_details.subsystem, updateStatus=x_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=y_process_details.update_id, createdAt=y_process_details.created_at, subsystem=y_process_details.subsystem, updateStatus=y_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=head_process_details.update_id, createdAt=head_process_details.created_at, subsystem=head_process_details.subsystem, updateStatus=head_state, ), - UpdateProgressSummary.construct( + UpdateProgressSummary.model_construct( id=pipette_process_details.update_id, createdAt=pipette_process_details.created_at, subsystem=pipette_process_details.subsystem, @@ -526,7 +526,7 @@ async def test_begin_update( headers["Location"] == f"http://127.0.0.1:31950/subsystems/updates/current/{subsystem.value}" ) - assert response_data.content.data == UpdateProgressData.construct( + assert response_data.content.data == UpdateProgressData.model_construct( id=update_id, createdAt=created_at, subsystem=subsystem, diff --git a/server-utils/pytest.ini b/server-utils/pytest.ini index 49f04412746..a2cc2091144 100644 --- a/server-utils/pytest.ini +++ b/server-utils/pytest.ini @@ -1,3 +1,9 @@ [pytest] addopts = --color=yes --strict-markers asyncio_mode = auto + +filterwarnings = + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. ``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 diff --git a/shared-data/python/opentrons_shared_data/gripper/__init__.py b/shared-data/python/opentrons_shared_data/gripper/__init__.py index c9ee59543b6..ab5ab38af72 100644 --- a/shared-data/python/opentrons_shared_data/gripper/__init__.py +++ b/shared-data/python/opentrons_shared_data/gripper/__init__.py @@ -45,7 +45,7 @@ def load_definition( """Load gripper definition based on schema version and gripper model.""" try: path = Path("gripper") / "definitions" / f"{version}" / f"{model.value}.json" - return GripperDefinition.parse_obj(json.loads(load_shared_data(path))) + return GripperDefinition.model_validate(json.loads(load_shared_data(path))) except FileNotFoundError: raise InvalidGripperDefinition( f"Gripper model {model} definition in schema version {version} does not exist." 
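Not part of the patch: a minimal sketch of what the `filterwarnings` entry added to server-utils/pytest.ini above does. The `Example` model is a stand-in assumption; `PydanticDeprecatedSince20` is the warning class the pytest setting references, and escalating it to an error is what forces callers onto the v2 method names.

import warnings

from pydantic import BaseModel, PydanticDeprecatedSince20


class Example(BaseModel):
    """Hypothetical model standing in for the models touched by this patch."""

    name: str


# Outside pytest, this is roughly what
# `filterwarnings = error::pydantic.PydanticDeprecatedSince20` configures:
warnings.simplefilter("error", category=PydanticDeprecatedSince20)

model = Example(name="tiprack")
model.model_dump()  # v2 method: no warning

try:
    model.dict()  # legacy v1 shim: emits PydanticDeprecatedSince20, now raised as an error
except PydanticDeprecatedSince20:
    pass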
diff --git a/shared-data/python/opentrons_shared_data/liquid_classes/__init__.py b/shared-data/python/opentrons_shared_data/liquid_classes/__init__.py index b5da3d7ba52..1b8458adf25 100644 --- a/shared-data/python/opentrons_shared_data/liquid_classes/__init__.py +++ b/shared-data/python/opentrons_shared_data/liquid_classes/__init__.py @@ -1,5 +1,4 @@ """Types and functions for accessing liquid class definitions.""" -import json from .. import load_shared_data from .liquid_class_definition import LiquidClassSchemaV1 @@ -18,10 +17,8 @@ def load_definition(name: str, version: int = DEFAULT_VERSION) -> LiquidClassSch Note: this is an expensive operation and should be called sparingly. """ try: - return LiquidClassSchemaV1.parse_obj( - json.loads( - load_shared_data(f"liquid-class/definitions/{version}/{name}.json") - ) + return LiquidClassSchemaV1.model_validate_json( + load_shared_data(f"liquid-class/definitions/{version}/{name}.json") ) except FileNotFoundError: raise LiquidClassDefinitionDoesNotExist( diff --git a/shared-data/python/opentrons_shared_data/pipette/load_data.py b/shared-data/python/opentrons_shared_data/pipette/load_data.py index 40027d54394..66ba690356a 100644 --- a/shared-data/python/opentrons_shared_data/pipette/load_data.py +++ b/shared-data/python/opentrons_shared_data/pipette/load_data.py @@ -155,7 +155,7 @@ def load_liquid_model( ) -> Dict[str, PipetteLiquidPropertiesDefinition]: liquid_dict = _liquid(channels, model, version) return { - k: PipetteLiquidPropertiesDefinition.parse_obj(v) + k: PipetteLiquidPropertiesDefinition.model_validate(v) for k, v in liquid_dict.items() } @@ -213,7 +213,7 @@ def update_pipette_configuration( Given an input of v1 mutable configs, look up the equivalent keyed value of that configuration.""" quirks_list = [] - dict_of_base_model = base_configurations.dict(by_alias=True) + dict_of_base_model = base_configurations.model_dump(by_alias=True) for c, v in v1_configuration_changes.items(): lookup_key = _change_to_camel_case(c) @@ -245,7 +245,7 @@ def update_pipette_configuration( k.name: v for k, v in dict_of_base_model["plungerPositionsConfigurations"].items() } - return PipetteConfigurations.parse_obj(dict_of_base_model) + return PipetteConfigurations.model_validate(dict_of_base_model) def load_definition( @@ -266,7 +266,7 @@ def load_definition( generation = PipetteGenerationType(physical_dict["displayCategory"]) mount_configs = MOUNT_CONFIG_LOOKUP_TABLE[generation][channels] - return PipetteConfigurations.parse_obj( + return PipetteConfigurations.model_validate( { **geometry_dict, **physical_dict, @@ -289,4 +289,4 @@ def load_valid_nozzle_maps( raise KeyError("Pipette version not found.") physical_dict = _physical(channels, model, version) - return ValidNozzleMaps.parse_obj(physical_dict["validNozzleMaps"]) + return ValidNozzleMaps.model_validate(physical_dict["validNozzleMaps"]) diff --git a/shared-data/python/opentrons_shared_data/pipette/mutable_configurations.py b/shared-data/python/opentrons_shared_data/pipette/mutable_configurations.py index 7e1beb5dd35..23943ef9082 100644 --- a/shared-data/python/opentrons_shared_data/pipette/mutable_configurations.py +++ b/shared-data/python/opentrons_shared_data/pipette/mutable_configurations.py @@ -80,7 +80,7 @@ def _migrate_to_v2_configurations( Given an input of v1 mutable configs, look up the equivalent keyed value of that configuration.""" quirks_list = [] - dict_of_base_model = base_configurations.dict(by_alias=True) + dict_of_base_model = base_configurations.model_dump(by_alias=True) for 
c, v in v1_mutable_configs.items(): if isinstance(v, str): # ignore the saved model @@ -112,7 +112,7 @@ def _migrate_to_v2_configurations( k.name: v for k, v in dict_of_base_model["plungerPositionsConfigurations"].items() } - return PipetteConfigurations.parse_obj(dict_of_base_model) + return PipetteConfigurations.model_validate(dict_of_base_model) def _load_available_overrides( @@ -239,7 +239,7 @@ def _load_full_mutable_configs( pipette_model.pipette_channels, pipette_model.pipette_version, ) - base_configs_dict = base_configs.dict(by_alias=True) + base_configs_dict = base_configs.model_dump(by_alias=True) full_mutable_configs = _list_all_mutable_configs(overrides, base_configs_dict) if not full_mutable_configs.get("name"): @@ -432,7 +432,7 @@ def save_overrides( pipette_model.pipette_channels, pipette_model.pipette_version, ) - base_configs_dict = base_configs.dict(by_alias=True) + base_configs_dict = base_configs.model_dump(by_alias=True) try: existing_overrides = _load_available_overrides( pipette_serial_number, pipette_override_path diff --git a/shared-data/python/opentrons_shared_data/pipette/scripts/build_json_script.py b/shared-data/python/opentrons_shared_data/pipette/scripts/build_json_script.py index 510d0ae5251..e3631d04669 100644 --- a/shared-data/python/opentrons_shared_data/pipette/scripts/build_json_script.py +++ b/shared-data/python/opentrons_shared_data/pipette/scripts/build_json_script.py @@ -138,7 +138,7 @@ def _build_partial_tip_configurations(channels: int) -> PartialTipDefinition: def build_geometry_model_v2( input_dictionary: Dict[str, Any] ) -> PipetteGeometryDefinition: - return PipetteGeometryDefinition.parse_obj(input_dictionary) + return PipetteGeometryDefinition.model_validate(input_dictionary) def build_liquid_model_v2( @@ -147,11 +147,11 @@ def build_liquid_model_v2( ) -> PipetteLiquidPropertiesDefinition: if input_dictionary: if input_dictionary.get("partialTipConfigurations"): - return PipetteLiquidPropertiesDefinition.parse_obj( + return PipetteLiquidPropertiesDefinition.model_validate( {**input_dictionary, "supportedTips": supported_tip_configurations} ) else: - return PipetteLiquidPropertiesDefinition.parse_obj( + return PipetteLiquidPropertiesDefinition.model_validate( { **input_dictionary, "supportedTips": supported_tip_configurations, @@ -163,7 +163,7 @@ def build_liquid_model_v2( "please input the load names of default tipracks separated by commas\n" ) list_default_tipracks = default_tipracks.split(",") - return PipetteLiquidPropertiesDefinition.parse_obj( + return PipetteLiquidPropertiesDefinition.model_validate( { "supportedTips": supported_tip_configurations, "maxVolume": max_volume, @@ -181,7 +181,7 @@ def build_physical_model_v2( sensors=input_dictionary.pop("availableSensors", []) ) back_compat_names = input_dictionary.pop("backCompatNames", []) - return PipettePhysicalPropertiesDefinition.parse_obj( + return PipettePhysicalPropertiesDefinition.model_validate( { **input_dictionary, "availableSensors": available_sensors, @@ -213,7 +213,7 @@ def build_physical_model_v2( back_compat_names = [i.strip() for i in back_compat_names_str.split(",")] else: back_compat_names = [] - return PipettePhysicalPropertiesDefinition.parse_obj( + return PipettePhysicalPropertiesDefinition.model_validate( { "displayName": display_name, "model": pipette_type, @@ -235,7 +235,7 @@ def build_physical_model_v2( def build_supported_tips(input_dictionary: Dict[str, Any]) -> SupportedTipsDefinition: - return SupportedTipsDefinition.parse_obj(input_dictionary) + return 
SupportedTipsDefinition.model_validate(input_dictionary) def save_to_file( @@ -332,7 +332,7 @@ def build_new_pipette_model_v2( top_level_pipette_model["liquid"], pipette_functions_dict, ) - liquid_model_dict = liquid_model.dict(by_alias=True) + liquid_model_dict = liquid_model.model_dump(by_alias=True) liquid_model_dict["supportedTips"] = { k.name: v for k, v in liquid_model_dict["supportedTips"].items() } diff --git a/shared-data/python/opentrons_shared_data/pipette/scripts/update_configuration_files.py b/shared-data/python/opentrons_shared_data/pipette/scripts/update_configuration_files.py index 740504501b0..2c8a13a3e17 100644 --- a/shared-data/python/opentrons_shared_data/pipette/scripts/update_configuration_files.py +++ b/shared-data/python/opentrons_shared_data/pipette/scripts/update_configuration_files.py @@ -218,7 +218,7 @@ def load_and_update_file_from_config( geometry["nozzleOffset"] = value_to_update else: geometry = update(geometry, camel_list_to_update, value_to_update) - PipetteGeometryDefinition.parse_obj(geometry) + PipetteGeometryDefinition.model_validate(geometry) filepath = ( ROOT @@ -240,7 +240,7 @@ def load_and_update_file_from_config( physical = update(physical, camel_list_to_update, value_to_update) - PipettePhysicalPropertiesDefinition.parse_obj(physical) + PipettePhysicalPropertiesDefinition.model_validate(physical) filepath = ( ROOT / "general" @@ -274,7 +274,7 @@ def load_and_update_file_from_config( liquid[c.name.lower()], camel_list_to_update, value_to_update ) - PipetteLiquidPropertiesDefinition.parse_obj(liquid) + PipetteLiquidPropertiesDefinition.model_validate(liquid) filepath = ( ROOT / "liquid" @@ -292,7 +292,7 @@ def load_and_update_file_from_config( liquid = update( liquid[lc.name.lower()], camel_list_to_update, value_to_update ) - PipetteLiquidPropertiesDefinition.parse_obj(liquid) + PipetteLiquidPropertiesDefinition.model_validate(liquid) filepath = ( ROOT diff --git a/shared-data/python/pytest.ini b/shared-data/python/pytest.ini index e552559af25..588979766ed 100644 --- a/shared-data/python/pytest.ini +++ b/shared-data/python/pytest.ini @@ -1,2 +1,8 @@ [pytest] addopts = --color=yes --strict-markers + +filterwarnings = + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. 
``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 diff --git a/shared-data/python/tests/gripper/test_definition.py b/shared-data/python/tests/gripper/test_definition.py index 6cbfbc77fd5..3128531b95f 100644 --- a/shared-data/python/tests/gripper/test_definition.py +++ b/shared-data/python/tests/gripper/test_definition.py @@ -17,14 +17,14 @@ def test_gripper_definition() -> None: def test_gripper_definition_type() -> None: - assert GripperDefinition.parse_obj(GRIPPER_DEF) + assert GripperDefinition.model_validate(GRIPPER_DEF) # missing key del GRIPPER_DEF["gripForceProfile"] with pytest.raises(ValidationError): - assert GripperDefinition.parse_obj(GRIPPER_DEF) + assert GripperDefinition.model_validate(GRIPPER_DEF) # no missing key but with incorrect value GRIPPER_DEF["geometry"]["gripForceProfile"] = {"min": 1.0, "max": "0.0"} with pytest.raises(ValidationError): - assert GripperDefinition.parse_obj(GRIPPER_DEF) + assert GripperDefinition.model_validate(GRIPPER_DEF) diff --git a/shared-data/python/tests/labware/test_validations.py b/shared-data/python/tests/labware/test_validations.py index 39052e5d150..b4d06a40e1d 100644 --- a/shared-data/python/tests/labware/test_validations.py +++ b/shared-data/python/tests/labware/test_validations.py @@ -11,11 +11,11 @@ def test_loadname_regex_applied() -> None: defdict = load_definition(*get_ot_defs()[0]) defdict["parameters"]["loadName"] = "ALSJHDAKJLA" with pytest.raises(ValidationError): - LabwareDefinition.parse_obj(defdict) + LabwareDefinition.model_validate(defdict) def test_namespace_regex_applied() -> None: defdict = load_definition(*get_ot_defs()[0]) defdict["namespace"] = "ALSJHDAKJLA" with pytest.raises(ValidationError): - LabwareDefinition.parse_obj(defdict) + LabwareDefinition.model_validate(defdict) diff --git a/shared-data/python/tests/liquid_classes/test_load.py b/shared-data/python/tests/liquid_classes/test_load.py index c8bf7b25244..d0d96fd00fe 100644 --- a/shared-data/python/tests/liquid_classes/test_load.py +++ b/shared-data/python/tests/liquid_classes/test_load.py @@ -14,9 +14,9 @@ def test_load_liquid_class_schema_v1() -> None: fixture_data = load_shared_data("liquid-class/definitions/1/water.json") - liquid_class_model = LiquidClassSchemaV1.parse_raw(fixture_data) + liquid_class_model = LiquidClassSchemaV1.model_validate_json(fixture_data) liquid_class_def_from_model = json.loads( - liquid_class_model.json(exclude_unset=True) + liquid_class_model.model_dump_json(exclude_unset=True) ) expected_liquid_class_def = json.loads(fixture_data) assert liquid_class_def_from_model == expected_liquid_class_def diff --git a/shared-data/python/tests/pipette/test_load_data.py b/shared-data/python/tests/pipette/test_load_data.py index 012aed7baca..386af05de5b 100644 --- a/shared-data/python/tests/pipette/test_load_data.py +++ b/shared-data/python/tests/pipette/test_load_data.py @@ -90,7 +90,7 @@ def test_update_pipette_configuration( base_configurations, v1_configuration_changes, liquid_class ) - updated_configurations_dict = updated_configurations.dict() + updated_configurations_dict = updated_configurations.model_dump() for k, v in v1_configuration_changes.items(): if k == "tip_length": for i in updated_configurations_dict["liquid_properties"][liquid_class][ diff --git a/shared-data/python/tests/pipette/test_mutable_configurations.py b/shared-data/python/tests/pipette/test_mutable_configurations.py index 38920c473e8..d7a6c8ed1db 100644 --- a/shared-data/python/tests/pipette/test_mutable_configurations.py +++ 
b/shared-data/python/tests/pipette/test_mutable_configurations.py @@ -272,7 +272,7 @@ def test_load_with_overrides( ) if serial_number == TEST_SERIAL_NUMBER: - dict_loaded_configs = loaded_base_configurations.dict(by_alias=True) + dict_loaded_configs = loaded_base_configurations.model_dump(by_alias=True) for map_key in dict_loaded_configs["pickUpTipConfigurations"]["pressFit"][ "configurationsByNozzleMap" ]: @@ -283,7 +283,7 @@ def test_load_with_overrides( "configurationsByNozzleMap" ][map_key][tip_key]["speed"] = 5.0 - updated_configurations_dict = updated_configurations.dict(by_alias=True) + updated_configurations_dict = updated_configurations.model_dump(by_alias=True) assert set(dict_loaded_configs.pop("quirks")) == set( updated_configurations_dict.pop("quirks") ) diff --git a/shared-data/python/tests/pipette/test_pipette_definition.py b/shared-data/python/tests/pipette/test_pipette_definition.py index 9fd134ec059..2d5e2aec87a 100644 --- a/shared-data/python/tests/pipette/test_pipette_definition.py +++ b/shared-data/python/tests/pipette/test_pipette_definition.py @@ -19,10 +19,10 @@ def get_liquid_definition_for( liquid_class: LiquidClasses, ) -> PipetteLiquidPropertiesDefinition: if liquid_class == LiquidClasses.lowVolumeDefault: - return PipetteLiquidPropertiesDefinition.parse_obj( + return PipetteLiquidPropertiesDefinition.model_validate( { "supportedTips": { - "t50": SupportedTipsDefinition.parse_obj( + "t50": SupportedTipsDefinition.model_validate( { "defaultAspirateFlowRate": { "default": 5, @@ -52,10 +52,10 @@ def get_liquid_definition_for( } ) else: - return PipetteLiquidPropertiesDefinition.parse_obj( + return PipetteLiquidPropertiesDefinition.model_validate( { "supportedTips": { - "t50": SupportedTipsDefinition.parse_obj( + "t50": SupportedTipsDefinition.model_validate( { "defaultAspirateFlowRate": { "default": 5, diff --git a/shared-data/python/tests/pipette/test_validate_schema.py b/shared-data/python/tests/pipette/test_validate_schema.py index a29d59f4eda..a002c38cfb2 100644 --- a/shared-data/python/tests/pipette/test_validate_schema.py +++ b/shared-data/python/tests/pipette/test_validate_schema.py @@ -163,7 +163,7 @@ def test_serializer() -> None: assert loaded_model.display_category == types.PipetteGenerationType.FLEX assert loaded_model.channels == types.PipetteChannelType.NINETY_SIX_CHANNEL - model_dict = loaded_model.dict() + model_dict = loaded_model.model_dump() # each field should be the value of the enum class assert ( isinstance(model_dict["pipette_type"], str) diff --git a/shared-data/python/tests/protocol/test_protocol_schema_v6.py b/shared-data/python/tests/protocol/test_protocol_schema_v6.py index 051219f5408..67ccfdc6a9f 100644 --- a/shared-data/python/tests/protocol/test_protocol_schema_v6.py +++ b/shared-data/python/tests/protocol/test_protocol_schema_v6.py @@ -20,7 +20,7 @@ def test_v6_types(defpath: Path) -> None: def_data = load_shared_data(defpath) def_model = protocol_schema_v6.ProtocolSchemaV6.model_validate_json(def_data) - def_dict_from_model = def_model.dict( + def_dict_from_model = def_model.model_dump( exclude_unset=True, # 'schemaVersion' in python is '$schemaVersion' in JSON by_alias=True, diff --git a/shared-data/python/tests/protocol/test_protocol_schema_v7.py b/shared-data/python/tests/protocol/test_protocol_schema_v7.py index 4045ccf35d7..0e5609148c3 100644 --- a/shared-data/python/tests/protocol/test_protocol_schema_v7.py +++ b/shared-data/python/tests/protocol/test_protocol_schema_v7.py @@ -13,7 +13,7 @@ def test_v7_types(defpath: Path) 
-> None: def_data = load_shared_data(defpath) def_model = protocol_schema_v7.ProtocolSchemaV7.model_validate_json(def_data) - def_dict_from_model = def_model.dict( + def_dict_from_model = def_model.model_dump( exclude_unset=True, # 'schemaVersion' in python is '$schemaVersion' in JSON by_alias=True, diff --git a/shared-data/python/tests/protocol/test_protocol_schema_v8.py b/shared-data/python/tests/protocol/test_protocol_schema_v8.py index 48c9b6d242c..d2488904235 100644 --- a/shared-data/python/tests/protocol/test_protocol_schema_v8.py +++ b/shared-data/python/tests/protocol/test_protocol_schema_v8.py @@ -12,6 +12,6 @@ def test_v8_types(defpath: Path) -> None: def_data = load_shared_data(defpath) def_model = protocol_schema_v8.ProtocolSchemaV8.model_validate_json(def_data) - def_dict_from_model = def_model.dict(by_alias=True, exclude_unset=True) + def_dict_from_model = def_model.model_dump(by_alias=True, exclude_unset=True) expected_def_dict = json.loads(def_data) assert def_dict_from_model == expected_def_dict diff --git a/system-server/pytest.ini b/system-server/pytest.ini index 3c283534412..61e37dad0cb 100644 --- a/system-server/pytest.ini +++ b/system-server/pytest.ini @@ -2,3 +2,9 @@ addopts = --cov=system_server --cov-report term-missing:skip-covered --cov-report xml:coverage.xml --color=yes --strict-markers asyncio_mode = auto tavern-global-cfg = tests/integration/common.yaml + +filterwarnings = + # Pydantic's shims for its legacy v1 methods (e.g. `BaseModel.construct()`) + # are not type-checked properly. Forbid them, so we're forced to use their newer + # v2 replacements which are type-checked (e.g. ``BaseModel.model_construct()`) + error::pydantic.PydanticDeprecatedSince20 diff --git a/system-server/system_server/settings/settings.py b/system-server/system_server/settings/settings.py index d7b90658e8f..5256a1e09e1 100644 --- a/system-server/system_server/settings/settings.py +++ b/system-server/system_server/settings/settings.py @@ -77,7 +77,7 @@ def save_settings(settings: SystemServerSettings) -> bool: env_path = env_path or f"{settings.persistence_directory}/system.env" prefix = settings.model_config.get("env_prefix") try: - for key, val in settings.dict().items(): + for key, val in settings.model_dump().items(): name = f"{prefix}{key}" value = str(val) if val is not None else "" set_key(env_path, name, value)
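Not part of the patch: a short reference sketch of the Pydantic v1 -> v2 method renames this change applies, shown on a hypothetical `Example` model. The model, field names, and values are assumptions; the method names come from the diffs above.

from pydantic import BaseModel


class Example(BaseModel):
    """Hypothetical model; any BaseModel subclass in the patch is renamed the same way."""

    name: str
    count: int = 0


# v1 method (deprecated shim)     -> v2 replacement used in this patch
# Example.parse_obj(data)         -> Example.model_validate(data)
# Example.parse_raw(json_str)     -> Example.model_validate_json(json_str)
# Example.construct(**fields)     -> Example.model_construct(**fields)
# instance.dict(...)              -> instance.model_dump(...)
# instance.json(...)              -> instance.model_dump_json(...)
# instance.copy(update={...})     -> instance.model_copy(update={...})

validated = Example.model_validate({"name": "tiprack", "count": 3})
from_json = Example.model_validate_json('{"name": "tiprack", "count": 3}')
unvalidated = Example.model_construct(name="tiprack", count=3)  # skips validation
as_dict = validated.model_dump(exclude_none=True, by_alias=True)
as_json = validated.model_dump_json(exclude_unset=True)
updated = validated.model_copy(update={"count": 4})

Like the old `construct()`, `model_construct()` skips validation, which is why the tests above pair it with `# type: ignore[call-arg]` where required fields are intentionally omitted.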