diff --git a/.github/workflows/python_actions.yml b/.github/workflows/python_actions.yml
index aa8a2e4d32..ceac632b58 100644
--- a/.github/workflows/python_actions.yml
+++ b/.github/workflows/python_actions.yml
@@ -27,5 +27,6 @@ jobs:
       coverage-package: spinn_front_end_common
       flake8-packages: spinn_front_end_common unittests fec_integration_tests
       pylint-packages: spinn_front_end_common
-      mypy-packages: spinn_front_end_common unittests fec_integration_tests
+      mypy-packages: unittests fec_integration_tests
+      mypy-full_packages: spinn_front_end_common
     secrets: inherit
diff --git a/mypy.bash b/mypy.bash
index fc1bd31147..c82937aed3 100755
--- a/mypy.bash
+++ b/mypy.bash
@@ -25,4 +25,4 @@ man="../SpiNNMan/spinnman"
 pacman="../PACMAN/pacman"
 spalloc="../spalloc/spalloc_client"
 
-mypy --python-version 3.8 $$utils $machine $man $pacman $spalloc spinn_front_end_common
+mypy --python-version 3.8 $utils $machine $man $pacman $spalloc spinn_front_end_common unittests fec_integration_tests
diff --git a/mypyd.bash b/mypyd.bash
new file mode 100755
index 0000000000..8482308881
--- /dev/null
+++ b/mypyd.bash
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Copyright (c) 2024 The University of Manchester
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This bash script assumes that the other repositories are installed in parallel
+
+# requires the latest mypy
+# pip install --upgrade mypy
+
+utils="../SpiNNUtils/spinn_utilities"
+machine="../SpiNNMachine/spinn_machine"
+man="../SpiNNMan/spinnman"
+pacman="../PACMAN/pacman"
+spalloc="../spalloc/spalloc_client"
+
+mypy --python-version 3.8 --disallow-untyped-defs $utils $machine $man $pacman $spalloc spinn_front_end_common
+
diff --git a/spinn_front_end_common/abstract_models/abstract_generates_data_specification.py b/spinn_front_end_common/abstract_models/abstract_generates_data_specification.py
index 7795b8733d..e3da2f85f3 100644
--- a/spinn_front_end_common/abstract_models/abstract_generates_data_specification.py
+++ b/spinn_front_end_common/abstract_models/abstract_generates_data_specification.py
@@ -14,8 +14,12 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING
 from spinn_utilities.abstract_base import AbstractBase, abstractmethod
+from spinn_utilities.overrides import overrides
 from spinn_utilities.require_subclass import require_subclass
+from pacman.model.graphs.machine import MachineVertex
 from pacman.model.placements import Placement
+from pacman.model.resources import AbstractSDRAM
+
 from .abstract_has_associated_binary import AbstractHasAssociatedBinary
 if TYPE_CHECKING:
     from spinn_front_end_common.interface.ds import DataSpecificationGenerator
@@ -30,8 +34,8 @@ class AbstractGeneratesDataSpecification(object, metaclass=AbstractBase):
     __slots__ = ()
 
     @abstractmethod
-    def generate_data_specification(
-            self, spec: DataSpecificationGenerator, placement: Placement):
+    def generate_data_specification(self, spec: DataSpecificationGenerator,
+                                    placement: Placement) -> None:
         """
         Generate a data specification.
 
@@ -41,3 +45,10 @@ def generate_data_specification( The placement the vertex is located at """ raise NotImplementedError + + @property + @overrides(MachineVertex.sdram_required) + @abstractmethod + def sdram_required(self) -> AbstractSDRAM: + # pylint: disable=missing-function-docstring + raise NotImplementedError diff --git a/spinn_front_end_common/abstract_models/abstract_rewrites_data_specification.py b/spinn_front_end_common/abstract_models/abstract_rewrites_data_specification.py index 985028a1e9..e4ff8b3f86 100644 --- a/spinn_front_end_common/abstract_models/abstract_rewrites_data_specification.py +++ b/spinn_front_end_common/abstract_models/abstract_rewrites_data_specification.py @@ -32,8 +32,8 @@ class AbstractRewritesDataSpecification(object, metaclass=AbstractBase): __slots__ = () @abstractmethod - def regenerate_data_specification( - self, spec: DataSpecificationReloader, placement: Placement): + def regenerate_data_specification(self, spec: DataSpecificationReloader, + placement: Placement) -> None: """ Regenerate the data specification, only generating regions that have changed and need to be reloaded. @@ -55,7 +55,7 @@ def reload_required(self) -> bool: raise NotImplementedError @abstractmethod - def set_reload_required(self, new_value: bool): + def set_reload_required(self, new_value: bool) -> None: """ Indicate that the regions have been reloaded. diff --git a/spinn_front_end_common/abstract_models/impl/machine_allocation_controller.py b/spinn_front_end_common/abstract_models/impl/machine_allocation_controller.py index 057a28937c..c4453c6ed1 100644 --- a/spinn_front_end_common/abstract_models/impl/machine_allocation_controller.py +++ b/spinn_front_end_common/abstract_models/impl/machine_allocation_controller.py @@ -51,7 +51,7 @@ def __init__(self, thread_name: str, hostname: Optional[str] = None, thread.start() @abstractmethod - def extend_allocation(self, new_total_run_time: float): + def extend_allocation(self, new_total_run_time: float) -> None: """ Extend the allocation of the machine from the original run time. @@ -191,7 +191,7 @@ def proxying(self) -> bool: """ return False - def make_report(self, filename: str): + def make_report(self, filename: str) -> None: """ Asks the controller to make a report of details of allocations. By default, this does nothing. 
diff --git a/spinn_front_end_common/abstract_models/impl/machine_data_specable_vertex.py b/spinn_front_end_common/abstract_models/impl/machine_data_specable_vertex.py index 6494ac83b1..5851abdcae 100644 --- a/spinn_front_end_common/abstract_models/impl/machine_data_specable_vertex.py +++ b/spinn_front_end_common/abstract_models/impl/machine_data_specable_vertex.py @@ -32,8 +32,8 @@ class MachineDataSpecableVertex( __slots__ = () @overrides(AbstractGeneratesDataSpecification.generate_data_specification) - def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + def generate_data_specification(self, spec: DataSpecificationGenerator, + placement: Placement) -> None: tags = FecDataView.get_tags() iptags = tags.get_ip_tags_for_vertex(placement.vertex) reverse_iptags = tags.get_reverse_ip_tags_for_vertex(placement.vertex) @@ -44,7 +44,7 @@ def generate_data_specification( def generate_machine_data_specification( self, spec: DataSpecificationGenerator, placement: Placement, iptags: Optional[Iterable[IPTag]], - reverse_iptags: Optional[Iterable[ReverseIPTag]]): + reverse_iptags: Optional[Iterable[ReverseIPTag]]) -> None: """ :param ~data_specification.DataSpecificationGenerator spec: The data specification to write into. diff --git a/spinn_front_end_common/data/fec_data_view.py b/spinn_front_end_common/data/fec_data_view.py index 1fdfef5255..5a7a92994f 100644 --- a/spinn_front_end_common/data/fec_data_view.py +++ b/spinn_front_end_common/data/fec_data_view.py @@ -870,7 +870,8 @@ def get_system_provenance_dir_path(cls) -> str: cls.get_provenance_dir_path(), "system_provenance_data") @classmethod - def _child_folder(cls, parent, child_name, must_create=False): + def _child_folder(cls, parent: str, child_name: str, + must_create: bool = False) -> str: """ :param str parent: :param str child_name: @@ -979,7 +980,7 @@ def get_live_packet_recorder_params(cls) -> Dict[ def add_live_packet_gatherer_parameters( cls, live_packet_gatherer_params: LivePacketGatherParameters, vertex_to_record_from: ApplicationVertex, - partition_ids: Iterable[str]): + partition_ids: Iterable[str]) -> None: """ Adds parameters for a new live packet gatherer (LPG) if needed, or adds to the tracker for parameters. @@ -1252,7 +1253,7 @@ def get_n_database_socket_addresses(cls) -> int: @classmethod def add_database_socket_address( - cls, database_socket_address: SocketAddress): + cls, database_socket_address: SocketAddress) -> None: """ Adds a socket address to the list of known addresses. @@ -1267,7 +1268,8 @@ def add_database_socket_address( @classmethod def add_database_socket_addresses( - cls, database_socket_addresses: Optional[Iterable[SocketAddress]]): + cls, database_socket_addresses: Optional[Iterable[SocketAddress]] + ) -> None: """ Adds all socket addresses to the list of known addresses. @@ -1297,7 +1299,7 @@ def get_notification_protocol(cls) -> NotificationProtocol: @classmethod def add_live_output_vertex( - cls, vertex: ApplicationVertex, partition_id: str): + cls, vertex: ApplicationVertex, partition_id: str) -> None: """ Add a vertex that is to be output live, and so wants its atom IDs recorded in the database. 
@@ -1324,9 +1326,9 @@ def iterate_live_output_vertices( return iter(cls.__fec_data._live_output_vertices) @classmethod - def get_next_ds_references(cls, number): + def get_next_ds_references(cls, number: int) -> List[int]: """ - Get a a list of unique data specification references + Get a list of unique data specification references These will be unique since the last hard reset @@ -1339,7 +1341,7 @@ def get_next_ds_references(cls, number): return list(references) @classmethod - def add_live_output_device(cls, device: LiveOutputDevice): + def add_live_output_device(cls, device: LiveOutputDevice) -> None: """ Add a live output device. diff --git a/spinn_front_end_common/data/fec_data_writer.py b/spinn_front_end_common/data/fec_data_writer.py index 8f822be6e2..93578950ce 100644 --- a/spinn_front_end_common/data/fec_data_writer.py +++ b/spinn_front_end_common/data/fec_data_writer.py @@ -110,7 +110,7 @@ def _soft_reset(self) -> None: def __create_run_dir_path(self) -> None: self.set_run_dir_path(self._child_folder( - self.__fec_data._timestamp_dir_path, + self.get_timestamp_dir_path(), f"run_{self.__fec_data._run_number}")) def __create_reports_directory(self) -> None: @@ -150,7 +150,7 @@ def write_finished_file(self) -> None: f.writelines(self._get_timestamp()) def set_allocation_controller(self, allocation_controller: Optional[ - MachineAllocationController]): + MachineAllocationController]) -> None: """ Sets the allocation controller variable. @@ -170,7 +170,7 @@ def set_allocation_controller(self, allocation_controller: Optional[ "Expecting only the SpallocJobController to be proxying") self.__fec_data._spalloc_job = allocation_controller.job - def set_buffer_manager(self, buffer_manager: BufferManager): + def set_buffer_manager(self, buffer_manager: BufferManager) -> None: """ Sets the Buffer manager variable. @@ -180,7 +180,8 @@ def set_buffer_manager(self, buffer_manager: BufferManager): raise TypeError("buffer_manager must be a BufferManager") self.__fec_data._buffer_manager = buffer_manager - def increment_current_run_timesteps(self, increment: Optional[int]): + def increment_current_run_timesteps( + self, increment: Optional[int]) -> None: """ Increment the current_run_timesteps and sets first_machine_time_step. @@ -224,7 +225,7 @@ def set_current_run_timesteps(self, current_run_timesteps: int) -> None: "Last run was longer than duration supported by recording") self.__fec_data._current_run_timesteps = current_run_timesteps - def set_max_run_time_steps(self, max_run_time_steps: int): + def set_max_run_time_steps(self, max_run_time_steps: int) -> None: """ Sets the max_run_time_steps value @@ -240,7 +241,7 @@ def set_max_run_time_steps(self, max_run_time_steps: int): def set_up_timings( self, simulation_time_step_us: Optional[int], time_scale_factor: Optional[float], - default_time_scale_factor: Optional[float] = None): + default_time_scale_factor: Optional[float] = None) -> None: """ Set up timings for the simulation. @@ -276,7 +277,7 @@ def set_up_timings( raise def _set_simulation_time_step( - self, simulation_time_step_us: Optional[int]): + self, simulation_time_step_us: Optional[int]) -> None: """ :param simulation_time_step_us: An explicitly specified time step for the simulation. If `None`, @@ -307,7 +308,7 @@ def _set_simulation_time_step( def _set_time_scale_factor( self, time_scale_factor: Optional[float], - default_time_scale_factor: Optional[float]): + default_time_scale_factor: Optional[float]) -> None: """ Set up time_scale_factor. 
@@ -374,7 +375,7 @@ def _set_hardware_timestep(self) -> None: def set_system_multicast_routing_data( self, data: Tuple[ - MulticastRoutingTables, Dict[XY, int], Dict[XY, int]]): + MulticastRoutingTables, Dict[XY, int], Dict[XY, int]]) -> None: """ Sets the system_multicast_routing_data. @@ -398,7 +399,7 @@ def set_system_multicast_routing_data( self.__fec_data._data_in_multicast_routing_tables = routing_tables self.__fec_data._system_multicast_router_timeout_keys = timeout_keys - def set_ipaddress(self, ip_address: str): + def set_ipaddress(self, ip_address: str) -> None: """ :param str ip_address: """ @@ -407,7 +408,7 @@ def set_ipaddress(self, ip_address: str): self.__fec_data._ipaddress = ip_address def set_fixed_routes( - self, fixed_routes: Dict[Tuple[int, int], RoutingEntry]): + self, fixed_routes: Dict[Tuple[int, int], RoutingEntry]) -> None: """ :param fixed_routes: :type fixed_routes: @@ -417,7 +418,7 @@ def set_fixed_routes( raise TypeError("fixed_routes must be a dict") self.__fec_data._fixed_routes = fixed_routes - def set_java_caller(self, java_caller: JavaCaller): + def set_java_caller(self, java_caller: JavaCaller) -> None: """ :param JavaCaller java_caller: """ @@ -432,7 +433,7 @@ def reset_sync_signal(self) -> None: self.__fec_data._next_sync_signal = Signal.SYNC0 def set_executable_types(self, executable_types: Dict[ - ExecutableType, CoreSubsets]): + ExecutableType, CoreSubsets]) -> None: """ :param executable_types: :type executable_types: dict( @@ -443,15 +444,8 @@ def set_executable_types(self, executable_types: Dict[ raise TypeError("executable_types must be a Dict") self.__fec_data._executable_types = executable_types - def set_live_packet_gatherer_parameters(self, params): - """ - testing method will not work outside of mock - """ - if not self._is_mocked(): - raise NotImplementedError("This call is only for testing") - self.__fec_data._live_packet_recorder_params = params - - def set_database_file_path(self, database_file_path: Optional[str]): + def set_database_file_path( + self, database_file_path: Optional[str]) -> None: """ Sets the database_file_path variable. Possibly to `None`. @@ -462,7 +456,8 @@ def set_database_file_path(self, database_file_path: Optional[str]): raise TypeError("database_file_path must be a str or None") self.__fec_data._database_file_path = database_file_path - def set_executable_targets(self, executable_targets: ExecutableTargets): + def set_executable_targets( + self, executable_targets: ExecutableTargets) -> None: """ Sets the executable_targets @@ -472,7 +467,7 @@ def set_executable_targets(self, executable_targets: ExecutableTargets): raise TypeError("executable_targets must be a ExecutableTargets") self.__fec_data._executable_targets = executable_targets - def set_ds_database_path(self, ds_database_path: str): + def set_ds_database_path(self, ds_database_path: str) -> None: """ Sets the Data Spec targets database. @@ -489,7 +484,7 @@ def __gatherer_map_error(self) -> TypeError: "DataSpeedUpPacketGatherMachineVertex)") def set_gatherer_map(self, gatherer_map: Dict[ - Chip, DataSpeedUpPacketGatherMachineVertex]): + Chip, DataSpeedUpPacketGatherMachineVertex]) -> None: """ Sets the map of Chip to Gatherer Vertices. @@ -516,7 +511,7 @@ def __monitor_map_error(self) -> TypeError: "ExtraMonitorSupportMachineVertex)") def set_monitor_map(self, monitor_map: Dict[ - Chip, ExtraMonitorSupportMachineVertex]): + Chip, ExtraMonitorSupportMachineVertex]) -> None: """ Sets the map of Chip to Monitor Vertices. 
@@ -540,7 +535,7 @@ def set_monitor_map(self, monitor_map: Dict[ self.__fec_data._monitor_map = monitor_map def set_notification_protocol( - self, notification_protocol: NotificationProtocol): + self, notification_protocol: NotificationProtocol) -> None: """ Sets the notification_protocol. @@ -568,7 +563,7 @@ def add_vertex(cls, vertex: ApplicationVertex) -> None: # Avoid the safety check in FecDataView PacmanDataWriter.add_vertex(vertex) - def set_n_run_steps(self, n_run_steps: int): + def set_n_run_steps(self, n_run_steps: int) -> None: """ Sets the number of expected run-steps diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index da3f9b4056..d23085118c 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -26,8 +26,9 @@ import types from threading import Condition from typing import ( - Dict, Iterable, Optional, Sequence, Tuple, Type, + Any, Dict, Iterable, Optional, Sequence, Tuple, Type, TypeVar, Union, cast, final) +from types import FrameType import ebrains_drive # type: ignore[import] from numpy import __version__ as numpy_version @@ -245,7 +246,7 @@ def _reset_remove_data(self) -> None: with FecTimer("Cleanup reports folder based on cfg", TimerWork.REPORT): self.__reset_remove_data() - def __reset_remove_data(self): + def __reset_remove_data(self) -> None: run_dir = self._data_writer.get_run_dir_path() if not get_config_bool("Reports", "keep_json_files"): @@ -276,7 +277,7 @@ def __reset_remove_data(self): except OSError: pass - def _stop_remove_data(self): + def _stop_remove_data(self) -> None: with FecTimer("Cleanup reports folder based on cfg", TimerWork.REPORT): self.__reset_remove_data() @@ -298,7 +299,8 @@ def _setup_java_caller(self) -> None: if get_config_bool("Java", "use_java"): self._data_writer.set_java_caller(JavaCaller()) - def __signal_handler(self, _signal, _frame) -> None: + def __signal_handler( + self, _signal: int, _frame: Optional[FrameType]) -> None: """ Handles closing down of script via keyboard interrupt @@ -398,7 +400,7 @@ def __get_collab_id_from_folder( def exception_handler( self, exc_type: Type[BaseException], value: BaseException, - traceback_obj: Optional[types.TracebackType]): + traceback_obj: Optional[types.TracebackType]) -> None: """ Handler of exceptions. @@ -427,7 +429,7 @@ def _should_run(self) -> bool: "Therefore the run call will exit immediately.") return False - def run_until_complete(self, n_steps: Optional[int] = None): + def run_until_complete(self, n_steps: Optional[int] = None) -> None: """ Run a simulation until it completes. @@ -441,7 +443,7 @@ def run_until_complete(self, n_steps: Optional[int] = None): self._run(n_steps, sync_time=0.0) FecTimer.end_category(TimerCategory.RUN_OTHER) - def run(self, run_time: Optional[float], sync_time: float = 0): + def run(self, run_time: Optional[float], sync_time: float = 0) -> None: """ Run a simulation for a fixed amount of time. 
@@ -511,7 +513,7 @@ def _calc_run_time(self, run_time: Optional[float]) -> Union[ f"{self._data_writer.get_hardware_time_step_us()} us") return n_machine_time_steps, total_run_time - def _run(self, run_time: Optional[float], sync_time: float): + def _run(self, run_time: Optional[float], sync_time: float) -> None: self._data_writer.start_run() try: @@ -535,7 +537,7 @@ def __is_main_thread() -> bool: """ return threading.get_ident() == threading.main_thread().ident - def __run(self, run_time: Optional[float], sync_time: float): + def __run(self, run_time: Optional[float], sync_time: float) -> None: """ The main internal run function. @@ -678,7 +680,8 @@ def __run(self, run_time: Optional[float], sync_time: float): sys.excepthook = self.exception_handler @final - def _add_commands_to_command_sender(self, system_placements: Placements): + def _add_commands_to_command_sender( + self, system_placements: Placements) -> None: """ Runs, times and logs the VirtualMachineGenerator if required. @@ -850,7 +853,7 @@ def _execute_machine_generator(self, allocator_data: Optional[Tuple[ self._data_writer.set_transceiver(transceiver) self._data_writer.set_machine(machine) - def _get_known_machine(self, total_run_time: float = 0.0): + def _get_known_machine(self, total_run_time: float = 0.0) -> None: """ The Python machine description object. @@ -921,7 +924,8 @@ def _report_network_specification(self) -> None: return network_specification() - def _execute_split_lpg_vertices(self, system_placements: Placements): + def _execute_split_lpg_vertices( + self, system_placements: Placements) -> None: """ Runs, times and logs the SplitLPGVertices if required. """ @@ -973,7 +977,7 @@ def _execute_splitter_partitioner(self) -> None: self._data_writer.set_n_chips_in_graph(splitter_partitioner()) def _execute_insert_chip_power_monitors( - self, system_placements: Placements): + self, system_placements: Placements) -> None: """ Run, time and log the InsertChipPowerMonitorsToGraphs if required. @@ -985,7 +989,7 @@ def _execute_insert_chip_power_monitors( @final def _execute_insert_extra_monitor_vertices( - self, system_placements: Placements): + self, system_placements: Placements) -> None: """ Run, time and log the InsertExtraMonitorVerticesToGraphs if required. """ @@ -1028,7 +1032,8 @@ def get_number_of_available_cores_on_machine(self) -> int: cores -= ethernets * self._data_writer.get_ethernet_monitor_cores() return cores - def _execute_application_placer(self, system_placements: Placements): + def _execute_application_placer( + self, system_placements: Placements) -> None: """ Runs, times and logs the Application Placer. @@ -1041,7 +1046,7 @@ def _execute_application_placer(self, system_placements: Placements): self._data_writer.set_placements(place_application_graph( system_placements)) - def _do_placer(self, system_placements: Placements): + def _do_placer(self, system_placements: Placements) -> None: """ Runs, times and logs one of the placers. @@ -1170,7 +1175,8 @@ def _report_tag_allocations(self) -> None: @final def _execute_global_allocate( - self, extra_allocations: Iterable[Tuple[ApplicationVertex, str]]): + self, extra_allocations: Iterable[ + Tuple[ApplicationVertex, str]]) -> None: """ Runs, times and logs the Global Zoned Routing Info Allocator. 
@@ -1186,7 +1192,8 @@ def _execute_global_allocate( @final def _execute_flexible_allocate( - self, extra_allocations: Iterable[Tuple[ApplicationVertex, str]]): + self, extra_allocations: Iterable[ + Tuple[ApplicationVertex, str]]) -> None: """ Runs, times and logs the Zoned Routing Info Allocator. @@ -1601,7 +1608,7 @@ def _compressor_name(self) -> Tuple[str, bool]: pre_compress = "BitField" not in name return name, pre_compress - def _compression_skipable(self, tables) -> bool: + def _compression_skipable(self, tables: MulticastRoutingTables) -> bool: if get_config_bool( "Mapping", "router_table_compress_as_far_as_possible"): return False @@ -1609,7 +1616,7 @@ def _compression_skipable(self, tables) -> bool: return (tables.get_max_number_of_entries() <= machine.min_n_router_enteries) - def _execute_pre_compression(self, pre_compress: bool): + def _execute_pre_compression(self, pre_compress: bool) -> None: name = get_config_str_or_none("Mapping", "precompressor") if not pre_compress or name is None: # Declare the precompressed data to be the uncompressed data @@ -1687,7 +1694,7 @@ def _report_uncompressed_routing_table(self) -> None: return router_report_from_router_tables() - def _check_uncompressed_routing_table(self): + def _check_uncompressed_routing_table(self) -> None: """ Runs, times and logs the checking of uncompressed table """ @@ -2302,7 +2309,7 @@ def __recover_from_error(self, exception: Exception) -> None: self._print_iobuf(errors, warnings) @staticmethod - def _print_iobuf(errors: Iterable[str], warnings: Iterable[str]): + def _print_iobuf(errors: Iterable[str], warnings: Iterable[str]) -> None: """ :param list(str) errors: :param list(str) warnings: @@ -2455,7 +2462,7 @@ def continue_simulation(self) -> None: transceiver.send_signal(self._data_writer.get_app_id(), sync_signal) @staticmethod - def __reset_object(obj) -> None: + def __reset_object(obj: Any) -> None: # Reset an object if appropriate if isinstance(obj, AbstractCanReset): obj.reset_to_first_timestep() diff --git a/spinn_front_end_common/interface/buffer_management/buffer_manager.py b/spinn_front_end_common/interface/buffer_management/buffer_manager.py index fbea6335dd..00030e9733 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_manager.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_manager.py @@ -1,3 +1,4 @@ + # Copyright (c) 2015 The University of Manchester # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -176,7 +177,7 @@ def _request_data( return extra_mon_data @staticmethod - def _verify_data(extra_mon_data: bytes, txrx_data: bytes): + def _verify_data(extra_mon_data: bytes, txrx_data: bytes) -> None: sm = difflib.SequenceMatcher(a=extra_mon_data, b=txrx_data) failed_index = -1 for (tag, i1, i2, j1, j2) in sm.get_opcodes(): @@ -236,7 +237,7 @@ def resume(self) -> None: """ def clear_recorded_data( - self, x: int, y: int, p: int, recording_region_id: int): + self, x: int, y: int, p: int, recording_region_id: int) -> None: """ Removes the recorded data stored in memory. @@ -286,7 +287,7 @@ def _create_message_to_send( def _send_initial_messages( self, vertex: AbstractSendsBuffersFromHost, region: int, - progress: ProgressBar): + progress: ProgressBar) -> None: """ Send the initial set of messages. 
@@ -379,7 +380,7 @@ def extract_data(self) -> None: self.__python_extract_no_monitors(recording_placements) def __python_extract_with_monitors( - self, recording_placements: List[Placement]): + self, recording_placements: List[Placement]) -> None: """ :param list(~pacman.model.placements.Placement) recording_placements: Where to get the data from. @@ -398,7 +399,7 @@ def __python_extract_with_monitors( self.__python_extract_no_monitors(recording_placements) def __python_extract_no_monitors( - self, recording_placements: List[Placement]): + self, recording_placements: List[Placement]) -> None: """ :param list(~pacman.model.placements.Placement) recording_placements: Where to get the data from. @@ -526,7 +527,7 @@ def _raise_error(self, placement: Placement, recording_region_id: int, f"{recording_region_id} but there is no data" ) from lookup_error - def _retreive_by_placement(self, placement: Placement): + def _retreive_by_placement(self, placement: Placement) -> None: """ Retrieve the data for a vertex; must be locked first. diff --git a/spinn_front_end_common/interface/buffer_management/buffer_models/abstract_sends_buffers_from_host.py b/spinn_front_end_common/interface/buffer_management/buffer_models/abstract_sends_buffers_from_host.py index b87e1634dc..c6ae80cab5 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_models/abstract_sends_buffers_from_host.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_models/abstract_sends_buffers_from_host.py @@ -119,7 +119,7 @@ def is_empty(self, region: int) -> bool: raise NotImplementedError @abstractmethod - def rewind(self, region: int): + def rewind(self, region: int) -> None: """ Rewinds the internal buffer in preparation of re-sending the spikes. diff --git a/spinn_front_end_common/interface/buffer_management/buffer_models/sends_buffers_from_host_pre_buffered_impl.py b/spinn_front_end_common/interface/buffer_management/buffer_models/sends_buffers_from_host_pre_buffered_impl.py index 3313c49ec7..cde790c957 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_models/sends_buffers_from_host_pre_buffered_impl.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_models/sends_buffers_from_host_pre_buffered_impl.py @@ -74,5 +74,5 @@ def is_empty(self, region: int) -> bool: return len(self.send_buffers[region].timestamps) == 0 @overrides(AbstractSendsBuffersFromHost.rewind) - def rewind(self, region: int): + def rewind(self, region: int) -> None: self.send_buffers[region].rewind() diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py index 27726c6fd9..a577bfef8d 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py @@ -23,7 +23,7 @@ PROVENANCE_CORE_KEY = "Power_Monitor_Core" -def _timestamp(): +def _timestamp() -> int: return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) @@ -60,7 +60,7 @@ def clear_recording_region( :return: True if any region was changed :rtype: bool """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT recording_region_id FROM recording_region_view WHERE x = ? AND y = ? AND processor = ? 
@@ -81,7 +81,7 @@ def _clear_recording_region(self, region_id: int) -> bool: :param region_id: region to clear :return: """ - self.execute( + self.cursor().execute( """ UPDATE recording_data SET content = CAST('' AS BLOB), content_len = 0, missing_data = 2 @@ -107,7 +107,7 @@ def _read_recording_with_missing(self, region_id: int) -> Tuple[ :param int region_id: :rtype: memoryview, bool """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT count(*) as n_extractions, SUM(content_len) as total_content_length @@ -131,7 +131,7 @@ def _read_contents_single(self, region_id: int) -> Tuple[ :param int region_id: :rtype: memoryview """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT content, missing_data FROM recording_data @@ -158,7 +158,7 @@ def _read_recording_by_extraction_id( last_extraction_id = self.get_last_extraction_id() extraction_id = last_extraction_id + 1 + extraction_id - for row in self.execute( + for row in self.cursor().execute( """ SELECT content, missing_data FROM recording_data @@ -183,7 +183,7 @@ def _read_download_by_extraction_id( last_extraction_id = self.get_last_extraction_id() extraction_id = last_extraction_id + 1 + extraction_id - for row in self.execute( + for row in self.cursor().execute( """ SELECT content, missing_data FROM download_data @@ -208,7 +208,7 @@ def _read_recording_multiple( c_buffer = bytearray(total_content_length) missing_data = False idx = 0 - for row in self.execute( + for row in self.cursor().execute( """ SELECT content, missing_data FROM recording_data WHERE recording_region_id = ? ORDER BY extraction_id ASC @@ -221,7 +221,7 @@ def _read_recording_multiple( def _find_existing_recording_region_id( self, x: int, y: int, p: int, region: int) -> Optional[int]: - for row in self.execute( + for row in self.cursor().execute( """ SELECT recording_region_id FROM recording_region_view @@ -234,7 +234,7 @@ def _find_existing_recording_region_id( def _find_existing_download_region_id( self, x: int, y: int, p: int, region: int) -> Optional[int]: - for row in self.execute( + for row in self.cursor().execute( """ SELECT download_region_id FROM download_region_view @@ -277,7 +277,7 @@ def _get_recording_region_id( return region_info core_id = self._get_core_id(x, y, p) - self.execute( + self.cursor().execute( """ INSERT INTO recording_region( core_id, local_region_index) @@ -301,7 +301,7 @@ def _get_download_region_id( return region_info core_id = self._get_core_id(x, y, p) - self.execute( + self.cursor().execute( """ INSERT INTO download_region( core_id, local_region_index) @@ -311,19 +311,19 @@ def _get_download_region_id( assert region_id is not None return region_id - def store_setup_data(self): + def store_setup_data(self) -> None: """ Stores data passed into simulator setup """ - for _ in self.execute( + for _ in self.cursor().execute( """ SELECT hardware_time_step_ms FROM setup """): return - self.execute( + self.cursor().execute( """ INSERT INTO setup( setup_id, hardware_time_step_ms, time_scale_factor) @@ -332,13 +332,13 @@ def store_setup_data(self): FecDataView.get_hardware_time_step_ms(), FecDataView.get_time_scale_factor())) - def start_new_extraction(self): + def start_new_extraction(self) -> int: """ Stores the metadata for the extractions about to occur """ run_timesteps = FecDataView.get_current_run_timesteps() or 0 - self.execute( + self.cursor().execute( """ INSERT INTO extraction(run_timestep, n_run, n_loop, extract_time) VALUES(?, ?, ?, ?) 
@@ -354,7 +354,7 @@ def get_last_extraction_id(self) -> int: Get the id of the current/ last extraction """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT max(extraction_id) as max_id FROM extraction @@ -364,7 +364,7 @@ def get_last_extraction_id(self) -> int: raise LookupError("No Extraction id found") def store_recording(self, x: int, y: int, p: int, region: int, - missing: bool, data: bytes): + missing: bool, data: bytes) -> None: """ Store some information in the corresponding buffer for a specific chip, core and recording region. @@ -384,7 +384,7 @@ def store_recording(self, x: int, y: int, p: int, region: int, datablob = Binary(data) region_id = self._get_recording_region_id(x, y, p, region) extraction_id = self.get_last_extraction_id() - self.execute( + self.cursor().execute( """ INSERT INTO recording_data( recording_region_id, extraction_id, content, content_len, @@ -396,7 +396,7 @@ def store_recording(self, x: int, y: int, p: int, region: int, def store_download( self, x: int, y: int, p: int, region: int, missing: bool, - data: bytes): + data: bytes) -> None: """ Store some information in the corresponding buffer for a specific chip, core and recording region. @@ -417,7 +417,7 @@ def store_download( datablob = Binary(data) download_region_id = self._get_download_region_id(x, y, p, region) extraction_id = self.get_last_extraction_id() - self.execute( + self.cursor().execute( """ INSERT INTO download_data( download_region_id, extraction_id, content, content_len, @@ -518,27 +518,28 @@ def write_session_credentials_to_db(self) -> None: job = FecDataView.get_spalloc_job() if job is not None: config = job.get_session_credentials_for_db() - self.executemany( + self.cursor().executemany( """ INSERT INTO proxy_configuration(kind, name, value) VALUES(?, ?, ?) """, [(k1, k2, v) for (k1, k2), v in config.items()]) - def _set_core_name(self, x: int, y: int, p: int, core_name: Optional[str]): + def _set_core_name( + self, x: int, y: int, p: int, core_name: Optional[str]) -> None: """ :param int x: :param int y: :param int p: - :param str core_name: + :param core_name: """ try: - self.execute( + self.cursor().execute( """ INSERT INTO core (x, y, processor, core_name) VALUES (?, ?, ? ,?) """, (x, y, p, core_name)) except IntegrityError: - self.execute( + self.cursor().execute( """ UPDATE core SET core_name = ? WHERE x = ? AND y = ? and processor = ? 
@@ -567,7 +568,7 @@ def get_core_name(self, x: int, y: int, p: int) -> Optional[str]:
         :param int p: core p
         :rtype: str or None
         """
-        for row in self.execute(
+        for row in self.cursor().execute(
             """
             SELECT core_name
             FROM core
@@ -576,15 +577,15 @@ def get_core_name(self, x: int, y: int, p: int) -> Optional[str]:
             return str(row["core_name"], 'utf8')
         return None
 
-    def get_power_monitor_core(self, x, y) -> int:
+    def get_power_monitor_core(self, x: int, y: int) -> int:
         """
         Gets the power monitor core for chip x, y
 
-        :param str description:
-        :return: list of tuples x, y, value)
-        :rtype: list(tuple(int, int, float))
-        """
-        for row in self.execute(
+        :param int x:
+        :param int y:
+        :rtype: int
+        """
+        for row in self.cursor().execute(
             """
             SELECT the_value
             FROM monitor_provenance
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_sending_region.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_sending_region.py
index 765de283ef..c38522e7dc 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_sending_region.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_sending_region.py
@@ -72,7 +72,7 @@ def __init__(self) -> None:
         self._timestamps: List[int] = list()
         self._current_timestamp_pos: int = 0
 
-    def add_key(self, timestamp: int, key: int):
+    def add_key(self, timestamp: int, key: int) -> None:
         """
         Add a key to be sent at a given time.
 
@@ -84,7 +84,7 @@ def add_key(self, timestamp: int, key: int):
             self._buffer[timestamp] = list()
         self._buffer[timestamp].append(key)
 
-    def add_keys(self, timestamp: int, keys: Iterable[int]):
+    def add_keys(self, timestamp: int, keys: Iterable[int]) -> None:
         """
         Add a set of keys to be sent at the given time.
 
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffers_sent_deque.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffers_sent_deque.py
index e541d7252f..13cc58390d 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffers_sent_deque.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffers_sent_deque.py
@@ -98,7 +98,7 @@ def send_stop_message(self) -> None:
             self._sent_stop_message = True
             self.add_message_to_send(EventStopRequest())
 
-    def add_message_to_send(self, message: AbstractEIEIOMessage):
+    def add_message_to_send(self, message: AbstractEIEIOMessage) -> None:
         """
         Add a message to send. The message is converted to a sequenced
         message.
diff --git a/spinn_front_end_common/interface/config_handler.py b/spinn_front_end_common/interface/config_handler.py index 9bd74f9ae1..5910e4b2ec 100644 --- a/spinn_front_end_common/interface/config_handler.py +++ b/spinn_front_end_common/interface/config_handler.py @@ -75,7 +75,7 @@ def __init__(self, data_writer_cls: Optional[Type[FecDataWriter]] = None): self._ensure_provenance_for_energy_report() def __toggle_config(self, section: str, option: str, to_false: List[str], - to_true: List[str]): + to_true: List[str]) -> None: previous = get_config_str(section, option).lower() if previous in to_true: set_config(section, option, "True") @@ -144,7 +144,7 @@ def _previous_handler(self) -> None: self._replaced_cfg("Reports", "report_enabled", "[Mode]mode = Production to turn off most reports") - def _error_on_previous(self, option) -> None: + def _error_on_previous(self, option: str) -> None: try: get_config_str_list("Mapping", option) except NoOptionError: @@ -155,7 +155,7 @@ def _error_on_previous(self, option) -> None: "See https://spinnakermanchester.github.io/common_pages/" "Algorithms.html.") - def _replaced_cfg(self, section: str, previous: str, new: str): + def _replaced_cfg(self, section: str, previous: str, new: str) -> None: if has_config_option(section, previous): if get_config_bool(section, previous): raise ConfigurationException( @@ -165,7 +165,7 @@ def _replaced_cfg(self, section: str, previous: str, new: str): logger.warning(f"cfg setting [{section}] {previous} " f"is no longer supported! Use {new} instead") - def _reserve_system_vertices(self): + def _reserve_system_vertices(self) -> None: """ Reserves the sizes for the system vertices """ @@ -181,7 +181,7 @@ def _reserve_system_vertices(self): def _remove_excess_folders( self, max_kept: int, starting_directory: str, - remove_errored_folders: Optional[bool]): + remove_errored_folders: Optional[bool]) -> None: try: files_in_report_folder = os.listdir(starting_directory) @@ -245,7 +245,7 @@ def _set_up_report_specifics(self) -> None: f.write("Traceback of setup call:\n") traceback.print_stack(file=f) - def _ensure_provenance_for_energy_report(self): + def _ensure_provenance_for_energy_report(self) -> None: if get_config_bool("Reports", "write_energy_report"): set_config("Reports", "read_router_provenance_data", "True") set_config("Reports", "read_placements_provenance_data", "True") diff --git a/spinn_front_end_common/interface/ds/data_specification_base.py b/spinn_front_end_common/interface/ds/data_specification_base.py index 6d3a12759e..d0e1c52586 100644 --- a/spinn_front_end_common/interface/ds/data_specification_base.py +++ b/spinn_front_end_common/interface/ds/data_specification_base.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, Sequence, TextIO, Union +from typing import Any, Optional, Sequence, TextIO, Union import numpy @@ -59,7 +59,7 @@ def __init__(self, x: int, y: int, p: int, ds_db: DsSqlliteDatabase, self._region_num: Optional[int] = None self._size: Optional[int] = None - def _report(self, *args) -> None: + def _report(self, *args: Any) -> None: if self._report_writer is not None: text = "".join( (repr(arg) if isinstance(arg, bytes) else str(arg)) @@ -70,7 +70,7 @@ def _flush(self) -> None: if self._report_writer is not None: self._report_writer.flush() - def comment(self, comment: str): + def comment(self, comment: str) -> None: """ Write a comment to the text version of the specification. 
@@ -84,7 +84,7 @@ def comment(self, comment: str): @abstractmethod def reserve_memory_region( self, region: int, size: int, label: Optional[str] = None, - reference: Optional[int] = None): + reference: Optional[int] = None) -> None: """ Insert command to reserve a memory region. @@ -103,7 +103,7 @@ def reserve_memory_region( @abstractmethod def reference_memory_region( - self, region: int, ref: int, label: Optional[str] = None): + self, region: int, ref: int, label: Optional[str] = None) -> None: """ Insert command to reference another memory region. @@ -118,7 +118,7 @@ def reference_memory_region( """ raise NotImplementedError - def switch_write_focus(self, region: int): + def switch_write_focus(self, region: int) -> None: """ Insert command to switch the region being written to. @@ -137,7 +137,8 @@ def switch_write_focus(self, region: int): if self._size <= 0: raise DataSpecException(f"No size set for region {region}") - def write_value(self, data: Union[int, float], data_type=DataType.UINT32): + def write_value(self, data: Union[int, float], + data_type: DataType = DataType.UINT32) -> None: """ Insert command to write a value (once) to the current write pointer, causing the write pointer to move on by the number of bytes required @@ -170,7 +171,7 @@ def write_value(self, data: Union[int, float], data_type=DataType.UINT32): if len(as_bytes) > data_type.size: self._flush() raise ValueError( - f"{data}:{data_type.name} as bytes was {as_bytes} " + f"{data}:{data_type.name} as bytes was {as_bytes!r} " f"when only {data_type.size} bytes expected") if len(self._content) % 4 != 0: # check we are at a word boundary if len(as_bytes) % data_type.size != 0: @@ -184,7 +185,7 @@ def write_value(self, data: Union[int, float], data_type=DataType.UINT32): def write_array(self, array_values: Union[ Sequence[int], Sequence[float], numpy.ndarray], - data_type=DataType.UINT32): + data_type: DataType = DataType.UINT32) -> None: """ Insert command to write an array, causing the write pointer to move on by (data type size * the array size), in bytes. @@ -195,7 +196,8 @@ def write_array(self, array_values: Union[ """ assert self._content is not None assert self._content_debug is not None - data = numpy.array(array_values, dtype=data_type.numpy_typename) + data: numpy.ndarray = numpy.array( + array_values, dtype=data_type.numpy_typename) encoded = data.tobytes() diff --git a/spinn_front_end_common/interface/ds/data_specification_generator.py b/spinn_front_end_common/interface/ds/data_specification_generator.py index 40661cd94e..5d1702af3f 100644 --- a/spinn_front_end_common/interface/ds/data_specification_generator.py +++ b/spinn_front_end_common/interface/ds/data_specification_generator.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Optional, Union, cast +from typing import Optional, TextIO, Union, cast from spinn_utilities.overrides import overrides from spinn_front_end_common.utilities.constants import BYTES_PER_WORD from spinn_front_end_common.utilities.exceptions import DataSpecException @@ -35,7 +35,7 @@ def __init__( vertex: Union[ AbstractGeneratesDataSpecification, AbstractRewritesDataSpecification], - ds_db: DsSqlliteDatabase, report_writer=None): + ds_db: DsSqlliteDatabase, report_writer: Optional[TextIO] = None): """ :param int x: :param int y: @@ -66,7 +66,7 @@ def __init__( @overrides(DataSpecificationBase.reserve_memory_region) def reserve_memory_region( self, region: int, size: int, label: Optional[str] = None, - reference: Optional[int] = None): + reference: Optional[int] = None) -> None: self._report("RESERVE memRegion=", region, " size=", size, (f" label='{label}'" if label else None), (f" REF {reference}" if reference is not None else None)) @@ -80,7 +80,7 @@ def reserve_memory_region( @overrides(DataSpecificationBase.reference_memory_region) def reference_memory_region( - self, region: int, ref: int, label: Optional[str] = None): + self, region: int, ref: int, label: Optional[str] = None) -> None: self._report("REFERENCE memRegion=", region, " ref=", ref, (f" label='{label}'" if label else None)) diff --git a/spinn_front_end_common/interface/ds/data_specification_reloader.py b/spinn_front_end_common/interface/ds/data_specification_reloader.py index 19f91d60f3..c1b079ce70 100644 --- a/spinn_front_end_common/interface/ds/data_specification_reloader.py +++ b/spinn_front_end_common/interface/ds/data_specification_reloader.py @@ -29,7 +29,7 @@ class DataSpecificationReloader(DataSpecificationBase): @overrides(DataSpecificationBase.reserve_memory_region) def reserve_memory_region( self, region: int, size: int, label: Optional[str] = None, - reference: Optional[int] = None): + reference: Optional[int] = None) -> None: original_size = self._ds_db.get_region_size( self._x, self._y, self._p, region) if original_size != size: @@ -41,7 +41,7 @@ def reserve_memory_region( @overrides(DataSpecificationBase.reference_memory_region) def reference_memory_region( - self, region: int, ref: int, label: Optional[str] = None): + self, region: int, ref: int, label: Optional[str] = None) -> None: raise NotImplementedError( "reference_memory_region unexpected during reload") diff --git a/spinn_front_end_common/interface/ds/data_type.py b/spinn_front_end_common/interface/ds/data_type.py index fdf0928596..0356dd8e03 100644 --- a/spinn_front_end_common/interface/ds/data_type.py +++ b/spinn_front_end_common/interface/ds/data_type.py @@ -338,17 +338,20 @@ class DataType(Enum): np.int64, "0.63 signed fixed point number")) # rounding problem for max - def __new__(cls, *args) -> 'DataType': + def __new__(cls, value: int, size: int, min_val: Decimal, max_val: Decimal, + scale: Decimal, struct_encoding: str, apply_scale: bool, + force_cast: Optional[Callable[[Any], int]], + numpy_typename: type, _doc: str) -> 'DataType': # pylint: disable=protected-access, too-many-arguments obj = object.__new__(cls) - obj._value_ = args[0] - obj.__doc__ = args[-1] + obj._value_ = value + obj.__doc__ = _doc return obj - def __init__(self, __, size: int, min_val: Decimal, max_val: Decimal, + def __init__(self, __: int, size: int, min_val: Decimal, max_val: Decimal, scale: Decimal, struct_encoding: str, apply_scale: bool, force_cast: Optional[Callable[[Any], int]], - numpy_typename: type, _doc: str = ""): + numpy_typename: type, 
_doc: str) -> None: # pylint: disable=protected-access, too-many-arguments self._size = size self._min = min_val @@ -391,7 +394,7 @@ def max(self) -> Decimal: """ return self._max - def check_value(self, value: Union[int, float]): + def check_value(self, value: Union[int, float]) -> None: """ Check the value against the allowed min and max diff --git a/spinn_front_end_common/interface/ds/ds_sqllite_database.py b/spinn_front_end_common/interface/ds/ds_sqllite_database.py index acf690847b..3dc296f836 100644 --- a/spinn_front_end_common/interface/ds/ds_sqllite_database.py +++ b/spinn_front_end_common/interface/ds/ds_sqllite_database.py @@ -65,7 +65,7 @@ def __init__(self, database_file: Optional[str] = None): super().__init__( database_file, ddl_file=_DDL_FILE if self._init_file else None) - def _context_entered(self): + def _context_entered(self) -> None: super()._context_entered() if self._init_file: self.__init_ethernets() @@ -78,7 +78,7 @@ def __init_ethernets(self) -> None: .. note:: Call of this method has to be delayed until inside the with """ eth_chips = FecDataView.get_machine().ethernet_connected_chips - self.executemany( + self.cursor().executemany( """ INSERT INTO ethernet( ethernet_x, ethernet_y, ip_address) @@ -88,7 +88,7 @@ def __init_ethernets(self) -> None: for ethernet in eth_chips)) def set_core(self, x: int, y: int, p: int, - vertex: AbstractHasAssociatedBinary): + vertex: AbstractHasAssociatedBinary) -> None: """ Creates a database record for the core with this x,y,z @@ -113,7 +113,7 @@ def set_core(self, x: int, y: int, p: int, is_system = 1 else: is_system = 0 - self.execute( + self.cursor().execute( """ INSERT INTO core(x, y, p, is_system) VALUES(?, ?, ?, ?) @@ -133,7 +133,7 @@ def get_core_infos(self, is_system: bool) -> List[ :rtype: list(int, int, int, int, int, int) """ core_infos: List[Tuple[int, int, int, int, int]] = [] - for row in self.execute( + for row in self.cursor().execute( """ SELECT x, y, p, ethernet_x, ethernet_y FROM core_view @@ -145,13 +145,13 @@ def get_core_infos(self, is_system: bool) -> List[ row["ethernet_x"], row["ethernet_y"])) return core_infos - def _set_chip(self, x: int, y: int): + def _set_chip(self, x: int, y: int) -> None: """ :param int x: :param int y: """ # skip if it already exists - for _ in self.execute( + for _ in self.cursor().execute( """ SELECT x FROM chip @@ -160,14 +160,14 @@ def _set_chip(self, x: int, y: int): """, (x, y)): return chip = FecDataView().get_chip_at(x, y) - self.execute( + self.cursor().execute( """ INSERT INTO chip(x, y, ethernet_x, ethernet_y) VALUES(?, ?, ?, ?) 
""", (x, y, chip.nearest_ethernet_x, chip.nearest_ethernet_y)) def set_memory_region( self, x: int, y: int, p: int, region_num: int, size: int, - reference: Optional[int], label: Optional[str]): + reference: Optional[int], label: Optional[str]) -> int: """ Writes the information to reserve a memory region into the database @@ -184,7 +184,7 @@ def set_memory_region( :type reference: int or None :return: """ - self.execute( + self.cursor().execute( """ INSERT INTO region( x, y, p, region_num, size, reference_num, region_label) @@ -203,7 +203,7 @@ def get_region_size(self, x: int, y: int, p: int, region_num: int) -> int: :return: The size of the region, in bytes :rtype: int """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT size FROM region @@ -214,7 +214,7 @@ def get_region_size(self, x: int, y: int, p: int, region_num: int) -> int: raise DsDatabaseException(f"Region {region_num} not set") def set_reference(self, x: int, y: int, p: int, region_num: int, - reference: int, ref_label: Optional[str]): + reference: int, ref_label: Optional[str]) -> None: """ Writes a outgoing region_reference into the database @@ -226,7 +226,7 @@ def set_reference(self, x: int, y: int, p: int, region_num: int, :param ref_label: label for the referencing region :type ref_label: str or None """ - self.execute( + self.cursor().execute( """ INSERT INTO reference( x, y, p, region_num, reference_num, ref_label) @@ -250,7 +250,7 @@ def get_reference_pointers(self, x: int, y: int, p: int) -> Iterable[ :return: Yields the referencing vertex region number and the pointer :rtype: iterable(tuple(int,int)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT ref_region, pointer FROM linked_reference_view @@ -271,7 +271,7 @@ def get_unlinked_references(self) -> Iterable[ :return: x, y, p, region, reference, label for all unlinked references :rtype: iterable(tuple(int, int, int, int, int, str)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT x, y, ref_p, ref_region, reference_num, COALESCE(ref_label, "") as ref_label @@ -294,7 +294,7 @@ def get_double_region(self) -> Iterable[Tuple[int, int, int, int]]: :return: x, y, p, region :rtype: iterable(tuple(int, int, int, int)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT x, y, p, region_num FROM pointer_content_view @@ -305,7 +305,7 @@ def get_double_region(self) -> Iterable[Tuple[int, int, int, int]]: def set_region_content( self, x: int, y: int, p: int, region_num: int, content: bytes, - content_debug: Optional[str]): + content_debug: Optional[str]) -> None: """ Sets the content for this region @@ -319,7 +319,7 @@ def set_region_content( :raises DsDatabaseException: If the region already has content """ # check for previous content - for row in self.execute( + for row in self.cursor().execute( """ SELECT content FROM region @@ -331,7 +331,7 @@ def set_region_content( f"Illegal attempt to overwrite content for " f"{x=} {y=} {p=} {region_num=}") - self.execute( + self.cursor().execute( """ UPDATE region SET content = ?, content_debug = ? 
@@ -357,7 +357,7 @@ def get_region_pointer( :rtype: int or None :raises DsDatabaseException: if the region is not known """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT pointer FROM region @@ -381,7 +381,7 @@ def get_region_sizes(self, x: int, y: int, p: int) -> Dict[int, int]: :rtype: dict(int, int) """ regions: Dict[int, int] = dict() - for row in self.execute( + for row in self.cursor().execute( """ SELECT region_num, size FROM region @@ -406,7 +406,7 @@ def get_total_regions_size(self, x: int, y: int, p: int) -> int: or 0 if there are no regions for this core :rtype: int """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT COALESCE(sum(size), 0) as total FROM region @@ -416,7 +416,8 @@ def get_total_regions_size(self, x: int, y: int, p: int) -> int: return row["total"] raise DsDatabaseException("Query failed unexpectedly") - def set_start_address(self, x: int, y: int, p: int, start_address: int): + def set_start_address( + self, x: int, y: int, p: int, start_address: int) -> None: """ Sets the base address for a core and calculates pointers @@ -426,7 +427,7 @@ def set_start_address(self, x: int, y: int, p: int, start_address: int): :param int start_address: The base address for the whole core :raises DsDatabaseException: if the region is not known """ - self.execute( + self.cursor().execute( """ UPDATE core SET start_address = ? @@ -446,7 +447,7 @@ def get_start_address(self, x: int, y: int, p: int) -> int: :return: The base address for the whole core :rtype: int """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT start_address FROM core @@ -456,8 +457,8 @@ def get_start_address(self, x: int, y: int, p: int) -> int: return row["start_address"] raise DsDatabaseException(f"No core {x=} {y=} {p=}") - def set_region_pointer( - self, x: int, y: int, p: int, region_num: int, pointer: int): + def set_region_pointer(self, x: int, y: int, p: int, region_num: int, + pointer: int) -> None: """ Sets the pointer to the start of the address for this x, y, p region. @@ -467,7 +468,7 @@ def set_region_pointer( :param int region_num: :param int pointer: start address """ - self.execute( + self.cursor().execute( """ UPDATE region SET pointer = ? 
@@ -494,7 +495,7 @@ def get_region_pointers_and_content( :return: number, pointer and (content or None) :rtype: iterable(tuple(int, int, bytearray or None)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT region_num, content, pointer FROM pointer_content_view @@ -523,7 +524,7 @@ def get_regions_content( :return: number, pointer and (content or None) :rtype: iterable(tuple(int, int, bytearray or None)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT region_num, content, pointer FROM region @@ -542,7 +543,7 @@ def get_max_content_size(self, is_system: bool) -> int: :rtype: int :raises DsDatabaseException: """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT MAX(LENGTH(content)) AS size FROM region NATURAL JOIN CORE @@ -567,7 +568,7 @@ def get_content_sizes(self, is_system: bool) -> List[Tuple[int, int]]: :rtype: list(tuple(int, int)) """ sizes: List[Tuple[int, int]] = [] - for row in self.execute( + for row in self.cursor().execute( """ SELECT LENGTH(content) AS size, COUNT(*) AS num FROM region NATURAL JOIN core @@ -592,7 +593,7 @@ def get_ds_cores(self) -> Iterable[XYP]: :return: Yields the (x, y, p) :rtype: iterable(tuple(int,int,int)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT x, y, p FROM core """): @@ -607,7 +608,7 @@ def get_n_ds_cores(self) -> int: :rtype: int :raises DsDatabaseException: """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT COUNT(*) as count FROM core LIMIT 1 @@ -627,7 +628,7 @@ def get_memory_to_malloc(self, x: int, y: int, p: int) -> int: """ to_malloc = APP_PTR_TABLE_BYTE_SIZE # try the fast way using regions - for row in self.execute( + for row in self.cursor().execute( """ SELECT regions_size FROM region_size_view @@ -649,7 +650,7 @@ def get_memory_to_write(self, x: int, y: int, p: int) -> int: """ to_write = APP_PTR_TABLE_BYTE_SIZE # try the fast way using regions - for row in self.execute( + for row in self.cursor().execute( """ SELECT contents_size FROM content_size_view @@ -675,7 +676,7 @@ def get_info_for_cores(self) -> Iterable[Tuple[XYP, int, int, int]]: and memory_written :rtype: iterable(tuple(tuple(int, int, int), int, int, int)) """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT x, y, p, start_address, to_write,malloc_size FROM core_summary_view @@ -697,7 +698,7 @@ def write_session_credentials_to_db(self) -> None: job = cast('SpallocJobController', mac)._job if isinstance(job, SpallocJob): config = job.get_session_credentials_for_db() - self.executemany( + self.cursor().executemany( """ INSERT INTO proxy_configuration(kind, name, value) VALUES(?, ?, ?) @@ -708,7 +709,7 @@ def set_app_id(self) -> None: Sets the app id """ # check for previous content - self.execute( + self.cursor().execute( """ INSERT INTO app_id(app_id) VALUES(?) 
diff --git a/spinn_front_end_common/interface/interface_functions/application_runner.py b/spinn_front_end_common/interface/interface_functions/application_runner.py index 4b07cea84b..7ffa26cc32 100644 --- a/spinn_front_end_common/interface/interface_functions/application_runner.py +++ b/spinn_front_end_common/interface/interface_functions/application_runner.py @@ -152,8 +152,8 @@ def run_app( notification_interface.send_stop_pause_notification() return latest_runtime - def _run_wait( - self, runtime: Optional[float], time_threshold: Optional[float]): + def _run_wait(self, runtime: Optional[float], + time_threshold: Optional[float]) -> None: """ :param int runtime: :param float time_threshold: @@ -169,7 +169,7 @@ def _run_wait( sleep(time_to_wait) self._wait_for_end(timeout=time_threshold) - def _wait_for_start(self, timeout: Optional[float] = None): + def _wait_for_start(self, timeout: Optional[float] = None) -> None: """ :param timeout: :type timeout: float or None @@ -194,7 +194,7 @@ def _send_sync_signal(self) -> None: # fire all signals as required self.__txrx.send_signal(self.__app_id, sync_signal) - def _wait_for_end(self, timeout: Optional[float] = None): + def _wait_for_end(self, timeout: Optional[float] = None) -> None: """ :param timeout: :type timeout: float or None diff --git a/spinn_front_end_common/interface/interface_functions/chip_provenance_updater.py b/spinn_front_end_common/interface/interface_functions/chip_provenance_updater.py index c335147853..25241034e3 100644 --- a/spinn_front_end_common/interface/interface_functions/chip_provenance_updater.py +++ b/spinn_front_end_common/interface/interface_functions/chip_provenance_updater.py @@ -28,7 +28,7 @@ _LIMIT = 10 -def chip_provenance_updater(all_core_subsets: CoreSubsets): +def chip_provenance_updater(all_core_subsets: CoreSubsets) -> None: """ Forces all cores to generate provenance data, and then exit. @@ -88,7 +88,7 @@ def _run(self) -> None: def _update_provenance( self, total_processors: int, processors_completed: int, - progress: ProgressBar): + progress: ProgressBar) -> None: """ :param int total_processors: :param int processors_completed: diff --git a/spinn_front_end_common/interface/interface_functions/chip_runtime_updater.py b/spinn_front_end_common/interface/interface_functions/chip_runtime_updater.py index 6868b32784..7658661cdb 100644 --- a/spinn_front_end_common/interface/interface_functions/chip_runtime_updater.py +++ b/spinn_front_end_common/interface/interface_functions/chip_runtime_updater.py @@ -17,7 +17,7 @@ from spinn_front_end_common.utilities.scp import UpdateRuntimeProcess -def chip_runtime_updater(n_sync_steps: int): +def chip_runtime_updater(n_sync_steps: int) -> None: """ Updates the runtime of an application running on a SpiNNaker machine. 
diff --git a/spinn_front_end_common/interface/interface_functions/database_interface.py b/spinn_front_end_common/interface/interface_functions/database_interface.py index 2fcdf061d4..a386453cf7 100644 --- a/spinn_front_end_common/interface/interface_functions/database_interface.py +++ b/spinn_front_end_common/interface/interface_functions/database_interface.py @@ -52,7 +52,7 @@ def database_interface(runtime: Optional[float]) -> Optional[str]: return writer.database_path -def _write_to_db(w: DatabaseWriter, runtime: Optional[float]): +def _write_to_db(w: DatabaseWriter, runtime: Optional[float]) -> None: """ :param DatabaseWriter w: :param int runtime: diff --git a/spinn_front_end_common/interface/interface_functions/dsg_region_reloader.py b/spinn_front_end_common/interface/interface_functions/dsg_region_reloader.py index b712533342..cc8bcec944 100644 --- a/spinn_front_end_common/interface/interface_functions/dsg_region_reloader.py +++ b/spinn_front_end_common/interface/interface_functions/dsg_region_reloader.py @@ -13,6 +13,9 @@ # limitations under the License. from spinn_utilities.progress_bar import ProgressBar + +from pacman.model.placements import Placement + from spinn_front_end_common.interface.ds import ( DsSqlliteDatabase, DataSpecificationReloader) from spinn_front_end_common.utilities.utility_calls import get_report_writer @@ -33,7 +36,8 @@ def reload_dsg_regions() -> None: regenerate_data_spec(placement, ds_database) -def regenerate_data_spec(placement, ds_database) -> bool: +def regenerate_data_spec( + placement: Placement, ds_database: DsSqlliteDatabase) -> bool: """ Regenerate a data specification for a placement. diff --git a/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py b/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py index a192d209c1..1bbf39dfb6 100644 --- a/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py +++ b/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py @@ -30,7 +30,7 @@ _PROV_KEY = "power_provenance" -def energy_provenance_reporter(power_used: PowerUsed): +def energy_provenance_reporter(power_used: PowerUsed) -> None: """ Converts the power usage information into provenance data. 
diff --git a/spinn_front_end_common/interface/interface_functions/graph_binary_gatherer.py b/spinn_front_end_common/interface/interface_functions/graph_binary_gatherer.py index 1d908ecd1d..f6ac6f0b93 100644 --- a/spinn_front_end_common/interface/interface_functions/graph_binary_gatherer.py +++ b/spinn_front_end_common/interface/interface_functions/graph_binary_gatherer.py @@ -55,7 +55,7 @@ def gather_binaries(self) -> ExecutableTargets: return self._exe_targets - def __get_binary(self, placement: Placement): + def __get_binary(self, placement: Placement) -> None: """ :param ~pacman.model.placements.Placement placement: """ diff --git a/spinn_front_end_common/interface/interface_functions/graph_data_specification_writer.py b/spinn_front_end_common/interface/interface_functions/graph_data_specification_writer.py index 7f54e95c43..d72706785b 100644 --- a/spinn_front_end_common/interface/interface_functions/graph_data_specification_writer.py +++ b/spinn_front_end_common/interface/interface_functions/graph_data_specification_writer.py @@ -15,7 +15,7 @@ from collections import defaultdict import logging import os -from typing import Iterable, List, Sequence, Optional +from typing import Dict, Iterable, List, Optional, Sequence, Tuple from spinn_utilities.progress_bar import ProgressBar from spinn_utilities.log import FormatAdapter @@ -36,9 +36,10 @@ logger = FormatAdapter(logging.getLogger(__name__)) -def graph_data_specification_writer(placement_order=None): +def graph_data_specification_writer( + placement_order: Optional[Sequence[Placement]] = None) -> str: """ - :param list(~pacman.model.placements.Placement) placement_order: + :param placement_order: the optional order in which placements should be examined :return: Path to DSG targets database :rtype: str @@ -59,11 +60,14 @@ class _GraphDataSpecificationWriter(object): # Dict of list of vertices by chip coordinates "_vertices_by_chip") - def __init__(self): - self._sdram_usage = defaultdict(lambda: 0) - self._vertices_by_chip = defaultdict(list) + def __init__(self) -> None: + self._sdram_usage: Dict[Tuple[int, int], int] = defaultdict(lambda: 0) + self._vertices_by_chip: \ + Dict[Tuple[int, int], List[AbstractGeneratesDataSpecification]] =\ + defaultdict(list) - def run(self, placement_order: Optional[Sequence[Placement]] = None): + def run(self, + placement_order: Optional[Sequence[Placement]] = None) -> str: """ :param list(~pacman.model.placements.Placement) placement_order: the optional order in which placements should be examined @@ -102,16 +106,6 @@ def run(self, placement_order: Optional[Sequence[Placement]] = None): vertex, AbstractRewritesDataSpecification): vertices_to_reset.append(vertex) - # If the spec wasn't generated directly, and there is an - # application vertex, try with that - if not generated and vertex.app_vertex is not None: - generated = self.__generate_data_spec_for_vertices( - placement, vertex.app_vertex, ds_db) - if generated and isinstance( - vertex.app_vertex, - AbstractRewritesDataSpecification): - vertices_to_reset.append(vertex.app_vertex) - # Ensure that the vertices know their regions have been reloaded for rewriter in vertices_to_reset: rewriter.set_reload_required(False) @@ -195,7 +189,7 @@ def __generate_data_spec_for_vertices( f"Too much SDRAM has been used on {x}, {y}. 
Vertices and" f" their usage on that chip is as follows:\n{memory_usage}") - def _run_check_queries(self, ds_db: DsSqlliteDatabase): + def _run_check_queries(self, ds_db: DsSqlliteDatabase) -> None: msg = "" for x, y, p, region, reference, lbl in ds_db.get_unlinked_references(): if lbl is None: diff --git a/spinn_front_end_common/interface/interface_functions/hbp_allocator.py b/spinn_front_end_common/interface/interface_functions/hbp_allocator.py index b602e3963d..42accf48e8 100644 --- a/spinn_front_end_common/interface/interface_functions/hbp_allocator.py +++ b/spinn_front_end_common/interface/interface_functions/hbp_allocator.py @@ -58,7 +58,7 @@ def __init__(self, url: str, machine_name: str): super().__init__("HBPJobController") @overrides(MachineAllocationController.extend_allocation) - def extend_allocation(self, new_total_run_time: float): + def extend_allocation(self, new_total_run_time: float) -> None: r = requests.get(self._extend_lease_url, params={ "runTime": str(new_total_run_time)}, timeout=10) r.raise_for_status() @@ -69,12 +69,12 @@ def _check_lease(self, wait_time: int) -> JsonObject: r.raise_for_status() return r.json() - def _release(self, machine_name: str): + def _release(self, machine_name: str) -> None: r = requests.delete(self._release_machine_url, params={ "machineName": machine_name}, timeout=10) r.raise_for_status() - def _set_power(self, machine_name: str, power_on: bool): + def _set_power(self, machine_name: str, power_on: bool) -> None: r = requests.put(self._set_power_url, params={ "machineName": machine_name, "on": str(power_on)}, timeout=10) r.raise_for_status() @@ -105,7 +105,7 @@ def power(self) -> bool: """ return self._power_on - def set_power(self, power: bool): + def set_power(self, power: bool) -> None: """ Sets the power to the new state. diff --git a/spinn_front_end_common/interface/interface_functions/host_no_bitfield_router_compression.py b/spinn_front_end_common/interface/interface_functions/host_no_bitfield_router_compression.py index 04acdcb7a7..170061ed7f 100644 --- a/spinn_front_end_common/interface/interface_functions/host_no_bitfield_router_compression.py +++ b/spinn_front_end_common/interface/interface_functions/host_no_bitfield_router_compression.py @@ -139,7 +139,8 @@ def compress(self) -> None: raise SpinnFrontEndException( f"The router compressor failed on {self.__failures}") - def _load_routing_table(self, table: AbstractMulticastRoutingTable): + def _load_routing_table( + self, table: AbstractMulticastRoutingTable) -> None: """ :param pacman.model.routing_tables.AbstractMulticastRoutingTable table: the router table to load diff --git a/spinn_front_end_common/interface/interface_functions/insert_chip_power_monitors_to_graphs.py b/spinn_front_end_common/interface/interface_functions/insert_chip_power_monitors_to_graphs.py index 0c3acde25e..2b734394d1 100644 --- a/spinn_front_end_common/interface/interface_functions/insert_chip_power_monitors_to_graphs.py +++ b/spinn_front_end_common/interface/interface_functions/insert_chip_power_monitors_to_graphs.py @@ -32,7 +32,7 @@ def sample_chip_power_monitor() -> ChipPowerMonitorMachineVertex: "Sample ChipPowerMonitorMachineVertex") -def insert_chip_power_monitors_to_graphs(placements: Placements): +def insert_chip_power_monitors_to_graphs(placements: Placements) -> None: """ Adds chip power monitors into a given graph. 
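The _GraphDataSpecificationWriter.__init__ hunk a little earlier illustrates a further pattern: a bare defaultdict(...) tells mypy nothing about keys or values, so the diff pins both in the attribute annotation. A standalone sketch of the same idea, using plain value types instead of the real vertex classes:

    from collections import defaultdict
    from typing import Dict, List, Tuple

    # Annotated defaultdicts: keys are (x, y) chip coordinates.
    sdram_usage: Dict[Tuple[int, int], int] = defaultdict(lambda: 0)
    vertices_by_chip: Dict[Tuple[int, int], List[str]] = defaultdict(list)

    sdram_usage[(0, 0)] += 1024             # checked: key is Tuple[int, int]
    vertices_by_chip[(0, 0)].append("v1")   # checked: values are List[str]
    # sdram_usage["0,0"] += 1  # mypy would reject this key type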
diff --git a/spinn_front_end_common/interface/interface_functions/load_data_specification.py b/spinn_front_end_common/interface/interface_functions/load_data_specification.py index a1e2b8a79b..4c5b44cf32 100644 --- a/spinn_front_end_common/interface/interface_functions/load_data_specification.py +++ b/spinn_front_end_common/interface/interface_functions/load_data_specification.py @@ -101,7 +101,7 @@ def __reset_router_timeouts() -> None: receiver.load_application_routing_tables() # pylint: disable=unused-private-member - def __java_app(self, use_monitors: bool): + def __java_app(self, use_monitors: bool) -> None: """ :param bool use_monitors: """ @@ -119,7 +119,8 @@ def __java_app(self, use_monitors: bool): java_caller.load_app_data_specification(use_monitors) progress.end() - def load_data_specs(self, is_system: bool, uses_advanced_monitors: bool): + def load_data_specs( + self, is_system: bool, uses_advanced_monitors: bool) -> None: """ Execute the data specs for all system targets. """ @@ -147,7 +148,8 @@ def __java_sys(self) -> None: FecDataView.get_java_caller().load_system_data_specification() progress.end() - def __python_load(self, is_system: bool, uses_advanced_monitors: bool): + def __python_load( + self, is_system: bool, uses_advanced_monitors: bool) -> None: """ Does the Data Specification Execution and loading using Python. """ @@ -186,8 +188,8 @@ def __python_load(self, is_system: bool, uses_advanced_monitors: bool): if uses_advanced_monitors: self.__reset_router_timeouts() - def __python_malloc_core( - self, ds_database: DsSqlliteDatabase, x: int, y: int, p: int): + def __python_malloc_core(self, ds_database: DsSqlliteDatabase, + x: int, y: int, p: int) -> None: region_sizes = ds_database.get_region_sizes(x, y, p) total_size = sum(region_sizes.values()) malloc_size = total_size + APP_PTR_TABLE_BYTE_SIZE diff --git a/spinn_front_end_common/interface/interface_functions/load_executable_images.py b/spinn_front_end_common/interface/interface_functions/load_executable_images.py index 9ba5de7ad0..b94207ae19 100644 --- a/spinn_front_end_common/interface/interface_functions/load_executable_images.py +++ b/spinn_front_end_common/interface/interface_functions/load_executable_images.py @@ -59,8 +59,8 @@ def load_sys_images() -> None: raise e -def _load_images( - filter_predicate: Callable[[ExecutableType], bool], label: str): +def _load_images(filter_predicate: Callable[[ExecutableType], bool], + label: str) -> None: """ :param callable(ExecutableType,bool) filter_predicate: :param str label @@ -107,7 +107,7 @@ def filter_targets( return cores -def _start_simulation(cores: ExecutableTargets, app_id: int): +def _start_simulation(cores: ExecutableTargets, app_id: int) -> None: """ :param ~.ExecutableTargets cores: Possible subset of all ExecutableTargets to start diff --git a/spinn_front_end_common/interface/interface_functions/locate_executable_start_type.py b/spinn_front_end_common/interface/interface_functions/locate_executable_start_type.py index 22509854fe..c30da2a228 100644 --- a/spinn_front_end_common/interface/interface_functions/locate_executable_start_type.py +++ b/spinn_front_end_common/interface/interface_functions/locate_executable_start_type.py @@ -46,7 +46,8 @@ def locate_executable_start_type() -> Dict[ExecutableType, CoreSubsets]: return binary_start_types -def __add_vertex_to_subset(placement: Placement, core_subsets: CoreSubsets): +def __add_vertex_to_subset( + placement: Placement, core_subsets: CoreSubsets) -> None: """ :param ~.Placement placement: :param 
~.CoreSubsets core_subsets: diff --git a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py index aa1ee6b317..7f5eb9d06e 100644 --- a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py +++ b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py @@ -25,7 +25,7 @@ def placements_provenance_gatherer( - n_placements: int, placements: Iterable[Placement]): + n_placements: int, placements: Iterable[Placement]) -> None: """ Gets provenance information from the specified placements. @@ -47,7 +47,7 @@ def placements_provenance_gatherer( logger.warning("{}", error) -def _add_placement_provenance(placement: Placement, errors: List[str]): +def _add_placement_provenance(placement: Placement, errors: List[str]) -> None: """ :param ~.Placement placement: :param list(str) errors: diff --git a/spinn_front_end_common/interface/interface_functions/profile_data_gatherer.py b/spinn_front_end_common/interface/interface_functions/profile_data_gatherer.py index 579ca8b545..af0f32b11c 100644 --- a/spinn_front_end_common/interface/interface_functions/profile_data_gatherer.py +++ b/spinn_front_end_common/interface/interface_functions/profile_data_gatherer.py @@ -42,7 +42,7 @@ def profile_data_gatherer() -> None: _write(placement, profile_data, provenance_file_path) -def _write(p: Placement, profile_data: ProfileData, directory: str): +def _write(p: Placement, profile_data: ProfileData, directory: str) -> None: """ :param ~.Placement p: :param ProfileData profile_data: diff --git a/spinn_front_end_common/interface/interface_functions/router_provenance_gatherer.py b/spinn_front_end_common/interface/interface_functions/router_provenance_gatherer.py index 9a6aa2ee91..99301476af 100644 --- a/spinn_front_end_common/interface/interface_functions/router_provenance_gatherer.py +++ b/spinn_front_end_common/interface/interface_functions/router_provenance_gatherer.py @@ -103,7 +103,7 @@ def _add_router_table_diagnostic( def _add_unseen_router_chip_diagnostic( self, chip: Chip, reinjection_data: Optional[Dict[Chip, ReInjectionStatus]], - prefix: str): + prefix: str) -> None: """ :param ~.Chip chip: :param dict(Chip,ReInjectionStatus) reinjection_data: @@ -135,7 +135,7 @@ def __router_diagnostics( self, chip: Chip, diagnostics: RouterDiagnostics, status: Optional[ReInjectionStatus], expected: bool, table: Optional[AbstractMulticastRoutingTable], - prefix: str): + prefix: str) -> None: """ Describes the router diagnostics for one router. 
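_load_images above keeps its Callable[[ExecutableType], bool] predicate while gaining "-> None"; the docstring spelling callable(ExecutableType,bool) is looser than the typing.Callable form, which fixes both the argument list and the return type. A self-contained sketch, with an invented enum standing in for ExecutableType:

    from enum import Enum, auto
    from typing import Callable

    class BinaryKind(Enum):  # invented stand-in for ExecutableType
        SYSTEM = auto()
        APPLICATION = auto()

    def load_images(filter_predicate: Callable[[BinaryKind], bool],
                    label: str) -> None:
        # The annotation fixes the predicate's argument and return types,
        # so a mismatched lambda is flagged at the call site.
        for kind in BinaryKind:
            if filter_predicate(kind):
                print(f"{label}: loading {kind.name} binaries")

    load_images(lambda kind: kind is BinaryKind.SYSTEM, "boot")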
diff --git a/spinn_front_end_common/interface/interface_functions/routing_setup.py b/spinn_front_end_common/interface/interface_functions/routing_setup.py index a5b827c310..9edc0acb36 100644 --- a/spinn_front_end_common/interface/interface_functions/routing_setup.py +++ b/spinn_front_end_common/interface/interface_functions/routing_setup.py @@ -43,7 +43,8 @@ def routing_setup() -> None: _set_router_diagnostic_filters(table.x, table.y, transceiver) -def _set_router_diagnostic_filters(x: int, y: int, transceiver: Transceiver): +def _set_router_diagnostic_filters( + x: int, y: int, transceiver: Transceiver) -> None: """ :param int x: :param int y: diff --git a/spinn_front_end_common/interface/interface_functions/routing_table_loader.py b/spinn_front_end_common/interface/interface_functions/routing_table_loader.py index 9a6dff7086..edb5ee2ed5 100644 --- a/spinn_front_end_common/interface/interface_functions/routing_table_loader.py +++ b/spinn_front_end_common/interface/interface_functions/routing_table_loader.py @@ -17,7 +17,7 @@ from spinn_front_end_common.data import FecDataView -def routing_table_loader(router_tables: MulticastRoutingTables): +def routing_table_loader(router_tables: MulticastRoutingTables) -> None: """ Loads routes into initialised routers. diff --git a/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py b/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py index 088a5bcd29..a8e81dd255 100644 --- a/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py +++ b/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py @@ -84,7 +84,7 @@ def job(self) -> SpallocJob: return self._job @overrides(MachineAllocationController.extend_allocation) - def extend_allocation(self, new_total_run_time: float): + def extend_allocation(self, new_total_run_time: float) -> None: # Does Nothing in this allocator - machines are held until exit pass @@ -193,7 +193,7 @@ def proxying(self) -> bool: return self.__use_proxy @overrides(MachineAllocationController.make_report) - def make_report(self, filename: str): + def make_report(self, filename: str) -> None: with open(filename, "w", encoding="utf-8") as report: report.write(f"Job: {self._job}") @@ -217,7 +217,7 @@ def __init__(self, job: Job, host: str): super().__init__("SpallocJobController", host) @overrides(MachineAllocationController.extend_allocation) - def extend_allocation(self, new_total_run_time: float): + def extend_allocation(self, new_total_run_time: float) -> None: # Does Nothing in this allocator - machines are held until exit pass @@ -233,7 +233,7 @@ def power(self) -> bool: """ return self._job.power - def set_power(self, power: bool): + def set_power(self, power: bool) -> None: """ :param bool power: """ @@ -388,27 +388,20 @@ def _allocate_job_old(spalloc_server: str, n_boards: int) -> Tuple[ :param int n_boards: The number of boards required :rtype: tuple(str, dict(tuple(int,int),str), MachineAllocationController) """ - host, port, user = parse_old_spalloc( + host, port, owner = parse_old_spalloc( spalloc_server, get_config_int("Machine", "spalloc_port"), get_config_str("Machine", "spalloc_user")) - spalloc_kwargs = { - 'hostname': host, - 'port': port, - 'owner': user - } - spalloc_machine = get_config_str_or_none("Machine", "spalloc_machine") - - if spalloc_machine is not None: - spalloc_kwargs['machine'] = spalloc_machine + machine = get_config_str_or_none("Machine", "spalloc_machine") job, hostname, scamp_connection_data = _launch_checked_job_old( - 
n_boards, spalloc_kwargs) + n_boards, host, port, owner, machine) machine_allocation_controller = _OldSpallocJobController(job, hostname) return (hostname, scamp_connection_data, machine_allocation_controller) -def _launch_checked_job_old(n_boards: int, spalloc_kwargs: dict) -> Tuple[ - Job, str, Dict[XY, str]]: +def _launch_checked_job_old( + n_boards: int, host: str, port: int, owner: str, + machine: Optional[str]) -> Tuple[Job, str, Dict[XY, str]]: """ :rtype: tuple(~.Job, str, dict(tuple(int,int),str)) """ @@ -417,7 +410,8 @@ def _launch_checked_job_old(n_boards: int, spalloc_kwargs: dict) -> Tuple[ avoid_jobs = [] try: while True: - job = Job(n_boards, **spalloc_kwargs) + job = Job(n_boards, hostname=host, port=port, owner=owner, + machine=machine) try: job.wait_until_ready() # get param from jobs before starting, so that hanging doesn't @@ -454,4 +448,5 @@ def _launch_checked_job_old(n_boards: int, spalloc_kwargs: dict) -> Tuple[ del connections[key] for avoid_job in avoid_jobs: avoid_job.destroy("Asked to avoid by cfg") + assert hostname is not None return job, hostname, connections diff --git a/spinn_front_end_common/interface/interface_functions/split_lpg_vertices.py b/spinn_front_end_common/interface/interface_functions/split_lpg_vertices.py index 2e241206ec..aa29a7d655 100644 --- a/spinn_front_end_common/interface/interface_functions/split_lpg_vertices.py +++ b/spinn_front_end_common/interface/interface_functions/split_lpg_vertices.py @@ -19,7 +19,7 @@ LivePacketGather, _LPGSplitter) -def split_lpg_vertices(system_placements: Placements): +def split_lpg_vertices(system_placements: Placements) -> None: """ Split any LPG vertices found. diff --git a/spinn_front_end_common/interface/interface_functions/system_multicast_routing_generator.py b/spinn_front_end_common/interface/interface_functions/system_multicast_routing_generator.py index bf76b8fb63..e92f0ddd2b 100644 --- a/spinn_front_end_common/interface/interface_functions/system_multicast_routing_generator.py +++ b/spinn_front_end_common/interface/interface_functions/system_multicast_routing_generator.py @@ -14,7 +14,7 @@ from collections import defaultdict import logging -from typing import Dict, Tuple, Set, Optional, cast +from typing import Dict, List, Tuple, Set, Optional, cast from spinn_utilities.log import FormatAdapter from spinn_utilities.typing.coords import XY @@ -174,7 +174,8 @@ def _logging_retry( return tree def _add_routing_entry( - self, chip: Chip, key: int, *, processor_id=None, link_ids=None): + self, chip: Chip, key: int, *, processor_id: Optional[int] = None, + link_ids: Optional[List[int]] = None) -> None: """ Adds a routing entry on this chip, creating the table if needed. @@ -203,8 +204,8 @@ def _add_routing_entry( routing_entry=routing_entry) table.add_multicast_routing_entry(entry) - def _add_routing_entries( - self, ethernet_chip: Chip, tree: Dict[Chip, Tuple[Chip, int]]): + def _add_routing_entries(self, ethernet_chip: Chip, + tree: Dict[Chip, Tuple[Chip, int]]) -> None: """ Adds the routing entries based on the tree. 
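The spalloc_allocator hunk above is the one refactor that goes beyond annotations: the spalloc_kwargs dict is unpacked into explicit parameters, because keyword arguments accumulated in a runtime dict cannot be checked against the callee's signature. A rough sketch of the difference, using a dummy Job class rather than the real spalloc one:

    from typing import Any, Dict, Optional

    class Job(object):  # dummy stand-in for the spalloc Job class
        def __init__(self, n_boards: int, hostname: str, port: int,
                     owner: str, machine: Optional[str] = None) -> None:
            self.n_boards = n_boards

    def launch_untyped(n_boards: int, spalloc_kwargs: Dict[str, Any]) -> Job:
        # mypy cannot verify these keys or their value types.
        return Job(n_boards, **spalloc_kwargs)

    def launch_typed(n_boards: int, host: str, port: int, owner: str,
                     machine: Optional[str]) -> Job:
        # Every argument is individually checked against Job.__init__.
        return Job(n_boards, hostname=host, port=port, owner=owner,
                   machine=machine)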
diff --git a/spinn_front_end_common/interface/java_caller.py b/spinn_front_end_common/interface/java_caller.py index 951d634e45..ad2f9fd264 100644 --- a/spinn_front_end_common/interface/java_caller.py +++ b/spinn_front_end_common/interface/java_caller.py @@ -187,7 +187,7 @@ def _machine_json(self) -> str: self._machine_json_path = write_json_machine(progress_bar=False) return self._machine_json_path - def set_placements(self, used_placements: Iterable[Placement]): + def set_placements(self, used_placements: Iterable[Placement]) -> None: """ Passes in the placements, leaving this class to decide how to pass them to Java. @@ -218,7 +218,7 @@ def _placement_json(self) -> str: raise SpinnFrontEndException("placements not set") return self.__placement_json - def _json_placement(self, placement: Placement): + def _json_placement(self, placement: Placement) -> JsonObject: """ :param ~pacman.model.placements.Placement placement: :rtype: dict @@ -318,7 +318,7 @@ def _write_gather( "y": chip.y, "p": self._monitor_cores[chip]} if chip in by_chip: - json_placements = [ + json_placements: JsonArray = [ self._json_placement(placement) for placement in by_chip[chip]] if json_placements: @@ -355,7 +355,7 @@ def _write_placements( return path - def _run_java(self, *args: str): + def _run_java(self, *args: str) -> None: """ Does the actual running of `JavaSpiNNaker`. Arguments are those that will be processed by the `main` method on the Java side. diff --git a/spinn_front_end_common/interface/profiling/profile_data.py b/spinn_front_end_common/interface/profiling/profile_data.py index 4d4a818895..b1212fa936 100644 --- a/spinn_front_end_common/interface/profiling/profile_data.py +++ b/spinn_front_end_common/interface/profiling/profile_data.py @@ -58,7 +58,7 @@ def __init__(self, tag_labels: Mapping[int, str]): self._tags: Dict[str, Tuple[numpy.ndarray, numpy.ndarray]] = dict() self._max_time: float = 0.0 - def add_data(self, data: bytes): + def add_data(self, data: bytes) -> None: """ Add profiling data read from the profile section. @@ -96,9 +96,9 @@ def add_data(self, data: bytes): self._add_tag_data( entry_tags, entry_times_ms, exit_tags, exit_times_ms, tag) - def _add_tag_data( - self, entry_tags: numpy.ndarray, entry_times: numpy.ndarray, - exit_tags: numpy.ndarray, exit_times: numpy.ndarray, tag: int): + def _add_tag_data(self, entry_tags: numpy.ndarray, + entry_times: numpy.ndarray, exit_tags: numpy.ndarray, + exit_times: numpy.ndarray, tag: int) -> None: """ :param ~numpy.ndarray entry_tags: :param ~numpy.ndarray entry_times: diff --git a/spinn_front_end_common/interface/profiling/profile_utils.py b/spinn_front_end_common/interface/profiling/profile_utils.py index 21ebe35801..255f8cd8da 100644 --- a/spinn_front_end_common/interface/profiling/profile_utils.py +++ b/spinn_front_end_common/interface/profiling/profile_utils.py @@ -41,7 +41,7 @@ def get_profile_region_size(n_samples: int) -> int: def reserve_profile_region( - spec: DataSpecificationGenerator, region: int, n_samples: int): + spec: DataSpecificationGenerator, region: int, n_samples: int) -> None: """ Reserves the profile region for recording the profile data. @@ -56,7 +56,7 @@ def reserve_profile_region( def write_profile_region_data( - spec: DataSpecificationGenerator, region: int, n_samples: int): + spec: DataSpecificationGenerator, region: int, n_samples: int) -> None: """ Writes the profile region data.
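In java_caller.py above, _json_placement now declares a JsonObject return type and the list built in _write_gather is annotated as JsonArray, so mypy treats it as general JSON rather than the narrower List[JsonObject] it would otherwise infer (lists are invariant, so the two are not interchangeable). The aliases below are local assumptions sketching that kind of JSON typing, not the project's real alias definitions, and recursive aliases like these need a reasonably recent mypy:

    from typing import Dict, List, Union

    # Local stand-ins for JsonValue/JsonArray/JsonObject aliases.
    JsonValue = Union[None, bool, int, float, str, "JsonArray", "JsonObject"]
    JsonArray = List["JsonValue"]
    JsonObject = Dict[str, "JsonValue"]

    def json_placement(x: int, y: int, p: int) -> JsonObject:
        return {"x": x, "y": y, "p": p}

    # Without the annotation this would be inferred as List[JsonObject],
    # which cannot be stored where a JsonArray is expected.
    placements: JsonArray = [json_placement(0, 0, 1), json_placement(0, 0, 2)]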
diff --git a/spinn_front_end_common/interface/provenance/abstract_provides_provenance_data_from_machine.py b/spinn_front_end_common/interface/provenance/abstract_provides_provenance_data_from_machine.py index 490d61f049..35acb44418 100644 --- a/spinn_front_end_common/interface/provenance/abstract_provides_provenance_data_from_machine.py +++ b/spinn_front_end_common/interface/provenance/abstract_provides_provenance_data_from_machine.py @@ -28,7 +28,7 @@ class AbstractProvidesProvenanceDataFromMachine( __slots__ = () @abstractmethod - def get_provenance_data_from_machine(self, placement: Placement): + def get_provenance_data_from_machine(self, placement: Placement) -> None: """ Get provenance data items for a placement and store them in the provenance DB. diff --git a/spinn_front_end_common/interface/provenance/fec_timer.py b/spinn_front_end_common/interface/provenance/fec_timer.py index d18821298b..e51ff0f887 100644 --- a/spinn_front_end_common/interface/provenance/fec_timer.py +++ b/spinn_front_end_common/interface/provenance/fec_timer.py @@ -17,7 +17,9 @@ import os import time from datetime import timedelta -from typing import List, Optional, Tuple, Union, TYPE_CHECKING +from typing import List, Optional, Tuple, Type, Union, TYPE_CHECKING +from types import TracebackType + from typing_extensions import Literal, Self from spinn_utilities.config_holder import (get_config_bool) from spinn_utilities.log import FormatAdapter @@ -60,7 +62,7 @@ class FecTimer(object): APPLICATION_RUNNER = "Application runner" @classmethod - def setup(cls, simulator: AbstractSpinnakerBase): + def setup(cls, simulator: AbstractSpinnakerBase) -> None: """ Checks and saves cfg values so they don't have to be read each time @@ -86,7 +88,7 @@ def __enter__(self) -> Self: self._start_time = time.perf_counter_ns() return self - def _report(self, message: str): + def _report(self, message: str) -> None: if self._provenance_path is not None: with open(self._provenance_path, "a", encoding="utf-8") as p_file: p_file.write(f"{message}\n") @@ -94,14 +96,14 @@ def _report(self, message: str): logger.info(message) def _insert_timing( - self, time_taken: timedelta, skip_reason: Optional[str]): + self, time_taken: timedelta, skip_reason: Optional[str]) -> None: if self._category_id is not None: with GlobalProvenance() as db: db.insert_timing( self._category_id, self._algorithm, self._work, time_taken, skip_reason) - def skip(self, reason: str): + def skip(self, reason: str) -> None: """ Records that the algorithm is being skipped and ends the timer. @@ -254,7 +256,7 @@ def skip_all_cfgs_false( self.skip(reason) return True - def error(self, reason: str): + def error(self, reason: str) -> None: """ Ends an algorithm timing and records that it failed. @@ -288,7 +290,9 @@ def __convert_to_timedelta(time_diff: int) -> timedelta: """ return timedelta(microseconds=time_diff / _NANO_TO_MICRO) - def __exit__(self, exc_type, exc_value, traceback) -> Literal[False]: + def __exit__(self, exc_type: Optional[Type], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Literal[False]: if self._start_time is None: return False time_taken = self._stop_timer() @@ -325,7 +329,7 @@ def __stop_category(cls) -> int: return time_now @classmethod - def _change_category(cls, category: TimerCategory): + def _change_category(cls, category: TimerCategory) -> None: """ This method should only be called via the View!
@@ -338,7 +342,8 @@ def _change_category(cls, category: TimerCategory): cls._category_time = time_now @classmethod - def start_category(cls, category: TimerCategory, machine_on=None): + def start_category(cls, category: TimerCategory, + machine_on: Optional[bool] = None) -> None: """ This method should only be called via the View! @@ -355,7 +360,7 @@ def start_category(cls, category: TimerCategory, machine_on=None): cls._machine_on = machine_on @classmethod - def end_category(cls, category: TimerCategory): + def end_category(cls, category: TimerCategory) -> None: """ This method should only be called via the View! diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py index 95fcf0ea82..78a6d6cbfb 100644 --- a/spinn_front_end_common/interface/provenance/global_provenance.py +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -86,21 +86,22 @@ def __init__( SQLiteDB.__init__(self, database_file, ddl_file=_DDL_FILE, row_factory=None, text_factory=None) - def insert_version(self, description: str, the_value: str): + def insert_version(self, description: str, the_value: str) -> None: """ Inserts data into the version_provenance table :param str description: The package for which the version applies :param str the_value: The version to be recorded """ - self.execute( + self.cursor().execute( """ INSERT INTO version_provenance( description, the_value) VALUES(?, ?) """, [description, the_value]) - def insert_category(self, category: TimerCategory, machine_on: bool): + def insert_category( + self, category: TimerCategory, machine_on: bool) -> int: """ Inserts a category into the category_timer_provenance, returning the id :param TimerCategory category: Category to record :param bool machine_on: If the machine was on during all or some of the time """ - self.execute( + self.cursor().execute( """ INSERT INTO category_timer_provenance( category, machine_on, n_run, n_loop) @@ -119,7 +120,8 @@ FecDataView.get_run_step()]) return self.lastrowid - def insert_category_timing(self, category_id: int, delta: timedelta): + def insert_category_timing( + self, category_id: int, delta: timedelta) -> None: """ Inserts run time into the category @@ -130,7 +132,7 @@ (delta.seconds * MICRO_TO_MILLISECOND_CONVERSION) + (delta.microseconds / MICRO_TO_MILLISECOND_CONVERSION)) - self.execute( + self.cursor().execute( """ UPDATE category_timer_provenance SET @@ -140,7 +142,7 @@ def insert_timing( self, category: int, algorithm: str, work: TimerWork, - delta: timedelta, skip_reason: Optional[str]): + delta: timedelta, skip_reason: Optional[str]) -> None: """ Inserts algorithm run times into the timer_provenance table @@ -155,7 +157,7 @@ def insert_timing( time_taken = ( (delta.seconds * MICRO_TO_MILLISECOND_CONVERSION) + (delta.microseconds / MICRO_TO_MILLISECOND_CONVERSION)) - self.execute( + self.cursor().execute( """ INSERT INTO timer_provenance( category_id, algorithm, work, time_taken, skip_reason) @@ -164,7 +166,7 @@ [category, algorithm, work.work_name, time_taken, skip_reason]) def store_log(self, level: int, message: str, - timestamp: Optional[datetime] = None): + timestamp: Optional[datetime] = None) -> None: """ Stores log messages into the
database @@ -173,7 +175,7 @@ def store_log(self, level: int, message: str, """ if timestamp is None: timestamp = datetime.now() - self.execute( + self.cursor().execute( """ INSERT INTO p_log_provenance( timestamp, level, message) @@ -181,14 +183,14 @@ def store_log(self, level: int, message: str, """, [timestamp, level, message]) - def _test_log_locked(self, text: str): + def _test_log_locked(self, text: str) -> None: """ THIS IS A TESTING METHOD. This will lock the database and then try to do a log """ # lock the database - self.execute( + self.cursor().execute( """ INSERT INTO version_provenance( description, the_value) @@ -228,7 +230,7 @@ def run_query(self, query: str, :rtype: list(tuple or ~sqlite3.Row) """ results = [] - for row in self.execute(query, list(params)): + for row in self.cursor().execute(query, list(params)): results.append(row) return results diff --git a/spinn_front_end_common/interface/provenance/log_store_db.py b/spinn_front_end_common/interface/provenance/log_store_db.py index 5300d117d7..ddef7ac933 100644 --- a/spinn_front_end_common/interface/provenance/log_store_db.py +++ b/spinn_front_end_common/interface/provenance/log_store_db.py @@ -29,7 +29,7 @@ class LogStoreDB(LogStore): @overrides(LogStore.store_log) def store_log( self, level: int, message: str, - timestamp: Optional[datetime] = None): + timestamp: Optional[datetime] = None) -> None: try: with GlobalProvenance() as db: db.store_log(level, message, timestamp) diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index 852548fe03..187d33aa9f 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -13,7 +13,7 @@ # limitations under the License. import os -from typing import Iterable, List, Optional, Sequence, Tuple, Union, cast +from typing import Iterable, List, Optional, Sequence, Tuple, cast from typing_extensions import TypeAlias from spinn_utilities.typing.coords import XYP from spinn_front_end_common.data import FecDataView @@ -23,7 +23,7 @@ #: Basic types supported natively by SQLite _MonitorItem: TypeAlias = Tuple[int, int, _SqliteTypes] -_RouterItem: TypeAlias = Tuple[int, int, Union[int, float]] +_RouterItem: TypeAlias = Tuple[int, int, int] class ProvenanceReader(BaseDatabase): @@ -105,7 +105,7 @@ def run_query(self, query: str, params: Iterable[_SqliteTypes] = () statement :rtype: list(tuple or ~sqlite3.Row) """ - return list(self.execute(query, list(params))) + return list(self.cursor().execute(query, list(params))) def cores_with_late_spikes(self) -> List[Tuple[int, int, int, int]]: """ diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index 21be6adc7b..528b85d0da 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -13,7 +13,7 @@ # limitations under the License. 
import logging -from typing import Dict, Optional, Tuple, Union +from typing import Dict, Optional, Tuple from spinn_utilities.config_holder import ( get_config_int_or_none, get_config_bool) from spinn_utilities.log import FormatAdapter @@ -52,7 +52,7 @@ def __init__(self, database_file: Optional[str] = None): """ super().__init__(database_file) - def insert_power(self, description: str, the_value: _SqliteTypes): + def insert_power(self, description: str, the_value: _SqliteTypes) -> None: """ Inserts a general power value into the `power_provenance` table. @@ -61,7 +61,7 @@ def insert_power(self, description: str, the_value: _SqliteTypes): """ if not get_config_bool("Reports", "write_provenance"): return - self.execute( + self.cursor().execute( """ INSERT INTO power_provenance( description, the_value) @@ -70,7 +70,7 @@ def insert_power(self, description: str, the_value: _SqliteTypes): def insert_gatherer( self, x: int, y: int, address: int, bytes_read: int, run: int, - description: str, the_value: _SqliteTypes): + description: str, the_value: _SqliteTypes) -> None: """ Records provenance into the `gatherer_provenance` table. @@ -84,15 +84,15 @@ def insert_gatherer( """ if not get_config_bool("Reports", "write_provenance"): return - self.execute( + self.cursor().execute( """ INSERT INTO gatherer_provenance( x, y, address, bytes, run, description, the_value) VALUES(?, ?, ?, ?, ?, ?, ?) """, [x, y, address, bytes_read, run, description, the_value]) - def insert_monitor( - self, x: int, y: int, description: str, the_value: _SqliteTypes): + def insert_monitor(self, x: int, y: int, description: str, + the_value: _SqliteTypes) -> None: """ Inserts data into the `monitor_provenance` table. @@ -106,8 +106,8 @@ def insert_monitor( if get_config_bool("Reports", "write_provenance"): self.insert_monitor_value(x, y, description, the_value) - def insert_monitor_value( - self, x: int, y: int, description: str, the_value: _SqliteTypes): + def insert_monitor_value(self, x: int, y: int, description: str, + the_value: _SqliteTypes) -> None: """ Inserts data into the `monitor_provenance` table. @@ -116,9 +116,9 @@ def insert_monitor_value( :param int x: X coordinate of the chip :param int y: Y coordinate of the chip :param str description: type of value - :param int the_value: data + :param the_value: data """ - self.execute( + self.cursor().execute( """ INSERT INTO monitor_provenance( x, y, description, the_value) @@ -127,8 +127,8 @@ def insert_monitor_value( def insert_router( self, x: int, y: int, description: str, - the_value: Union[int, float], - expected: bool = True): + the_value: int, + expected: bool = True) -> None: """ Inserts data into the `router_provenance` table. @@ -141,7 +141,7 @@ def insert_router( """ if not get_config_bool("Reports", "write_provenance"): return - self.execute( + self.cursor().execute( """ INSERT INTO router_provenance( x, y, description, the_value, expected) @@ -150,7 +150,7 @@ def insert_router( def insert_core( self, x: int, y: int, p: int, description: str, - the_value: _SqliteTypes): + the_value: _SqliteTypes) -> None: """ Inserts data for a specific core into the `core_provenance` table. @@ -163,14 +163,14 @@ def insert_core( if not get_config_bool("Reports", "write_provenance"): return core_id = self._get_core_id(x, y, p) - self.execute( + self.cursor().execute( """ INSERT INTO core_provenance( core_id, description, the_value) VALUES(?, ?, ?) 
""", [core_id, description, the_value]) - def insert_report(self, message: str): + def insert_report(self, message: str) -> None: """ Save and if applicable logs a message to the `reports` table. @@ -182,7 +182,7 @@ def insert_report(self, message: str): if not get_config_bool("Reports", "write_provenance"): logger.warning(message) return - self.execute( + self.cursor().execute( """ INSERT INTO reports(message) VALUES(?) @@ -199,7 +199,7 @@ def insert_report(self, message: str): def insert_connector( self, pre_population: str, post_population: str, the_type: str, - description: str, the_value: _SqliteTypes): + description: str, the_value: _SqliteTypes) -> None: """ Inserts edge data into the `connector_provenance` @@ -211,7 +211,7 @@ def insert_connector( """ if not get_config_bool("Reports", "write_provenance"): return - self.execute( + self.cursor().execute( """ INSERT OR IGNORE INTO connector_provenance( pre_population, post_population, the_type, description, @@ -222,7 +222,7 @@ def insert_connector( the_value]) def insert_board_provenance(self, connections: Optional[ - Dict[Tuple[int, int], str]]): + Dict[Tuple[int, int], str]]) -> None: """ Write the connection details retrieved from spalloc_client job to the `boards_provenance` table. @@ -234,7 +234,7 @@ def insert_board_provenance(self, connections: Optional[ return if not connections: return - self.executemany( + self.cursor().executemany( """ INSERT OR IGNORE INTO boards_provenance( ethernet_x, ethernet_y, ip_addres) @@ -242,14 +242,14 @@ def insert_board_provenance(self, connections: Optional[ """, ((x, y, ipaddress) for ((x, y), ipaddress) in connections.items())) - def _test_log_locked(self, text): + def _test_log_locked(self, text: str) -> None: """ THIS IS A TESTING METHOD. This will lock the database and then try to do a log """ # lock the database - self.execute( + self.cursor().execute( """ INSERT INTO reports(message) VALUES(?) diff --git a/spinn_front_end_common/interface/provenance/provides_provenance_data_from_machine_impl.py b/spinn_front_end_common/interface/provenance/provides_provenance_data_from_machine_impl.py index 26db12b980..fc67eb4130 100644 --- a/spinn_front_end_common/interface/provenance/provides_provenance_data_from_machine_impl.py +++ b/spinn_front_end_common/interface/provenance/provides_provenance_data_from_machine_impl.py @@ -24,6 +24,7 @@ from spinn_front_end_common.utilities.helpful_functions import ( get_region_base_address_offset) from spinn_front_end_common.data import FecDataView +from spinn_front_end_common.interface.ds import DataSpecificationGenerator from spinn_front_end_common.utilities.constants import BYTES_PER_WORD from spinn_front_end_common.utilities.helpful_functions import n_word_struct @@ -74,7 +75,8 @@ def _n_additional_data_items(self) -> int: """ raise NotImplementedError - def reserve_provenance_data_region(self, spec) -> None: + def reserve_provenance_data_region( + self, spec: DataSpecificationGenerator) -> None: """ :param ~data_specification.DataSpecificationGenerator spec: The data specification being written. @@ -142,7 +144,7 @@ def _get_provenance_placement_description( def parse_system_provenance_items( self, label: str, x: int, y: int, p: int, - provenance_data: Sequence[int]): + provenance_data: Sequence[int]) -> None: """ Given some words of provenance data, convert the portion of them that describes the system provenance into proper provenance items. 
@@ -246,7 +248,7 @@ def _get_extra_provenance_words( def parse_extra_provenance_items( self, label: str, x: int, y: int, p: int, - provenance_data: Sequence[int]): + provenance_data: Sequence[int]) -> None: # pylint: disable=unused-argument """ Convert the remaining provenance words (those not in the standard set) @@ -273,7 +275,7 @@ def parse_extra_provenance_items( AbstractProvidesProvenanceDataFromMachine. get_provenance_data_from_machine, extend_doc=False) - def get_provenance_data_from_machine(self, placement: Placement): + def get_provenance_data_from_machine(self, placement: Placement) -> None: """ Retrieve the provenance data. diff --git a/spinn_front_end_common/interface/provenance/router_prov_mapper.py b/spinn_front_end_common/interface/provenance/router_prov_mapper.py index b2cb6dfc70..f029625d32 100644 --- a/spinn_front_end_common/interface/provenance/router_prov_mapper.py +++ b/spinn_front_end_common/interface/provenance/router_prov_mapper.py @@ -15,10 +15,11 @@ import argparse import os # pylint: disable=no-name-in-module -from typing import ( - Any, ContextManager, FrozenSet, Iterable, List, Optional, Tuple, cast) import sqlite3 -from types import ModuleType +from types import ModuleType, TracebackType +from typing import ( + Any, ContextManager, FrozenSet, Iterable, List, Optional, Tuple, Type, + cast) import numpy from typing_extensions import Literal @@ -70,14 +71,16 @@ def __init__(self, db_filename: str, verbose: bool = False): def __enter__(self) -> SQLiteDB: return self._db.__enter__() - def __exit__(self, *args) -> Literal[False]: - return self._db.__exit__(*args) + def __exit__(self, exc_type: Optional[Type], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Literal[False]: + return self._db.__exit__(exc_type, exc_val, exc_tb) def __do_chip_query(self, description: str) -> Iterable[sqlite3.Row]: # Does the query in one of two ways, depending on schema version if self.__have_insertion_order: try: - return self._db.execute(""" + return self._db.cursor().execute(""" SELECT source_name AS "source", x, y, description_name AS "description", the_value AS "value" @@ -90,7 +93,7 @@ def __do_chip_query(self, description: str) -> Iterable[sqlite3.Row]: if "no such column: insertion_order" != str(e): raise self.__have_insertion_order = False - return self._db.execute(""" + return self._db.cursor().execute(""" SELECT source_name AS "source", x, y, description_name AS "description", MAX(the_value) AS "value" @@ -110,8 +113,8 @@ def get_per_chip_prov_types(self) -> FrozenSet[str]: FROM provenance_view WHERE x IS NOT NULL AND p IS NULL AND "description" IS NOT NULL """ - return frozenset(row["description"] for row in self._db.execute( - query)) + return frozenset(row["description"] + for row in self._db.cursor().execute(query)) def get_per_chip_prov_details(self, info: str) -> Tuple[ str, int, int, numpy.ndarray]: @@ -147,7 +150,7 @@ def __do_sum_query(self, description: str) -> Iterable[sqlite3.Row]: # Does the query in one of two ways, depending on schema version if self.__have_insertion_order: try: - return self._db.execute(""" + return self._db.cursor().execute(""" SELECT "source", x, y, "description", SUM("value") AS "value" FROM ( @@ -164,7 +167,7 @@ def __do_sum_query(self, description: str) -> Iterable[sqlite3.Row]: if "no such column: insertion_order" != str(e): raise self.__have_insertion_order = False - return self._db.execute(""" + return self._db.cursor().execute(""" SELECT "source", x, y, "description", SUM("value") AS "value" FROM ( @@ -190,7 
+193,8 @@ def get_per_core_prov_types(self) -> FrozenSet[str]: AND "description" IS NOT NULL """ return frozenset( - cast(str, row["description"]) for row in self._db.execute(query)) + cast(str, row["description"]) + for row in self._db.cursor().execute(query)) def get_sum_chip_prov_details(self, info: str) -> Tuple[ str, int, int, numpy.ndarray]: @@ -234,7 +238,7 @@ def __plotter_apis(cls) -> Tuple[ModuleType, ModuleType]: "matplotlib and seaborn to plot router provenance") return cls.__pyplot, cls.__seaborn - def plot_per_core_data(self, key: str, output_filename: str): + def plot_per_core_data(self, key: str, output_filename: str) -> None: """ Plots the metadata for this key/term to the file at a core level @@ -258,7 +262,7 @@ def plot_per_core_data(self, key: str, output_filename: str): plot.savefig(output_filename, bbox_inches='tight') plot.close() - def plot_per_chip_data(self, key: str, output_filename: str): + def plot_per_chip_data(self, key: str, output_filename: str) -> None: """ Plots the metadata for this key/term to the file at a chip level diff --git a/spinn_front_end_common/interface/provenance/timer_category.py b/spinn_front_end_common/interface/provenance/timer_category.py index ede03439bf..0ad0fcfdf1 100644 --- a/spinn_front_end_common/interface/provenance/timer_category.py +++ b/spinn_front_end_common/interface/provenance/timer_category.py @@ -31,12 +31,12 @@ class TimerCategory(Enum): RESETTING = (auto(), "Resetting") SHUTTING_DOWN = (auto(), "Shutting down") - def __new__(cls, *args) -> 'TimerCategory': + def __new__(cls, value: int, __: str) -> 'TimerCategory': obj = object.__new__(cls) - obj._value_ = args[0] + obj._value_ = value return obj - def __init__(self, __, category_name: str): + def __init__(self, __: int, category_name: str) -> None: self._category_name = category_name @property diff --git a/spinn_front_end_common/interface/provenance/timer_work.py b/spinn_front_end_common/interface/provenance/timer_work.py index cc9cde03b6..62911ff845 100644 --- a/spinn_front_end_common/interface/provenance/timer_work.py +++ b/spinn_front_end_common/interface/provenance/timer_work.py @@ -33,12 +33,12 @@ class TimerWork(Enum): EXTRACT_DATA = (auto(), "Extracting Data") REPORT = (auto(), "Reporting") - def __new__(cls, *args) -> 'TimerWork': + def __new__(cls, value: int, __: str) -> 'TimerWork': obj = object.__new__(cls) - obj._value_ = args[0] + obj._value_ = value return obj - def __init__(self, __, work_name: str): + def __init__(self, __: int, work_name: str): self._work_name = work_name @property diff --git a/spinn_front_end_common/interface/splitter_selectors/splitter_selector.py b/spinn_front_end_common/interface/splitter_selectors/splitter_selector.py index c2354e8672..97323b0e40 100644 --- a/spinn_front_end_common/interface/splitter_selectors/splitter_selector.py +++ b/spinn_front_end_common/interface/splitter_selectors/splitter_selector.py @@ -35,7 +35,7 @@ def splitter_selector() -> None: vertex_selector(app_vertex) -def vertex_selector(app_vertex: ApplicationVertex): +def vertex_selector(app_vertex: ApplicationVertex) -> None: """ Main point for selecting a splitter object for a given app vertex. 
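Both FecTimer and the router provenance mapper above replace a *args __exit__ with the fully spelt-out context-manager signature. A minimal self-contained version of that pattern, mirroring the annotations the diff uses:

    from types import TracebackType
    from typing import Optional, Type
    from typing_extensions import Literal

    class Timed(object):
        def __enter__(self) -> "Timed":
            return self

        def __exit__(self, exc_type: Optional[Type],
                     exc_val: Optional[BaseException],
                     exc_tb: Optional[TracebackType]) -> Literal[False]:
            # Literal[False] tells the checker that exceptions always
            # propagate out of the with-block rather than being swallowed.
            return False

    with Timed():
        pass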
diff --git a/spinn_front_end_common/utilities/base_database.py b/spinn_front_end_common/utilities/base_database.py index f0d6136acd..36c264c517 100644 --- a/spinn_front_end_common/utilities/base_database.py +++ b/spinn_front_end_common/utilities/base_database.py @@ -26,7 +26,7 @@ _SqliteTypes: TypeAlias = Union[str, int, float, bytes, None] -def _timestamp(): +def _timestamp() -> int: return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) @@ -86,14 +86,14 @@ def _get_core_id( :param int p: :rtype: int """ - for row in self.execute( + for row in self.cursor().execute( """ SELECT core_id FROM core WHERE x = ? AND y = ? AND processor = ? LIMIT 1 """, (x, y, p)): return row["core_id"] - self.execute( + self.cursor().execute( """ INSERT INTO core(x, y, processor) VALUES(?, ?, ?) """, (x, y, p)) diff --git a/spinn_front_end_common/utilities/connections/live_event_connection.py b/spinn_front_end_common/utilities/connections/live_event_connection.py index 9820548146..45bf13f8dd 100644 --- a/spinn_front_end_common/utilities/connections/live_event_connection.py +++ b/spinn_front_end_common/utilities/connections/live_event_connection.py @@ -20,9 +20,12 @@ from typing import ( Callable, Dict, Iterable, List, Optional, Set, Tuple, Union, cast) + from typing_extensions import TypeGuard + from spinn_utilities.log import FormatAdapter from spinn_utilities.logger_utils import warn_once + from spinnman.messages.eieio.data_messages import ( EIEIODataMessage, KeyPayloadDataElement, KeyDataElement) from spinnman.messages.eieio import EIEIOType, AbstractEIEIOMessage @@ -37,6 +40,7 @@ from spinnman.messages.eieio import ( read_eieio_command_message, read_eieio_data_message) from spinnman.spalloc import SpallocEIEIOConnection, SpallocEIEIOListener + from spinn_front_end_common.utilities.constants import NOTIFY_PORT from spinn_front_end_common.utilities.database import ( DatabaseConnection, DatabaseReader) @@ -193,7 +197,7 @@ def __init__(self, live_packet_gather_label: Optional[str], self.__expect_scp_response_lock = Condition() self.__scp_response_received: Optional[bytes] = None - def add_send_label(self, label: str): + def add_send_label(self, label: str) -> None: """ Adds a send label. @@ -208,7 +212,7 @@ def add_send_label(self, label: str): self.__pause_stop_callbacks[label] = list() self.__init_callbacks[label] = list() - def add_receive_label(self, label: str): + def add_receive_label(self, label: str) -> None: """ Adds a receive label if possible. @@ -229,7 +233,8 @@ def add_receive_label(self, label: str): self.__pause_stop_callbacks[label] = list() self.__init_callbacks[label] = list() - def add_init_callback(self, label: str, init_callback: _InitCallback): + def add_init_callback( + self, label: str, init_callback: _InitCallback) -> None: """ Add a callback to be called to initialise a vertex. @@ -247,7 +252,7 @@ def add_init_callback(self, label: str, init_callback: _InitCallback): def add_receive_callback( self, label: str, live_event_callback: _RcvTimeCallback, - translate_key: bool = True): + translate_key: bool = True) -> None: """ Add a callback for the reception of time events from a vertex. @@ -280,7 +285,7 @@ def add_receive_callback( def add_receive_no_time_callback( self, label: str, live_event_callback: _RcvCallback, - translate_key: bool = True): + translate_key: bool = True) -> None: """ Add a callback for the reception of live events from a vertex.
@@ -299,7 +304,8 @@ def add_receive_no_time_callback( self.__no_time_event_callbacks[label_id].append( (live_event_callback, translate_key)) - def add_start_callback(self, label: str, start_callback: _Callback): + def add_start_callback( + self, label: str, start_callback: _Callback) -> None: """ Add a callback for the start of the simulation. @@ -318,7 +324,7 @@ def add_start_callback(self, label: str, start_callback: _Callback): self.add_start_resume_callback(label, start_callback) def add_start_resume_callback( - self, label: str, start_resume_callback: _Callback): + self, label: str, start_resume_callback: _Callback) -> None: """ Add a callback for the start and resume state of the simulation. @@ -332,7 +338,7 @@ def add_start_resume_callback( self.__start_resume_callbacks[label].append(start_resume_callback) def add_pause_stop_callback( - self, label: str, pause_stop_callback: _Callback): + self, label: str, pause_stop_callback: _Callback) -> None: """ Add a callback for the pause and stop state of the simulation. @@ -345,7 +351,7 @@ def add_pause_stop_callback( """ self.__pause_stop_callbacks[label].append(pause_stop_callback) - def __read_database_callback(self, db_reader: DatabaseReader): + def __read_database_callback(self, db_reader: DatabaseReader) -> None: """ :param DatabaseReader db_reader: """ @@ -370,8 +376,8 @@ def __read_database_callback(self, db_reader: DatabaseReader): init_callback( label, vertex_size, run_time_ms, machine_timestep / 1000.0) - def __init_sender( - self, database: DatabaseReader, vertex_sizes: Dict[str, int]): + def __init_sender(self, database: DatabaseReader, + vertex_sizes: Dict[str, int]) -> None: """ :param DatabaseReader database: :param dict(str,int) vertex_sizes: @@ -391,8 +397,8 @@ def __init_sender( database.get_atom_id_to_key_mapping(label) vertex_sizes[label] = len(self._atom_id_to_key[label]) - def __init_receivers( - self, database: DatabaseReader, vertex_sizes: Dict[str, int]): + def __init_receivers(self, database: DatabaseReader, + vertex_sizes: Dict[str, int]) -> None: """ :param DatabaseReader database: :param dict(str,int) vertex_sizes: @@ -481,7 +487,8 @@ def __handle_possible_rerun_state(self) -> None: self.__receiver_connection.close() self.__receiver_connection = None - def __launch_thread(self, kind: str, label: str, callback: _Callback): + def __launch_thread( + self, kind: str, label: str, callback: _Callback) -> None: thread = Thread( target=callback, args=(label, self), name=(f"{kind} callback thread for live_event_connection " @@ -555,7 +562,7 @@ def __handle_scp_packet(self, data: bytes) -> bool: return True return False - def __do_receive_packet(self, data: bytes): + def __do_receive_packet(self, data: bytes) -> None: if self.__handle_scp_packet(data): return @@ -563,7 +570,8 @@ def __do_receive_packet(self, data: bytes): try: header = _ONE_SHORT.unpack_from(data)[0] if header & 0xC000 == 0x4000: - return read_eieio_command_message(data, 0) + read_eieio_command_message(data, 0) + return packet: EIEIODataMessage = read_eieio_data_message(data, 0) if packet.eieio_header.is_time: self.__handle_time_packet(packet) @@ -579,7 +587,7 @@ def __rcv_label(self, label_id: int) -> str: raise ConfigurationException("no receive labels defined") return self.__receive_labels[label_id] - def __handle_time_packet(self, packet: EIEIODataMessage): + def __handle_time_packet(self, packet: EIEIODataMessage) -> None: key_times_labels: Dict[int, Dict[int, List[int]]] = defaultdict( lambda: defaultdict(list)) atoms_times_labels: Dict[int, 
Dict[int, List[int]]] = defaultdict( @@ -613,7 +621,7 @@ def __handle_time_packet(self, packet: EIEIODataMessage): else: c_back(label, time, key_times_labels[time][label_id]) - def __handle_no_time_packet(self, packet: EIEIODataMessage): + def __handle_no_time_packet(self, packet: EIEIODataMessage) -> None: while packet.is_next_element: element = packet.next_element if not isinstance(element, ( @@ -643,13 +651,13 @@ def __handle_no_time_packet(self, packet: EIEIODataMessage): else: self.__handle_unknown_key(key) - def __handle_unknown_key(self, key: int): + def __handle_unknown_key(self, key: int) -> None: if key not in self.__error_keys: self.__error_keys.add(key) logger.warning("Received unexpected key {}", key) def send_event(self, label: str, atom_id: int, - send_full_keys: bool = False): + send_full_keys: bool = False) -> None: """ Send an event from a single atom. @@ -664,7 +672,7 @@ def send_event(self, label: str, atom_id: int, self.send_events(label, [atom_id], send_full_keys) def send_events(self, label: str, atom_ids: List[int], - send_full_keys: bool = False): + send_full_keys: bool = False) -> None: """ Send a number of events. @@ -703,7 +711,7 @@ def send_events(self, label: str, atom_ids: List[int], sleep(0.1) def send_event_with_payload( - self, label: str, atom_id: int, payload: int): + self, label: str, atom_id: int, payload: int) -> None: """ Send an event with a payload from a single atom. @@ -716,7 +724,7 @@ def send_event_with_payload( def send_events_with_payloads( self, label: str, - atom_ids_and_payloads: List[Tuple[int, int]]): + atom_ids_and_payloads: List[Tuple[int, int]]) -> None: """ Send a number of events with payloads. @@ -747,7 +755,7 @@ def send_events_with_payloads( sleep(0.1) def send_eieio_message( - self, message: AbstractEIEIOMessage, label: str): + self, message: AbstractEIEIOMessage, label: str) -> None: """ Send an EIEIO message (using one-way the live input) to the vertex with the given label. @@ -763,7 +771,7 @@ def send_eieio_message( self._send(message, x, y, p, ip_address) def _send(self, message: AbstractEIEIOMessage, x: int, y: int, p: int, - ip_address: str): + ip_address: str) -> None: """ Send an EIEIO message to a particular core. diff --git a/spinn_front_end_common/utilities/database/database_connection.py b/spinn_front_end_common/utilities/database/database_connection.py index 55294efa53..41daf6da0b 100644 --- a/spinn_front_end_common/utilities/database/database_connection.py +++ b/spinn_front_end_common/utilities/database/database_connection.py @@ -85,7 +85,8 @@ def __init__( thread.daemon = True thread.start() - def add_database_callback(self, database_callback_function: _DBCB): + def add_database_callback( + self, database_callback_function: _DBCB) -> None: """ Add a database callback to be called when the database is ready. @@ -114,7 +115,7 @@ def __run(self) -> None: finally: self.__running = False - def __process_run_cycle(self, timeout: float): + def __process_run_cycle(self, timeout: float) -> None: """ Heart of :py:meth:`__run`. 
""" @@ -133,7 +134,8 @@ def __process_run_cycle(self, timeout: float): if self.__pause_and_stop_callback is not None: self.__pause_stop() - def __read_db(self, toolchain_address: Tuple[str, int], data: bytes): + def __read_db( + self, toolchain_address: Tuple[str, int], data: bytes) -> None: # Read the read packet confirmation logger.info("{}:{} Reading database", self.local_ip_address, self.local_port) @@ -181,7 +183,7 @@ def __pause_stop(self) -> None: self.__pause_and_stop_callback() def __send_command( - self, command: CMDS, toolchain_address: Tuple[str, int]): + self, command: CMDS, toolchain_address: Tuple[str, int]) -> None: self.send_to(EIEIOCommandHeader(command.value).bytestring, toolchain_address) diff --git a/spinn_front_end_common/utilities/database/database_reader.py b/spinn_front_end_common/utilities/database/database_reader.py index cf4a6ebfa3..22cd2ac4ae 100644 --- a/spinn_front_end_common/utilities/database/database_reader.py +++ b/spinn_front_end_common/utilities/database/database_reader.py @@ -30,14 +30,6 @@ def __init__(self, database_path: str): self.__job: Optional[SpallocJob] = None self.__looked_for_job = False - def __exec_one(self, query, *args): - self.execute(query + " LIMIT 1", args) - return self.fetchone() - - @staticmethod - def __r2t(row, *args): - return tuple(None if row is None else row[key] for key in args) - def get_job(self) -> Optional[SpallocJob]: """ Get the job described in the database. If no job exists, direct @@ -52,7 +44,8 @@ def get_job(self) -> Optional[SpallocJob]: job_url = None cookies = {} headers = {} - for row in self.execute(""" + for row in self.cursor().execute( + """ SELECT kind, name, value FROM proxy_configuration """): kind, name, value = row @@ -83,7 +76,7 @@ def get_key_to_atom_id_mapping(self, label: str) -> Dict[int, int]: """ return { row["event"]: row["atom"] - for row in self.execute( + for row in self.cursor().execute( """ SELECT * FROM label_event_atom_view WHERE label = ? @@ -99,7 +92,7 @@ def get_atom_id_to_key_mapping(self, label: str) -> Dict[int, int]: """ return { row["atom"]: row["event"] - for row in self.execute( + for row in self.cursor().execute( """ SELECT * FROM label_event_atom_view WHERE label = ? @@ -117,15 +110,17 @@ def get_live_output_details( chip_x, chip_y) :rtype: tuple(str, int, bool, str, int, int, int) """ - return self.__r2t( - self.__exec_one( - """ - SELECT * FROM app_output_tag_view - WHERE pre_vertex_label = ? - AND post_vertex_label LIKE ? - """, label, str(receiver_label) + "%"), - "ip_address", "port", "strip_sdp", "board_address", "tag", - "chip_x", "chip_y") + self.cursor().execute( + """ + SELECT * FROM app_output_tag_view + WHERE pre_vertex_label = ? + AND post_vertex_label LIKE ? + LIMIT 1 + """, (label, str(receiver_label) + "%")) + row = self.fetchone() + return (row["ip_address"], row["port"], row["strip_sdp"], + row["board_address"], row["tag"], row["chip_x"], + row["chip_y"]) def get_configuration_parameter_value( self, parameter_name: str) -> Optional[float]: @@ -136,17 +131,15 @@ def get_configuration_parameter_value( :return: The value of the parameter :rtype: float or None """ - row = self.__exec_one( + self.cursor().execute( """ SELECT value FROM configuration_parameters WHERE parameter_id = ? 
- """, parameter_name) + LIMIT 1 + """, (parameter_name,)) + row = self.fetchone() return None if row is None else float(row["value"]) - @staticmethod - def __xyp(row) -> Tuple[int, int, int]: - return int(row["x"]), int(row["y"]), int(row["p"]) - def get_placements(self, label: str) -> List[Tuple[int, int, int]]: """ Get the placements of an application vertex with a given label. @@ -156,7 +149,8 @@ def get_placements(self, label: str) -> List[Tuple[int, int, int]]: :rtype: list(tuple(int, int, int)) """ return [ - self.__xyp(row) for row in self.execute( + (int(row["x"]), int(row["y"]), int(row["p"])) + for row in self.cursor().execute( """ SELECT x, y, p FROM application_vertex_placements WHERE vertex_label = ? @@ -171,11 +165,13 @@ def get_ip_address(self, x: int, y: int) -> Optional[str]: :return: The IP address of the Ethernet to use to contact the chip :rtype: str or None """ - row = self.__exec_one( + self.cursor().execute( """ SELECT eth_ip_address FROM chip_eth_info WHERE x = ? AND y = ? OR x = 0 AND y = 0 ORDER BY x DESC - """, x, y) + LIMIT 1 + """, (x, y)) + row = self.fetchone() # Should only fail if no machine is present or a bad XY given! return None if row is None else row["eth_ip_address"] diff --git a/spinn_front_end_common/utilities/database/database_writer.py b/spinn_front_end_common/utilities/database/database_writer.py index e1ea3752a9..8b9136143f 100644 --- a/spinn_front_end_common/utilities/database/database_writer.py +++ b/spinn_front_end_common/utilities/database/database_writer.py @@ -15,7 +15,8 @@ from __future__ import annotations import logging import os -from typing import Dict, Iterable, List, Optional, Tuple, cast, TYPE_CHECKING +from typing import ( + cast, Dict, Iterable, List, Optional, Tuple, TYPE_CHECKING, Union) from spinn_utilities.log import FormatAdapter from spinn_machine import Machine from pacman.model.graphs import AbstractVertex @@ -39,10 +40,6 @@ INIT_SQL = "db.sql" -def _extract_int(x): - return None if x is None else int(x) - - class DatabaseWriter(SQLiteDB): """ The interface for the database system for main front ends. @@ -97,13 +94,13 @@ def database_path(self) -> str: """ return self._database_path - def __insert(self, sql: str, *args) -> int: + def __insert(self, sql: str, *args: Union[str, int, None]) -> int: """ :param str sql: :rtype: int """ try: - self.execute(sql, args) + self.cursor().execute(sql, args) return self.lastrowid except Exception: logger.exception("problem with insertion; argument types are {}", @@ -121,7 +118,7 @@ def add_machine_objects(self) -> None: x_dimension, y_dimension) VALUES(?, ?) """, machine.width, machine.height) - self.executemany( + self.cursor().executemany( """ INSERT INTO Machine_chip( no_processors, chip_x, chip_y, machine_id, @@ -160,13 +157,13 @@ def __add_machine_vertex(self, m_vertex: MachineVertex) -> int: self.__vertex_to_id[m_vertex] = m_vertex_id return m_vertex_id - def add_system_params(self, runtime: Optional[float]): + def add_system_params(self, runtime: Optional[float]) -> None: """ Write system parameters into the database. 
:param int runtime: the amount of time the application is to run for """ - self.executemany( + self.cursor().executemany( """ INSERT INTO configuration_parameters ( parameter_id, value) @@ -188,7 +185,7 @@ def add_proxy_configuration(self) -> None: job = FecDataView.get_spalloc_job() if job is not None: config = job.get_session_credentials_for_db() - self.executemany( + self.cursor().executemany( """ INSERT INTO proxy_configuration(kind, name, value) VALUES(?, ?, ?) @@ -203,7 +200,7 @@ def add_placements(self) -> None: if placement.vertex not in self.__vertex_to_id: self.__add_machine_vertex(placement.vertex) # add records - self.executemany( + self.cursor().executemany( """ INSERT INTO Placements( vertex_id, chip_x, chip_y, chip_p, machine_id) @@ -218,7 +215,7 @@ def add_tags(self) -> None: Adds the tags into the database. """ tags = FecDataView.get_tags() - self.executemany( + self.cursor().executemany( """ INSERT INTO IP_tags( vertex_id, tag, board_address, ip_address, port, @@ -231,7 +228,7 @@ def add_tags(self) -> None: def create_atom_to_event_id_mapping( self, machine_vertices: Optional[ - Iterable[Tuple[MachineVertex, str]]]): + Iterable[Tuple[MachineVertex, str]]]) -> None: """ :param machine_vertices: :type machine_vertices: @@ -262,7 +259,7 @@ def create_atom_to_event_id_mapping( f"{key_vertices[key]}") key_vertices[key] = m_vertex m_vertex_id = self.__vertex_to_id[m_vertex] - self.executemany( + self.cursor().executemany( """ INSERT INTO event_to_atom_mapping( vertex_id, event_id, atom_id) @@ -271,14 +268,14 @@ def create_atom_to_event_id_mapping( ) def create_device_atom_event_id_mapping( - self, devices: Iterable[LiveOutputDevice]): + self, devices: Iterable[LiveOutputDevice]) -> None: """ Add output mappings for devices. """ for device in devices: for m_vertex, atom_keys in device.get_device_output_keys().items(): m_vertex_id = self.__vertex_to_id[m_vertex] - self.executemany( + self.cursor().executemany( """ INSERT INTO event_to_atom_mapping( vertex_id, event_id, atom_id) @@ -329,7 +326,7 @@ def add_lpg_mapping(self) -> List[Tuple[MachineVertex, str]]: for (m_vertex, part_id, lpg_m_vertex) in self._get_machine_lpg_mappings(part)) - self.executemany( + self.cursor().executemany( """ INSERT INTO m_vertex_to_lpg_vertex( pre_vertex_id, partition_id, post_vertex_id) diff --git a/spinn_front_end_common/utilities/db.sql b/spinn_front_end_common/utilities/db.sql index 22b4d0db02..3deb186e01 100644 --- a/spinn_front_end_common/utilities/db.sql +++ b/spinn_front_end_common/utilities/db.sql @@ -189,7 +189,7 @@ CREATE TABLE IF NOT EXISTS router_provenance( x INTEGER NOT NULL, y INTEGER NOT NULL, description STRING NOT NULL, - the_value FLOAT NOT NULL, + the_value INTEGER NOT NULL, expected INTEGER NOT NULL); -- Compute some basic statistics per router over the router provenance diff --git a/spinn_front_end_common/utilities/emergency_recovery.py b/spinn_front_end_common/utilities/emergency_recovery.py index eee0bbadd7..febab5b0f6 100644 --- a/spinn_front_end_common/utilities/emergency_recovery.py +++ b/spinn_front_end_common/utilities/emergency_recovery.py @@ -68,7 +68,7 @@ def _emergency_state_check() -> None: def _emergency_iobuf_extract( - executable_targets: Optional[ExecutableTargets] = None): + executable_targets: Optional[ExecutableTargets] = None) -> None: """ :param executable_targets: The specific targets to extract, or `None` for all @@ -81,7 +81,7 @@ def _emergency_iobuf_extract( def emergency_recover_state_from_failure( - vertex: AbstractHasAssociatedBinary, placement: 
Placement): + vertex: AbstractHasAssociatedBinary, placement: Placement) -> None: """ Used to get at least *some* information out of a core when something goes badly wrong. Not a replacement for what abstract spinnaker base does. diff --git a/spinn_front_end_common/utilities/iobuf_extractor.py b/spinn_front_end_common/utilities/iobuf_extractor.py index 80f86847bd..aced4a6c70 100644 --- a/spinn_front_end_common/utilities/iobuf_extractor.py +++ b/spinn_front_end_common/utilities/iobuf_extractor.py @@ -227,7 +227,7 @@ def __extract_selected_types(self) -> Tuple[List[str], List[str]]: def __extract_iobufs_for_binary( self, core_subsets: CoreSubsets, binary: str, - error_entries: List[str], warn_entries: List[str]): + error_entries: List[str], warn_entries: List[str]) -> None: """ :param ~.CoreSubsets core_subsets: Where the binary is deployed :param str binary: What binary was deployed there. @@ -252,7 +252,7 @@ def __extract_iobufs_for_binary( def __process_one_iobuf( self, iobuf: IOBuffer, file_path: str, replacer: Replacer, - error_entries: List[str], warn_entries: List[str]): + error_entries: List[str], warn_entries: List[str]) -> None: """ :param ~.IOBuffer iobuf: :param str file_path: @@ -300,8 +300,8 @@ def __recover_iobufs(self, core_subsets: CoreSubsets) -> List[IOBuffer]: return io_buffers @staticmethod - def __add_value_if_match( - regex: Pattern, line: str, entries: List[str], iobuf: IOBuffer): + def __add_value_if_match(regex: Pattern, line: str, + entries: List[str], iobuf: IOBuffer) -> None: """ :param ~typing.Pattern regex: :param str line: diff --git a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py index 1317a7ff22..281db3df3b 100644 --- a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py +++ b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py @@ -34,7 +34,7 @@ NOT_APPLICABLE = "N/A" -def generate_provenance_item(x: int, y: int, bit_fields_merged: int): +def generate_provenance_item(x: int, y: int, bit_fields_merged: int) -> None: """ Generates a provenance item in the format BitFieldCompressorReport expects. diff --git a/spinn_front_end_common/utilities/report_functions/board_chip_report.py b/spinn_front_end_common/utilities/report_functions/board_chip_report.py index d618b50514..ba39e902b6 100644 --- a/spinn_front_end_common/utilities/report_functions/board_chip_report.py +++ b/spinn_front_end_common/utilities/report_functions/board_chip_report.py @@ -40,7 +40,8 @@ def board_chip_report() -> None: _write_report(writer, machine, progress_bar) -def _write_report(writer: TextIO, machine: Machine, progress_bar: ProgressBar): +def _write_report( + writer: TextIO, machine: Machine, progress_bar: ProgressBar) -> None: """ :param ~io.FileIO writer: :param ~spinn_machine.Machine machine: diff --git a/spinn_front_end_common/utilities/report_functions/energy_report.py b/spinn_front_end_common/utilities/report_functions/energy_report.py index ce7d6cbd98..b660f4f22c 100644 --- a/spinn_front_end_common/utilities/report_functions/energy_report.py +++ b/spinn_front_end_common/utilities/report_functions/energy_report.py @@ -33,7 +33,7 @@ class EnergyReport(object): # energy report file name _SUMMARY_FILENAME = "energy_report.rpt" - def write_energy_report(self, power_used: PowerUsed): + def write_energy_report(self, power_used: PowerUsed) -> None: """ Writes the report. 
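# Editor's note: the diff types __add_value_if_match above but does not
# show its body; this is a hedged guess at the shape of such a filter.
# Lines of an IOBUF matching the error/warning regex are collected into
# the corresponding entries list. The IOBuffer stand-in is hypothetical.
import re
from typing import List, NamedTuple, Pattern

class FakeIOBuffer(NamedTuple):
    x: int
    y: int
    p: int

def add_value_if_match(regex: Pattern, line: str, entries: List[str],
                       iobuf: FakeIOBuffer) -> None:
    match = regex.search(line)
    if match:
        # Tag the message with the core it came from.
        entries.append(f"{iobuf.x}, {iobuf.y}, {iobuf.p}: {match.group(0)}")

warns: List[str] = []
add_value_if_match(re.compile(r"\[WARNING\].*"),
                   "[WARNING] clock drift", warns, FakeIOBuffer(0, 0, 3))
assert warns == ["0, 0, 3: [WARNING] clock drift"]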
@@ -50,7 +50,7 @@ def write_energy_report(self, power_used: PowerUsed): self._write_summary_report(f, power_used) @classmethod - def _write_summary_report(cls, f: TextIO, power_used: PowerUsed): + def _write_summary_report(cls, f: TextIO, power_used: PowerUsed) -> None: """ Write summary file. diff --git a/spinn_front_end_common/utilities/report_functions/memory_map_on_host_chip_report.py b/spinn_front_end_common/utilities/report_functions/memory_map_on_host_chip_report.py index bb4d53b0cb..f527a99359 100644 --- a/spinn_front_end_common/utilities/report_functions/memory_map_on_host_chip_report.py +++ b/spinn_front_end_common/utilities/report_functions/memory_map_on_host_chip_report.py @@ -57,7 +57,8 @@ def memory_map_on_host_chip_report() -> None: file_name) -def _describe_mem_map(f: TextIO, txrx: Transceiver, x: int, y: int, p: int): +def _describe_mem_map( + f: TextIO, txrx: Transceiver, x: int, y: int, p: int) -> None: """ :param ~spinnman.transceiver.Transceiver txrx: """ diff --git a/spinn_front_end_common/utilities/report_functions/network_specification.py b/spinn_front_end_common/utilities/report_functions/network_specification.py index 4d004bd254..9238e96c8b 100644 --- a/spinn_front_end_common/utilities/report_functions/network_specification.py +++ b/spinn_front_end_common/utilities/report_functions/network_specification.py @@ -39,7 +39,7 @@ def network_specification() -> None: " for writing.", filename) -def _write_report(f: TextIO, vertex: ApplicationVertex): +def _write_report(f: TextIO, vertex: ApplicationVertex) -> None: """ :param ~io.FileIO f: :param ~pacman.model.graphs.application.ApplicationVertex vertex: diff --git a/spinn_front_end_common/utilities/report_functions/reports.py b/spinn_front_end_common/utilities/report_functions/reports.py index b99d5715fb..7248e5a53a 100644 --- a/spinn_front_end_common/utilities/report_functions/reports.py +++ b/spinn_front_end_common/utilities/report_functions/reports.py @@ -211,7 +211,7 @@ def router_report_from_paths() -> None: def _write_one_router_partition_report( - f: TextIO, partition: ApplicationEdgePartition): + f: TextIO, partition: ApplicationEdgePartition) -> None: """ :param ~io.FileIO f: :param AbstractSingleSourcePartition partition: @@ -263,7 +263,7 @@ def partitioner_report() -> None: file_name) -def _write_one_vertex_partition(f: TextIO, vertex: ApplicationVertex): +def _write_one_vertex_partition(f: TextIO, vertex: ApplicationVertex) -> None: """ :param ~io.FileIO f: :param ~pacman.model.graphs.application.ApplicationVertex vertex: @@ -314,7 +314,7 @@ def placement_report_with_application_graph_by_vertex() -> None: def _write_one_vertex_application_placement( - f: TextIO, vertex: ApplicationVertex): + f: TextIO, vertex: ApplicationVertex) -> None: """ :param ~io.FileIO f: :param ~pacman.model.graphs.application.ApplicationVertex vertex: @@ -379,7 +379,7 @@ def placement_report_with_application_graph_by_core() -> None: file_name) -def _write_one_chip_application_placement(f: TextIO, chip: Chip): +def _write_one_chip_application_placement(f: TextIO, chip: Chip) -> None: """ :param ~io.FileIO f: :param ~spinn_machine.Chip chip: @@ -452,7 +452,7 @@ def sdram_usage_report_per_chip() -> None: def _sdram_usage_report_per_chip_with_timesteps( f: TextIO, timesteps: Optional[int], progress: ProgressBar, - end_progress: bool, details: bool): + end_progress: bool, details: bool) -> None: """ :param ~io.FileIO f: :param int timesteps: Either the plan or data timesteps @@ -502,8 +502,8 @@ def 
_sdram_usage_report_per_chip_with_timesteps( pass -def routing_info_report( - extra_allocations: Iterable[Tuple[ApplicationVertex, str]] = ()): +def routing_info_report(extra_allocations: Iterable[ + Tuple[ApplicationVertex, str]] = ()) -> None: """ Generates a report which says which keys is being allocated to each vertex. @@ -532,7 +532,7 @@ def routing_info_report( def _write_vertex_virtual_keys( f: TextIO, pre_vertex: ApplicationVertex, part_id: str, - routing_infos: RoutingInfo): + routing_infos: RoutingInfo) -> None: """ :param ~io.FileIO f: :param ~pacman.model.graphs.application.ApplicationVertex pre_vertex: @@ -572,7 +572,7 @@ def router_report_from_router_tables() -> None: def router_report_from_compressed_router_tables( - routing_tables: MulticastRoutingTables): + routing_tables: MulticastRoutingTables) -> None: """ Report the compressed routing tables. @@ -590,8 +590,8 @@ def router_report_from_compressed_router_tables( generate_routing_table(routing_table, top_level_folder) -def generate_routing_table( - routing_table: AbstractMulticastRoutingTable, top_level_folder: str): +def generate_routing_table(routing_table: AbstractMulticastRoutingTable, + top_level_folder: str) -> None: """ :param routing_table: The routing table to describe :type routing_table: @@ -641,7 +641,7 @@ def _compression_ratio(uncompressed: int, compressed: int) -> float: def generate_comparison_router_report( - compressed_routing_tables: MulticastRoutingTables): + compressed_routing_tables: MulticastRoutingTables) -> None: """ Make a report on comparison of the compressed and uncompressed routing tables. diff --git a/spinn_front_end_common/utilities/scp/clear_iobuf_process.py b/spinn_front_end_common/utilities/scp/clear_iobuf_process.py index f666564f70..7efc29cd3b 100644 --- a/spinn_front_end_common/utilities/scp/clear_iobuf_process.py +++ b/spinn_front_end_common/utilities/scp/clear_iobuf_process.py @@ -51,11 +51,11 @@ class ClearIOBUFProcess(AbstractMultiConnectionProcess[CheckOKResponse]): __slots__ = () def __receive_response( - self, progress: ProgressBar, _response: CheckOKResponse): + self, progress: ProgressBar, _response: CheckOKResponse) -> None: progress.update() def clear_iobuf(self, core_subsets: CoreSubsets, - n_cores: Optional[int] = None): + n_cores: Optional[int] = None) -> None: """ :param ~spinn_machine.CoreSubsets core_subsets: :param int n_cores: Defaults to the number of cores in `core_subsets`. 
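# Editor's note: clear_iobuf above lets n_cores default to the size of
# core_subsets. A sketch of that default, assuming CoreSubsets iterates
# over per-chip CoreSubset objects exposing processor_ids, as in
# spinn_machine; the stand-in class here is hypothetical.
from typing import Iterable, List

class FakeCoreSubset:
    def __init__(self, processor_ids: List[int]) -> None:
        self.processor_ids = processor_ids

def default_n_cores(core_subsets: Iterable[FakeCoreSubset]) -> int:
    # One unit of progress per (x, y, p) target.
    return sum(len(cs.processor_ids) for cs in core_subsets)

assert default_n_cores([FakeCoreSubset([1, 2]), FakeCoreSubset([3])]) == 3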
diff --git a/spinn_front_end_common/utilities/scp/get_current_time_process.py b/spinn_front_end_common/utilities/scp/get_current_time_process.py index b94dc5c55e..6e973ae86b 100644 --- a/spinn_front_end_common/utilities/scp/get_current_time_process.py +++ b/spinn_front_end_common/utilities/scp/get_current_time_process.py @@ -24,7 +24,8 @@ from spinnman.messages.scp.abstract_messages import ( AbstractSCPRequest, AbstractSCPResponse) from spinnman.messages.scp import SCPRequestHeader -from spinnman.processes import AbstractMultiConnectionProcess +from spinnman.processes import ( + AbstractMultiConnectionProcess, MostDirectConnectionSelector) from spinnman.model.enums import ( SDP_PORTS, SDP_RUNNING_MESSAGE_CODES) from spinnman.messages.scp.enums import SCPResult @@ -39,12 +40,12 @@ class _GetCurrentTimeResponse(AbstractSCPResponse): "__current_time", ) - def __init__(self): + def __init__(self) -> None: super().__init__() - self.__current_time = None + self.__current_time: Optional[int] = None @overrides(AbstractSCPResponse.read_data_bytestring) - def read_data_bytestring(self, data: bytes, offset: int): + def read_data_bytestring(self, data: bytes, offset: int) -> None: result = self.scp_response_header.result # We can accept a no-reply response here; that could just mean # that the count wasn't complete (but might be enough anyway) @@ -57,6 +58,7 @@ def read_data_bytestring(self, data: bytes, offset: int): def current_time(self) -> int: """ Get the current time from the response """ + assert self.__current_time is not None return self.__current_time @@ -96,13 +98,13 @@ class GetCurrentTimeProcess( "__earliest_time" ) - def __init__(self, connection_selector): + def __init__(self, connection_selector: MostDirectConnectionSelector): super().__init__(connection_selector) - self.__latest_time = None - self.__earliest_time = None + self.__latest_time: Optional[int] = None + self.__earliest_time: Optional[int] = None - def __receive_response( - self, progress: ProgressBar, response: _GetCurrentTimeResponse): + def __receive_response(self, progress: ProgressBar, + response: _GetCurrentTimeResponse) -> None: progress.update() current_time = response.current_time if self.__latest_time is None or current_time > self.__latest_time: diff --git a/spinn_front_end_common/utilities/scp/load_mc_routes_process.py b/spinn_front_end_common/utilities/scp/load_mc_routes_process.py index 4a0d130b0f..552abc9655 100644 --- a/spinn_front_end_common/utilities/scp/load_mc_routes_process.py +++ b/spinn_front_end_common/utilities/scp/load_mc_routes_process.py @@ -26,7 +26,7 @@ class LoadMCRoutesProcess(AbstractMultiConnectionProcess[CheckOKResponse]): """ __slots__ = () - def load_application_mc_routes(self, core_subsets: CoreSubsets): + def load_application_mc_routes(self, core_subsets: CoreSubsets) -> None: """ Load the saved application multicast routes. @@ -39,7 +39,7 @@ def load_application_mc_routes(self, core_subsets: CoreSubsets): self._send_request(LoadApplicationMCRoutesMessage( core_subset.x, core_subset.y, processor_id)) - def load_system_mc_routes(self, core_subsets: CoreSubsets): + def load_system_mc_routes(self, core_subsets: CoreSubsets) -> None: """ Load the saved system multicast routes. 
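# Editor's note: the _GetCurrentTimeResponse hunk below shows the
# Optional-plus-assert pattern this PR leans on to satisfy mypy. A
# standalone version: the field starts as None, is filled in by the
# reader, and the assert narrows Optional[int] to int in the property.
import struct
from typing import Optional

class TimeResponse:
    def __init__(self) -> None:
        self._current_time: Optional[int] = None

    def read_data_bytestring(self, data: bytes, offset: int) -> None:
        # One little-endian word, imitating the SCP payload layout.
        self._current_time = struct.unpack_from("<I", data, offset)[0]

    @property
    def current_time(self) -> int:
        assert self._current_time is not None, "response not read yet"
        return self._current_time

r = TimeResponse()
r.read_data_bytestring(struct.pack("<I", 42), 0)
assert r.current_time == 42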
diff --git a/spinn_front_end_common/utilities/scp/reinjector_control_process.py b/spinn_front_end_common/utilities/scp/reinjector_control_process.py index 061e627025..2e15837e85 100644 --- a/spinn_front_end_common/utilities/scp/reinjector_control_process.py +++ b/spinn_front_end_common/utilities/scp/reinjector_control_process.py @@ -34,7 +34,7 @@ class ReinjectorControlProcess(AbstractMultiConnectionProcess): """ __slots__ = () - def clear_queue(self, core_subsets: CoreSubsets): + def clear_queue(self, core_subsets: CoreSubsets) -> None: """ Clear the reinjection queue. @@ -46,7 +46,7 @@ def clear_queue(self, core_subsets: CoreSubsets): self._send_request(ClearReinjectionQueueMessage( core_subset.x, core_subset.y, processor_id)) - def reset_counters(self, core_subsets: CoreSubsets): + def reset_counters(self, core_subsets: CoreSubsets) -> None: """ Reset the packet counters. @@ -61,7 +61,7 @@ def reset_counters(self, core_subsets: CoreSubsets): @staticmethod def __handle_response( result: Dict[Chip, ReInjectionStatus], - response: GetReinjectionStatusMessageResponse): + response: GetReinjectionStatusMessageResponse) -> None: """ :param dict result: :param GetReinjectionStatusMessageResponse response: @@ -113,7 +113,8 @@ def get_reinjection_status_for_core_subsets( def set_packet_types( self, core_subsets: CoreSubsets, point_to_point: bool, - multicast: bool, nearest_neighbour: bool, fixed_route: bool): + multicast: bool, nearest_neighbour: bool, + fixed_route: bool) -> None: """ Set what types of packets should be reinjected. @@ -131,8 +132,8 @@ def set_packet_types( core_subset.x, core_subset.y, processor_id, multicast, point_to_point, fixed_route, nearest_neighbour)) - def set_wait1_timeout( - self, mantissa: int, exponent: int, core_subsets: CoreSubsets): + def set_wait1_timeout(self, mantissa: int, exponent: int, + core_subsets: CoreSubsets) -> None: """ The wait1 timeout is the time from when a packet is received to when emergency routing becomes enabled. @@ -147,8 +148,8 @@ def set_wait1_timeout( self.__set_timeout( core_subset, processor_id, mantissa, exponent, wait=1) - def set_wait2_timeout( - self, mantissa: int, exponent: int, core_subsets: CoreSubsets): + def set_wait2_timeout(self, mantissa: int, exponent: int, + core_subsets: CoreSubsets) -> None: """ The wait2 timeout is the time from when a packet has emergency routing enabled for it to when it is dropped. @@ -165,7 +166,7 @@ def set_wait2_timeout( def __set_timeout( self, core: CoreSubset, processor_id: int, - mantissa: int, exponent: int, *, wait: int): + mantissa: int, exponent: int, *, wait: int) -> None: """ Set a timeout for a router controlled by an extra monitor on a core. 
This is not a parallelised operation in order to aid debugging when diff --git a/spinn_front_end_common/utilities/scp/send_pause_process.py b/spinn_front_end_common/utilities/scp/send_pause_process.py index 44c3315b40..6b1b156a1d 100644 --- a/spinn_front_end_common/utilities/scp/send_pause_process.py +++ b/spinn_front_end_common/utilities/scp/send_pause_process.py @@ -59,10 +59,10 @@ class SendPauseProcess(AbstractMultiConnectionProcess[CheckOKResponse]): __slots__ = () def __receive_response( - self, progress: ProgressBar, _response: CheckOKResponse): + self, progress: ProgressBar, _response: CheckOKResponse) -> None: progress.update() - def send_pause(self, core_subsets: CoreSubsets, n_cores: int): + def send_pause(self, core_subsets: CoreSubsets, n_cores: int) -> None: """ :param ~spinn_machine.CoreSubsets core_subsets: :param int n_cores: Number of cores being updated diff --git a/spinn_front_end_common/utilities/scp/update_runtime_process.py b/spinn_front_end_common/utilities/scp/update_runtime_process.py index fa4edd935a..e1385ea818 100644 --- a/spinn_front_end_common/utilities/scp/update_runtime_process.py +++ b/spinn_front_end_common/utilities/scp/update_runtime_process.py @@ -69,12 +69,12 @@ class UpdateRuntimeProcess(AbstractMultiConnectionProcess[CheckOKResponse]): __slots__ = () def __receive_response( - self, progress: ProgressBar, _response: CheckOKResponse): + self, progress: ProgressBar, _response: CheckOKResponse) -> None: progress.update() - def update_runtime( - self, current_time: int, run_time: int, infinite_run: bool, - core_subsets: CoreSubsets, n_cores: int, n_sync_steps: int): + def update_runtime(self, current_time: int, run_time: int, + infinite_run: bool, core_subsets: CoreSubsets, + n_cores: int, n_sync_steps: int) -> None: """ :param int current_time: :param int run_time: diff --git a/spinn_front_end_common/utilities/sqlite_db.py b/spinn_front_end_common/utilities/sqlite_db.py index 28cfe0045a..422cae3a7a 100644 --- a/spinn_front_end_common/utilities/sqlite_db.py +++ b/spinn_front_end_common/utilities/sqlite_db.py @@ -19,7 +19,11 @@ import pathlib import sqlite3 import struct -from typing import Optional, Type, Union +from types import TracebackType +from typing import Literal, Optional, Type, Union + +from typing_extensions import Self + from pacman.exceptions import PacmanValueError from spinn_front_end_common.utilities.exceptions import DatabaseException @@ -109,7 +113,7 @@ def __init__( The synchronisation level. Doesn't normally need to be altered. """ self.__db = None - self.__cursor = None + self.__cursor: Optional[sqlite3.Cursor] = None if database_file is None: self.__db = sqlite3.connect(":memory:") # Magic name! # in-memory DB is never read-only @@ -155,7 +159,7 @@ def __init__( self.__pragma("recursive_triggers", True) self.__pragma("trusted_schema", False) - def _context_entered(self): + def _context_entered(self) -> None: """ Work to do when the context is entered.
@@ -170,11 +174,13 @@ def _context_entered(self): self.__db.execute("BEGIN") self.__cursor = self.__db.cursor() - def __enter__(self): + def __enter__(self) -> Self: self._context_entered() return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type: Optional[Type], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Literal[False]: if self.__db is not None: if exc_type is None: self.__db.commit() @@ -182,6 +188,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.__db.rollback() self.__cursor = None self.close() + return False def __del__(self) -> None: self.close() @@ -197,7 +204,7 @@ def close(self) -> None: except AttributeError: self.__db = None - def __pragma(self, pragma_name: str, value: Union[bool, int, str]): + def __pragma(self, pragma_name: str, value: Union[bool, int, str]) -> None: """ Set a database ``PRAGMA``. See the `SQLite PRAGMA documentation `_ for details. @@ -227,33 +234,14 @@ def __pragma(self, pragma_name: str, value: Union[bool, int, str]): else: raise TypeError("can only set pragmas to bool, int or str") - def execute(self, sql, parameters=()): - """ - Executes a query by passing it to the database - - :param str sql: - :param parameters: - :raises DatabaseException: If there is no cursor. - Typically because database was used outside of a with + def cursor(self) -> sqlite3.Cursor: """ - if self.__cursor is None: - raise DatabaseException( - "This method should only be used inside a with") - return self.__cursor.execute(sql, parameters) - - def executemany(self, sql, parameters=()): - """ - Repeatedly executes a query by passing it to the database - - :param str sql: - :param parameters: - :raises DatabaseException: If there is no cursor. - Typically because database was used outside of a with + Gets the cursor created by the with statement """ if self.__cursor is None: raise DatabaseException( "This method should only be used inside a with") - return self.__cursor.executemany(sql, parameters) + return self.__cursor @property def lastrowid(self) -> int: @@ -267,6 +255,7 @@ def lastrowid(self) -> int: if self.__cursor is None: raise DatabaseException( "This method should only be used inside a with") + assert self.__cursor.lastrowid is not None return self.__cursor.lastrowid @property @@ -283,7 +272,7 @@ def rowcount(self) -> int: "This method should only be used inside a with") return self.__cursor.rowcount - def fetchone(self): + def fetchone(self) -> sqlite3.Row: """ Gets the fetchone from the last query run diff --git a/spinn_front_end_common/utilities/system_control_logic.py b/spinn_front_end_common/utilities/system_control_logic.py index e967f2afe6..472675f36a 100644 --- a/spinn_front_end_common/utilities/system_control_logic.py +++ b/spinn_front_end_common/utilities/system_control_logic.py @@ -33,7 +33,7 @@ def run_system_application( filename_template: str, binaries_to_track: Optional[List[str]] = None, progress_bar: Optional[ProgressBar] = None, logger: Optional[FormatAdapter] = None, - timeout: Optional[float] = None): + timeout: Optional[float] = None) -> None: """ Executes the given _system_ application. Used for on-chip expander, compressors, etc. 
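# Editor's note: a minimal sketch of the context-manager protocol the
# sqlite_db.py hunks above settle on. __enter__ returns Self, __exit__
# returns Literal[False] so type checkers know exceptions propagate,
# and cursor() replaces the removed execute()/executemany() wrappers.
# This toy omits pragmas, read-only mode and the explicit BEGIN.
import sqlite3
from types import TracebackType
from typing import Literal, Optional, Type
from typing_extensions import Self

class TinySQLiteDB:
    def __init__(self) -> None:
        self._db = sqlite3.connect(":memory:")
        self._cursor: Optional[sqlite3.Cursor] = None

    def __enter__(self) -> Self:
        self._cursor = self._db.cursor()
        return self

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> Literal[False]:
        # Commit on clean exit, roll back on error; never swallow.
        if exc_type is None:
            self._db.commit()
        else:
            self._db.rollback()
        self._cursor = None
        return False

    def cursor(self) -> sqlite3.Cursor:
        if self._cursor is None:
            raise RuntimeError("only usable inside a with block")
        return self._cursor

with TinySQLiteDB() as db:
    db.cursor().execute("CREATE TABLE t(v)")
    db.cursor().execute("INSERT INTO t VALUES (?)", (1,))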
@@ -124,7 +124,7 @@ def run_system_application( def _report_iobuf_messages( cores: ExecutableTargets, logger: Optional[FormatAdapter], - filename_template: str): + filename_template: str) -> None: """ :param ~spinnman.model.ExecutableTargets cores: :param ~logging.Logger logger: @@ -142,7 +142,8 @@ def _report_iobuf_messages( logger.error("{}", entry) -def _load_application(executable_targets: ExecutableTargets, app_id: int): +def _load_application( + executable_targets: ExecutableTargets, app_id: int) -> None: """ Execute a set of binaries that make up a complete application on specified cores, wait for them to be ready and then start all of the diff --git a/spinn_front_end_common/utilities/utility_calls.py b/spinn_front_end_common/utilities/utility_calls.py index 4b8ed6ba2e..a8502c1488 100644 --- a/spinn_front_end_common/utilities/utility_calls.py +++ b/spinn_front_end_common/utilities/utility_calls.py @@ -38,7 +38,7 @@ T = TypeVar("T") -def _mkdir(directory: str): +def _mkdir(directory: str) -> None: """ Make a directory if it doesn't exist. @@ -141,7 +141,7 @@ def parse_old_spalloc( def retarget_tag( connection: Union[SpallocEIEIOListener, SpallocEIEIOConnection, SCAMPConnection], x: int, y: int, tag: int, - ip_address: Optional[str] = None, strip: bool = True): + ip_address: Optional[str] = None, strip: bool = True) -> None: """ Make a tag deliver to the given connection. diff --git a/spinn_front_end_common/utilities/utility_objs/extra_monitor_scp_messages/get_reinjection_status_message.py b/spinn_front_end_common/utilities/utility_objs/extra_monitor_scp_messages/get_reinjection_status_message.py index 397979cae5..7e618fca84 100644 --- a/spinn_front_end_common/utilities/utility_objs/extra_monitor_scp_messages/get_reinjection_status_message.py +++ b/spinn_front_end_common/utilities/utility_objs/extra_monitor_scp_messages/get_reinjection_status_message.py @@ -32,7 +32,7 @@ class GetReinjectionStatusMessage( """ __slots__ = () - def __init__(self, x: int, y: int, p: int): + def __init__(self, x: int, y: int, p: int) -> None: """ :param int x: The x-coordinate of a chip, between 0 and 255 :param int y: The y-coordinate of a chip, between 0 and 255 @@ -66,7 +66,7 @@ def __init__(self, command_code: ReinjectorSCPCommands): self._command_code = command_code @overrides(AbstractSCPResponse.read_data_bytestring) - def read_data_bytestring(self, data: bytes, offset: int): + def read_data_bytestring(self, data: bytes, offset: int) -> None: result = self.scp_response_header.result if result != SCPResult.RC_OK: raise SpinnmanUnexpectedResponseCodeException( diff --git a/spinn_front_end_common/utilities/utility_objs/live_packet_gather_parameters.py b/spinn_front_end_common/utilities/utility_objs/live_packet_gather_parameters.py index 3b786c941f..398af931e6 100644 --- a/spinn_front_end_common/utilities/utility_objs/live_packet_gather_parameters.py +++ b/spinn_front_end_common/utilities/utility_objs/live_packet_gather_parameters.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Optional +from typing import Any, Optional from spinnman.messages.eieio import EIEIOType, EIEIOPrefix from pacman.model.resources.iptag_resource import IPtagResource from spinn_front_end_common.utilities.exceptions import ConfigurationException @@ -36,14 +36,20 @@ class LivePacketGatherParameters(object): "_received_key_mask", "_translate_keys", "_translated_key_right_shift") def __init__( - self, port: int, hostname: str, tag=None, strip_sdp=True, - use_prefix=False, key_prefix=None, prefix_type=None, - message_type=EIEIOType.KEY_32_BIT, right_shift=0, - payload_as_time_stamps=True, use_payload_prefix=True, - payload_prefix=None, payload_right_shift=0, - number_of_packets_sent_per_time_step=0, label=None, - received_key_mask=0xFFFFFFFF, - translate_keys=False, translated_key_right_shift=0) -> None: + self, port: int, hostname: str, tag: Optional[int] = None, + strip_sdp: bool = True, use_prefix: bool = False, + key_prefix: Optional[int] = None, + prefix_type: Optional[EIEIOPrefix] = None, + message_type: EIEIOType = EIEIOType.KEY_32_BIT, + right_shift: int = 0, + payload_as_time_stamps: bool = True, + use_payload_prefix: bool = True, + payload_prefix: Optional[int] = None, payload_right_shift: int = 0, + number_of_packets_sent_per_time_step: int = 0, + label: Optional[str] = None, + received_key_mask: int = 0xFFFFFFFF, + translate_keys: bool = False, + translated_key_right_shift: int = 0) -> None: """ :raises ConfigurationException: If the parameters passed are known to be an invalid combination. @@ -135,11 +141,9 @@ def use_prefix(self) -> bool: return self._use_prefix @property - def key_prefix(self) -> int: + def key_prefix(self) -> Optional[int]: """ The EIEIO key prefix to remove from messages. - - :rtype: int """ return self._key_prefix @@ -189,11 +193,9 @@ def use_payload_prefix(self) -> bool: return self._use_payload_prefix @property - def payload_prefix(self) -> int: + def payload_prefix(self) -> Optional[int]: """ The payload prefix to remove if applying compaction. - - :rtype: int """ return self._payload_prefix @@ -216,11 +218,9 @@ def number_of_packets_sent_per_time_step(self) -> int: return self._n_packets_per_time_step @property - def label(self) -> str: + def label(self) -> Optional[str]: """ A label. 
- - :rtype: str """ return self._label @@ -263,7 +263,9 @@ def get_iptag_resource(self) -> IPtagResource: strip_sdp=self.strip_sdp, tag=self.tag, traffic_identifier=TRAFFIC_IDENTIFIER) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: + if not isinstance(other, LivePacketGatherParameters): + return False return (self._port == other.port and self._hostname == other.hostname and self._tag == other.tag and @@ -286,10 +288,10 @@ def __eq__(self, other): self._translated_key_right_shift == other.translated_key_right_shift) - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: return not self.__eq__(other) - def __hash__(self): + def __hash__(self) -> int: data = ( self._port, self._tag, self._strip_sdp, self._use_prefix, self._key_prefix, self._prefix_type, self._message_type, diff --git a/spinn_front_end_common/utility_models/chip_power_monitor_machine_vertex.py b/spinn_front_end_common/utility_models/chip_power_monitor_machine_vertex.py index 4235f24d18..252c93a57b 100644 --- a/spinn_front_end_common/utility_models/chip_power_monitor_machine_vertex.py +++ b/spinn_front_end_common/utility_models/chip_power_monitor_machine_vertex.py @@ -122,8 +122,8 @@ def binary_file_name() -> str: return BINARY_FILE_NAME @overrides(AbstractGeneratesDataSpecification.generate_data_specification) - def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + def generate_data_specification(self, spec: DataSpecificationGenerator, + placement: Placement) -> None: spec.comment("\n*** Spec for ChipPowerMonitor Instance ***\n\n") # Construct the data images needed for the Neuron: @@ -136,7 +136,8 @@ def generate_data_specification( self.__write_recording_metadata(placement) - def _write_configuration_region(self, spec: DataSpecificationGenerator): + def _write_configuration_region( + self, spec: DataSpecificationGenerator) -> None: """ Write the data needed by the C code to configure itself. @@ -147,7 +148,7 @@ def _write_configuration_region(self, spec: DataSpecificationGenerator): spec.write_value(self.__n_samples_per_recording) spec.write_value(self.__sampling_frequency) - def _write_setup_info(self, spec): + def _write_setup_info(self, spec: DataSpecificationGenerator) -> None: """ Writes the system data as required. @@ -165,7 +166,8 @@ def _write_setup_info(self, spec): spec.write_array(recording_utilities.get_recording_header_array( recorded_region_sizes)) - def _reserve_memory_regions(self, spec): + def _reserve_memory_regions( + self, spec: DataSpecificationGenerator) -> None: """ Reserve the DSG memory regions as required. diff --git a/spinn_front_end_common/utility_models/command_sender.py b/spinn_front_end_common/utility_models/command_sender.py index d738d39421..4799916c74 100644 --- a/spinn_front_end_common/utility_models/command_sender.py +++ b/spinn_front_end_common/utility_models/command_sender.py @@ -45,7 +45,7 @@ def add_commands( self, start_resume_commands: Iterable[MultiCastCommand], pause_stop_commands: Iterable[MultiCastCommand], timed_commands: Iterable[MultiCastCommand], - vertex_to_send_to: AbstractVertex): + vertex_to_send_to: AbstractVertex) -> None: """ Add commands to be sent down a given edge. 
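# Editor's note: the typed equality protocol adopted for
# LivePacketGatherParameters above, reduced to two fields. __eq__ takes
# Any and guards with isinstance (returning False, as the hunk does,
# rather than NotImplemented), and __hash__ is built from the same
# fields so equal objects hash alike.
from typing import Any

class Params:
    def __init__(self, port: int, hostname: str) -> None:
        self.port = port
        self.hostname = hostname

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Params):
            return False
        return self.port == other.port and self.hostname == other.hostname

    def __ne__(self, other: Any) -> bool:
        return not self.__eq__(other)

    def __hash__(self) -> int:
        return hash((self.port, self.hostname))

assert Params(1, "a") == Params(1, "a") and Params(1, "a") != object()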
diff --git a/spinn_front_end_common/utility_models/command_sender_machine_vertex.py b/spinn_front_end_common/utility_models/command_sender_machine_vertex.py index 9f89d458b4..07fc03751e 100644 --- a/spinn_front_end_common/utility_models/command_sender_machine_vertex.py +++ b/spinn_front_end_common/utility_models/command_sender_machine_vertex.py @@ -116,7 +116,7 @@ def add_commands( self, start_resume_commands: Iterable[MultiCastCommand], pause_stop_commands: Iterable[MultiCastCommand], timed_commands: Iterable[MultiCastCommand], - vertex_to_send_to: AbstractVertex): + vertex_to_send_to: AbstractVertex) -> None: """ Add commands to be sent down a given edge. @@ -191,7 +191,8 @@ def sdram_required(self) -> AbstractSDRAM: @overrides( AbstractGeneratesDataSpecification.generate_data_specification) def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + self, spec: DataSpecificationGenerator, + placement: Placement) -> None: routing_infos = FecDataView.get_routing_infos() av = self.app_vertex assert av is not None @@ -242,7 +243,7 @@ def generate_data_specification( def _write_basic_commands( self, commands: List[MultiCastCommand], - spec: DataSpecificationGenerator): + spec: DataSpecificationGenerator) -> None: """ :param list(MultiCastCommand) commands: :param ~data_specification.DataSpecificationGenerator spec: @@ -256,7 +257,7 @@ def _write_basic_commands( def _write_timed_commands( self, timed_commands: List[MultiCastCommand], - spec: DataSpecificationGenerator): + spec: DataSpecificationGenerator) -> None: """ :param list(MultiCastCommand) timed_commands: :param ~data_specification.DataSpecificationGenerator spec: @@ -271,7 +272,7 @@ def _write_timed_commands( @classmethod def __write_command( cls, command: MultiCastCommand, - spec: DataSpecificationGenerator): + spec: DataSpecificationGenerator) -> None: """ :param MultiCastCommand command: :param ~data_specification.DataSpecificationGenerator spec: @@ -287,7 +288,7 @@ def __write_command( def _reserve_memory_regions( self, spec: DataSpecificationGenerator, time_command_size: int, - start_command_size: int, end_command_size: int): + start_command_size: int, end_command_size: int) -> None: """ Reserve SDRAM space for memory areas: @@ -399,7 +400,7 @@ def edges_and_partitions(self) -> Tuple[List[MachineEdge], List[str]]: parse_extra_provenance_items) def parse_extra_provenance_items( self, label: str, x: int, y: int, p: int, - provenance_data: Sequence[int]): + provenance_data: Sequence[int]) -> None: # pylint: disable=unused-argument n_commands_sent, = provenance_data with ProvenanceWriter() as db: diff --git a/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py b/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py index 6ca71cd6c3..391eccfcf0 100644 --- a/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py +++ b/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py @@ -184,7 +184,7 @@ class _DataInCommands(IntEnum): _PROVENANCE_DATA_SIZE: Final = _FOUR_WORDS.size -def ceildiv(dividend, divisor) -> int: +def ceildiv(dividend: float, divisor: int) -> int: """ How to divide two possibly-integer numbers and round up. 
""" @@ -323,7 +323,7 @@ def __init__(self, x: int, y: int, ip_address: str): self._last_status: Optional[ReInjectionStatus] = None def __throttled_send( - self, message: SDPMessage, connection: SCAMPConnection): + self, message: SDPMessage, connection: SCAMPConnection) -> None: """ Slows down transmissions to allow SpiNNaker to keep up. @@ -363,8 +363,8 @@ def get_binary_start_type(self) -> ExecutableType: return ExecutableType.SYSTEM @overrides(AbstractGeneratesDataSpecification.generate_data_specification) - def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + def generate_data_specification(self, spec: DataSpecificationGenerator, + placement: Placement) -> None: # pylint: disable=unsubscriptable-object # update my placement for future knowledge self.__placement = placement @@ -430,7 +430,8 @@ def _placement(self) -> Placement: raise SpinnFrontEndException("placement not known") return self.__placement - def _reserve_memory_regions(self, spec: DataSpecificationGenerator): + def _reserve_memory_regions( + self, spec: DataSpecificationGenerator) -> None: """ Writes the DSG regions memory sizes. Static so that it can be used by the application vertex. @@ -454,8 +455,8 @@ def get_binary_file_name(self) -> str: return "data_speed_up_packet_gatherer.aplx" def _generate_data_in_report( - self, time_diff, data_size: int, x: int, y: int, - address_written_to: int, missing_seq_nums): + self, time_diff: datetime.timedelta, data_size: int, x: int, + y: int, address_written_to: int) -> None: """ Writes the data in report for this stage. @@ -467,8 +468,6 @@ def _generate_data_in_report( :param int y: the location in machine where the data was written to Y axis :param int address_written_to: where in SDRAM it was written to - :param list(set(int)) missing_seq_nums: - the set of missing sequence numbers per data transmission attempt """ dir_path = FecDataView.get_run_dir_path() in_report_path = os.path.join(dir_path, self.IN_REPORT_NAME) @@ -493,13 +492,14 @@ def _generate_data_in_report( with open(in_report_path, "a", encoding="utf-8") as writer: writer.write( f"{x}\t\t {y}\t\t {address_written_to}\t\t {data_size}\t\t" - f"\t\t {time_took_ms}\t\t\t {mbs}\t\t {missing_seq_nums}\n") + f"\t\t {time_took_ms}\t\t\t {mbs}\t\t " + f"{self._missing_seq_nums_data_in}\n") def send_data_into_spinnaker( self, x: int, y: int, base_address: int, data: Union[BinaryIO, bytes, str, int], *, n_bytes: Optional[int] = None, offset: int = 0, - cpu: int = 0): # pylint: disable=unused-argument + cpu: int = 0) -> None: # pylint: disable=unused-argument """ Sends a block of data into SpiNNaker to a given chip. @@ -555,13 +555,12 @@ def send_data_into_spinnaker( if get_config_bool("Reports", "write_data_speed_up_reports"): self._generate_data_in_report( x=x, y=y, time_diff=end - start, - data_size=n_bytes, address_written_to=base_address, - missing_seq_nums=self._missing_seq_nums_data_in) + data_size=n_bytes, address_written_to=base_address) @staticmethod def __verify_sent_data( original_data: bytes, verified_data: bytes, x: int, y: int, - base_address: int, n_bytes: int): + base_address: int, n_bytes: int) -> None: if original_data != verified_data: log.error("VARIANCE: chip:{},{} address:{} len:{}", x, y, base_address, n_bytes) @@ -609,7 +608,7 @@ def __open_connection(self) -> SCAMPConnection: def _send_data_via_extra_monitors( self, destination_chip: Chip, start_address: int, - data_to_write: bytes): + data_to_write: bytes) -> None: """ Sends data using the extra monitor cores. 
@@ -745,7 +744,7 @@ def _read_in_missing_seq_nums( def _outgoing_retransmit_missing_seq_nums( self, data_to_write: bytes, missing: Set[int], - connection: SCAMPConnection): + connection: SCAMPConnection) -> None: """ Transmits back into SpiNNaker the missing data based off missing sequence numbers. @@ -818,7 +817,8 @@ def __make_data_in_stream_message( # return message for sending, and the length in data sent return self.__make_data_in_message(packet_data), packet_data_length - def __send_location(self, start_address: int, connection: SCAMPConnection): + def __send_location( + self, start_address: int, connection: SCAMPConnection) -> None: """ Send location as separate message. @@ -843,7 +843,7 @@ def __send_tell_flag(self, connection: SCAMPConnection) -> None: def _send_all_data_based_packets( self, data_to_write: bytes, start_address: int, - connection: SCAMPConnection): + connection: SCAMPConnection) -> None: """ Send all the data as one block. @@ -907,7 +907,7 @@ def load_system_routing_tables() -> None: """ FecDataView.get_monitor_by_xy(0, 0).load_system_mc_routes() - def set_router_wait1_timeout(self, timeout: Tuple[int, int]): + def set_router_wait1_timeout(self, timeout: Tuple[int, int]) -> None: """ Set the wait1 field for a set of routers. @@ -926,7 +926,7 @@ def set_router_wait1_timeout(self, timeout: Tuple[int, int]): self, FecDataView.get_placement_of_vertex(self)) raise - def set_router_wait2_timeout(self, timeout: Tuple[int, int]): + def set_router_wait2_timeout(self, timeout: Tuple[int, int]) -> None: """ Set the wait2 field for a set of routers. @@ -1144,7 +1144,7 @@ def __describe_fixed_route_from(placement: Placement) -> List[XY]: entry = fixed_routes[(link.destination_x, link.destination_y)] return routers - def _report_routers_used_for_out(self, placement: Placement): + def _report_routers_used_for_out(self, placement: Placement) -> None: """ Write the used routers into a report. @@ -1349,7 +1349,8 @@ def __offset(seq_num: int) -> int: def __write_into_view( self, view_start_position: int, view_end_position: int, - data: bytes, data_start_position: int, data_end_position: int): + data: bytes, data_start_position: int, + data_end_position: int) -> None: """ Puts data into the view. 
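# Editor's note: __write_into_view above is typed but its body is not in
# the diff; a hedged sketch of what copying a packet slice into a
# preallocated buffer looks like, with the length check such code needs.
def write_into_view(view: bytearray, view_start: int, view_end: int,
                    data: bytes, data_start: int, data_end: int) -> None:
    if view_end - view_start != data_end - data_start:
        raise ValueError("source and destination spans differ in length")
    # Slice assignment copies without reallocating the buffer.
    view[view_start:view_end] = data[data_start:data_end]

buf = bytearray(8)
write_into_view(buf, 2, 5, b"\x01\x02\x03", 0, 3)
assert buf == bytearray(b"\x00\x00\x01\x02\x03\x00\x00\x00")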
@@ -1411,7 +1412,7 @@ def __provenance_address(x: int, y: int, p: int) -> int: @overrides(AbstractProvidesProvenanceDataFromMachine .get_provenance_data_from_machine) - def get_provenance_data_from_machine(self, placement: Placement): + def get_provenance_data_from_machine(self, placement: Placement) -> None: x, y, p = placement.x, placement.y, placement.p # Get the App Data for the core data = FecDataView.read_memory( diff --git a/spinn_front_end_common/utility_models/extra_monitor_support_machine_vertex.py b/spinn_front_end_common/utility_models/extra_monitor_support_machine_vertex.py index 6b6343489c..f440c76458 100644 --- a/spinn_front_end_common/utility_models/extra_monitor_support_machine_vertex.py +++ b/spinn_front_end_common/utility_models/extra_monitor_support_machine_vertex.py @@ -16,7 +16,8 @@ import logging import struct # pylint: disable=no-name-in-module -from typing import Dict, Iterable, Optional, ContextManager +from typing import Dict, Iterable, Optional, ContextManager, Type +from types import TracebackType from typing_extensions import Literal @@ -263,8 +264,8 @@ def static_get_binary_file_name() -> str: return "extra_monitor_support.aplx" @overrides(AbstractGeneratesDataSpecification.generate_data_specification) - def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + def generate_data_specification(self, spec: DataSpecificationGenerator, + placement: Placement) -> None: # storing for future usage self.__placement = placement chip = placement.chip @@ -279,7 +280,7 @@ def generate_data_specification( spec.end_specification() def _generate_data_speed_up_out_config( - self, spec: DataSpecificationGenerator): + self, spec: DataSpecificationGenerator) -> None: """ :param ~.DataSpecificationGenerator spec: spec file """ @@ -295,7 +296,7 @@ def _generate_data_speed_up_out_config( spec.write_value(Gatherer.END_FLAG_KEY) def _generate_reinjection_config( - self, spec: DataSpecificationGenerator, chip: Chip): + self, spec: DataSpecificationGenerator, chip: Chip) -> None: """ :param ~.DataSpecificationGenerator spec: spec file :param ~.Chip chip: @@ -322,7 +323,7 @@ def _generate_reinjection_config( chip.nearest_ethernet_x, chip.nearest_ethernet_y]) def _generate_data_speed_up_in_config( - self, spec: DataSpecificationGenerator, chip: Chip): + self, spec: DataSpecificationGenerator, chip: Chip) -> None: """ :param ~.DataSpecificationGenerator spec: spec file :param ~.Chip chip: the chip where this monitor will run @@ -365,7 +366,8 @@ def __encode_route(self, entry: MulticastRoutingEntry) -> int: route |= Router.convert_routing_table_entry_to_spinnaker_route(entry) return route - def _generate_provenance_area(self, spec: DataSpecificationGenerator): + def _generate_provenance_area( + self, spec: DataSpecificationGenerator) -> None: """ :param ~.DataSpecificationGenerator spec: spec file """ @@ -392,7 +394,7 @@ def __provenance_address(self, place: Placement) -> int: @overrides(AbstractProvidesProvenanceDataFromMachine. get_provenance_data_from_machine) - def get_provenance_data_from_machine(self, placement: Placement): + def get_provenance_data_from_machine(self, placement: Placement) -> None: # No standard provenance region, so no standard provenance data # But we do have our own. 
x, y = placement.x, placement.y @@ -415,7 +417,7 @@ def __recover(self) -> ContextManager[Placement]: return _Recoverer(self, self.placement) def reset_reinjection_counters(self, extra_monitor_cores_to_set: Iterable[ - ExtraMonitorSupportMachineVertex]): + ExtraMonitorSupportMachineVertex]) -> None: """ Resets the counters for reinjection. @@ -463,7 +465,7 @@ def set_reinjection_packets( self, point_to_point: Optional[bool] = None, multicast: Optional[bool] = None, nearest_neighbour: Optional[bool] = None, - fixed_route: Optional[bool] = None): + fixed_route: Optional[bool] = None) -> None: """ :param point_to_point: If point to point should be set, or `None` if left as before @@ -547,7 +549,9 @@ def __init__(self, vtx: ExtraMonitorSupportMachineVertex, def __enter__(self) -> Placement: return self.__placement - def __exit__(self, exc_type, exc_val, exc_tb) -> Literal[False]: + def __exit__(self, exc_type: Optional[Type], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Literal[False]: if exc_val: emergency_recover_state_from_failure(self.__vtx, self.__placement) return False diff --git a/spinn_front_end_common/utility_models/live_packet_gather.py b/spinn_front_end_common/utility_models/live_packet_gather.py index 948b64777f..a04cf85c46 100644 --- a/spinn_front_end_common/utility_models/live_packet_gather.py +++ b/spinn_front_end_common/utility_models/live_packet_gather.py @@ -47,7 +47,7 @@ def __init__(self) -> None: self.__targeted_lpgs: Set[Tuple[ LivePacketGatherMachineVertex, MachineVertex, str]] = set() - def create_sys_vertices(self, system_placements: Placements): + def create_sys_vertices(self, system_placements: Placements) -> None: """ Special way of making LPG machine vertices, where one is placed on each Ethernet-enabled chip. @@ -66,7 +66,7 @@ def create_sys_vertices(self, system_placements: Placements): self.__m_vertices_by_ethernet[eth.x, eth.y] = lpg_vtx @overrides(AbstractSplitterCommon.create_machine_vertices) - def create_machine_vertices(self, chip_counter: ChipCounter): + def create_machine_vertices(self, chip_counter: ChipCounter) -> None: # Skip here, and do later! This is a special case... pass diff --git a/spinn_front_end_common/utility_models/live_packet_gather_machine_vertex.py b/spinn_front_end_common/utility_models/live_packet_gather_machine_vertex.py index 9003301737..eca899b300 100644 --- a/spinn_front_end_common/utility_models/live_packet_gather_machine_vertex.py +++ b/spinn_front_end_common/utility_models/live_packet_gather_machine_vertex.py @@ -85,7 +85,8 @@ def __init__( self._lpg_params = lpg_params self._incoming_sources: List[Tuple[MachineVertex, str]] = list() - def add_incoming_source(self, m_vertex: MachineVertex, partition_id: str): + def add_incoming_source( + self, m_vertex: MachineVertex, partition_id: str) -> None: """ Add a machine vertex source incoming into this gatherer. 
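# Editor's note: the _Recoverer hunk below now types __exit__ fully. In
# isolation the idiom is: run a recovery hook when the body raises, then
# return False (Literal[False]) so the exception still propagates. The
# hook here stands in for emergency_recover_state_from_failure.
from types import TracebackType
from typing import Callable, Literal, Optional, Type

class Recoverer:
    def __init__(self, on_error: Callable[[], None]) -> None:
        self._on_error = on_error

    def __enter__(self) -> None:
        return None

    def __exit__(self, exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> Literal[False]:
        if exc_val:
            self._on_error()  # salvage state before the error escapes
        return False

try:
    with Recoverer(lambda: print("recovering")):
        raise RuntimeError("core crashed")
except RuntimeError:
    pass  # recovery ran, but the exception still escaped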
@@ -130,7 +131,7 @@ def iptags(self) -> List[IPtagResource]:
ProvidesProvenanceDataFromMachineImpl.parse_extra_provenance_items)
def parse_extra_provenance_items(
self, label: str, x: int, y: int, p: int,
- provenance_data: Sequence[int]):
+ provenance_data: Sequence[int]) -> None:
(lost, lost_payload, events, messages) = provenance_data
with ProvenanceWriter() as db:
@@ -168,8 +169,8 @@ def get_binary_start_type(self) -> ExecutableType:
@overrides(
AbstractGeneratesDataSpecification.generate_data_specification)
- def generate_data_specification(
- self, spec: DataSpecificationGenerator, placement: Placement):
+ def generate_data_specification(self, spec: DataSpecificationGenerator,
+ placement: Placement) -> None:
tags = FecDataView.get_tags().get_ip_tags_for_vertex(self)
assert tags is not None
@@ -183,7 +184,8 @@ def generate_data_specification(
# End-of-Spec:
spec.end_specification()
- def _reserve_memory_regions(self, spec: DataSpecificationGenerator):
+ def _reserve_memory_regions(
+ self, spec: DataSpecificationGenerator) -> None:
"""
Reserve SDRAM space for memory areas.
@@ -201,8 +203,8 @@ def _reserve_memory_regions(self, spec: DataSpecificationGenerator):
label='config')
self.reserve_provenance_data_region(spec)
- def _write_configuration_region(
- self, spec: DataSpecificationGenerator, iptags: List[IPTag]):
+ def _write_configuration_region(self, spec: DataSpecificationGenerator,
+ iptags: List[IPTag]) -> None:
"""
Write the configuration region to the spec.
@@ -258,7 +260,7 @@ def _write_configuration_region(
spec.write_value(r_info.mask)
spec.write_value(vertex.vertex_slice.lo_atom)
- def _write_setup_info(self, spec):
+ def _write_setup_info(self, spec: DataSpecificationGenerator) -> None:
"""
Write basic info to the system region.
diff --git a/spinn_front_end_common/utility_models/reverse_ip_tag_multi_cast_source.py b/spinn_front_end_common/utility_models/reverse_ip_tag_multi_cast_source.py
index 641d0ca3a0..ae7ac9c2a1 100644
--- a/spinn_front_end_common/utility_models/reverse_ip_tag_multi_cast_source.py
+++ b/spinn_front_end_common/utility_models/reverse_ip_tag_multi_cast_source.py
@@ -173,7 +173,7 @@ def send_buffer_times(self) -> _SendBufferTimes:
return self.__send_buffer_times
@send_buffer_times.setter
- def send_buffer_times(self, send_buffer_times: _SendBufferTimes):
+ def send_buffer_times(self, send_buffer_times: _SendBufferTimes) -> None:
self.__send_buffer_times = send_buffer_times
for vertex in self.machine_vertices:
send_buffer_times_to_set = self.__send_buffer_times
@@ -183,7 +183,7 @@ def send_buffer_times(self, send_buffer_times: _SendBufferTimes):
vertex_slice.get_raster_ids()]
vertex.send_buffer_times = send_buffer_times_to_set
- def enable_recording(self, new_state: bool = True):
+ def enable_recording(self, new_state: bool = True) -> None:
"""
Turns on or off the recording for this vertex.
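The hunks above all apply one mechanical change: methods and property setters that return nothing gain an explicit `-> None`, which is what `mypy --disallow-untyped-defs` requires before it treats a definition as fully typed. A minimal sketch of the pattern, using an invented `Recorder` class rather than anything from this changeset:

    class Recorder:
        """Illustrative sketch only; not a class from this changeset."""

        def __init__(self) -> None:
            self._is_recording = False

        @property
        def is_recording(self) -> bool:
            return self._is_recording

        @is_recording.setter
        def is_recording(self, new_state: bool) -> None:
            # Without the explicit "-> None", mypy --disallow-untyped-defs
            # reports this setter as having incomplete type annotations.
            self._is_recording = new_state

The same reasoning applies to the abstract methods touched here: annotating the `raise NotImplementedError` stubs with a return type gives subclass overrides a fully typed signature to be checked against.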
diff --git a/spinn_front_end_common/utility_models/reverse_ip_tag_multicast_source_machine_vertex.py b/spinn_front_end_common/utility_models/reverse_ip_tag_multicast_source_machine_vertex.py index 0f81970c27..9255b8b274 100644 --- a/spinn_front_end_common/utility_models/reverse_ip_tag_multicast_source_machine_vertex.py +++ b/spinn_front_end_common/utility_models/reverse_ip_tag_multicast_source_machine_vertex.py @@ -317,7 +317,7 @@ def _recording_sdram_per_timestep( return ((header_size + EIEIOType.KEY_32_BIT.key_bytes) * keys_per_timestep) - def _install_send_buffer(self, send_buffer_times: _SBT): + def _install_send_buffer(self, send_buffer_times: _SBT) -> None: """ :param ~numpy.ndarray send_buffer_times: """ @@ -339,7 +339,7 @@ def _clear_send_buffer(self) -> None: self._send_buffer_times = None self._send_buffers = {} - def _install_virtual_key(self, n_keys: int): + def _install_virtual_key(self, n_keys: int) -> None: """ :param int n_keys: """ @@ -443,7 +443,7 @@ def send_buffer_times(self) -> _SendBufferTimes: return self._send_buffer_times @send_buffer_times.setter - def send_buffer_times(self, send_buffer_times: _SendBufferTimes): + def send_buffer_times(self, send_buffer_times: _SendBufferTimes) -> None: """ :type send_buffer_times: ~numpy.ndarray(~numpy.ndarray(numpy.int32)) or @@ -477,7 +477,7 @@ def _fill_send_buffer(self) -> None: # Work with a single list self._fill_send_buffer_1d(key_to_send) - def _fill_send_buffer_2d(self, key_base: int): + def _fill_send_buffer_2d(self, key_base: int) -> None: """ Add the keys with different times for each atom. Can be overridden to override keys. @@ -497,7 +497,7 @@ def _fill_send_buffer_2d(self, key_base: int): if first_time_step <= tick < end_time_step: self._send_buffer.add_key(tick, keys[atom]) - def _fill_send_buffer_1d(self, key_base: int): + def _fill_send_buffer_1d(self, key_base: int) -> None: """ Add the keys from the given vertex slice within the given time range into the given send buffer, with the same times for each @@ -539,7 +539,7 @@ def calculate_mask(n_neurons: int) -> int: mask = 0xFFFFFFFF - max_key return mask - def enable_recording(self, new_state: bool = True): + def enable_recording(self, new_state: bool = True) -> None: """ Enable recording of the keys sent. 
@@ -547,7 +547,7 @@ def enable_recording(self, new_state: bool = True):
"""
self._is_recording = new_state
- def _reserve_regions(self, spec: DataSpecificationGenerator):
+ def _reserve_regions(self, spec: DataSpecificationGenerator) -> None:
"""
:param ~.DataSpecificationGenerator spec:
"""
@@ -595,7 +595,7 @@ def update_virtual_key(self) -> None:
self._prefix_type = EIEIOPrefix.UPPER_HALF_WORD
self._prefix = self._virtual_key
- def _write_configuration(self, spec: DataSpecificationGenerator):
+ def _write_configuration(self, spec: DataSpecificationGenerator) -> None:
"""
:param ~.DataSpecificationGenerator spec:
"""
@@ -657,8 +657,8 @@ def _write_configuration(self, spec: DataSpecificationGenerator):
ReverseIPTagMulticastSourceMachineVertex._n_data_specs += 1
@overrides(AbstractGeneratesDataSpecification.generate_data_specification)
- def generate_data_specification(
- self, spec: DataSpecificationGenerator, placement: Placement):
+ def generate_data_specification(self, spec: DataSpecificationGenerator,
+ placement: Placement) -> None:
self.update_virtual_key()
# Reserve regions
@@ -753,7 +753,7 @@ def send_buffers(self) -> Dict[int, BufferedSendingRegion]:
return self._send_buffers
@send_buffers.setter
- def send_buffers(self, value: Dict[int, BufferedSendingRegion]):
+ def send_buffers(self, value: Dict[int, BufferedSendingRegion]) -> None:
self._send_buffers = value
@overrides(SendsBuffersFromHostPreBufferedImpl.get_regions)
@@ -764,7 +764,7 @@ def get_regions(self) -> Collection[int]:
return self._send_buffers.keys()
@overrides(SendsBuffersFromHostPreBufferedImpl.rewind)
- def rewind(self, region: int):
+ def rewind(self, region: int) -> None:
# reset these so fill send buffer will run when send_buffers is called
self._first_machine_time_step = None
self._run_until_timesteps = None
@@ -790,7 +790,7 @@ def get_region_buffer_size(self, region: int) -> int:
ProvidesProvenanceDataFromMachineImpl.parse_extra_provenance_items)
def parse_extra_provenance_items(
self, label: str, x: int, y: int, p: int,
- provenance_data: Sequence[int]):
+ provenance_data: Sequence[int]) -> None:
n_rcv, n_snt, bad_key, bad_pkt, late = provenance_data
with ProvenanceWriter() as db:
diff --git a/spinn_front_end_common/utility_models/streaming_context_manager.py b/spinn_front_end_common/utility_models/streaming_context_manager.py
index 4eb429925b..ec0c4f7dbd 100644
--- a/spinn_front_end_common/utility_models/streaming_context_manager.py
+++ b/spinn_front_end_common/utility_models/streaming_context_manager.py
@@ -13,7 +13,8 @@
# limitations under the License.
from __future__ import annotations # pylint: disable=no-name-in-module -from typing import ContextManager, Iterable, TYPE_CHECKING +from types import TracebackType +from typing import ContextManager, Iterable, Optional, Type, TYPE_CHECKING from typing_extensions import Literal if TYPE_CHECKING: from .data_speed_up_packet_gatherer_machine_vertex import ( @@ -40,7 +41,9 @@ def __enter__(self) -> None: for gatherer in self._gatherers: gatherer.set_cores_for_data_streaming() - def __exit__(self, _type, _value, _tb) -> Literal[False]: + def __exit__(self, exc_type: Optional[Type], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType]) -> Literal[False]: for gatherer in self._gatherers: gatherer.unset_cores_for_data_streaming() for gatherer in self._gatherers: diff --git a/unittests/interface/interface_functions/test_front_end_common_dsg_region_reloader.py b/unittests/interface/interface_functions/test_front_end_common_dsg_region_reloader.py index f6f9f9450b..bb1b0be71b 100644 --- a/unittests/interface/interface_functions/test_front_end_common_dsg_region_reloader.py +++ b/unittests/interface/interface_functions/test_front_end_common_dsg_region_reloader.py @@ -68,12 +68,12 @@ def reload_required(self) -> bool: return self._requires_regions_to_be_reloaded @overrides(AbstractRewritesDataSpecification.set_reload_required) - def set_reload_required(self, new_value: bool): + def set_reload_required(self, new_value: bool) -> None: self._requires_regions_to_be_reloaded = new_value @overrides(AbstractRewritesDataSpecification.regenerate_data_specification) - def regenerate_data_specification( - self, spec: DataSpecificationReloader, placement: Placement): + def regenerate_data_specification(self, spec: DataSpecificationReloader, + placement: Placement) -> None: global regenerate_call_count for region_id, size, data in reload_region_data[placement.p]: spec.reserve_memory_region(region_id, size) @@ -91,8 +91,8 @@ def get_binary_start_type(self) -> ExecutableType: return ExecutableType.USES_SIMULATION_INTERFACE @overrides(AbstractGeneratesDataSpecification.generate_data_specification) - def generate_data_specification( - self, spec: DataSpecificationGenerator, placement: Placement): + def generate_data_specification(self, spec: DataSpecificationGenerator, + placement: Placement) -> None: raise NotImplementedError()
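The typed `__exit__` signatures introduced above, in `_Recoverer` and in the streaming context manager, follow the standard typing of the context-manager protocol. A self-contained sketch of the same pattern, with an invented class name; note the patch leaves `Type` unparameterised, where `Type[BaseException]` is the fully precise form:

    from types import TracebackType
    from typing import Optional, Type

    from typing_extensions import Literal


    class NonSuppressingContext:
        """Illustrative sketch only; not a class from this changeset."""

        def __enter__(self) -> "NonSuppressingContext":
            return self

        def __exit__(self, exc_type: Optional[Type[BaseException]],
                     exc_val: Optional[BaseException],
                     exc_tb: Optional[TracebackType]) -> Literal[False]:
            # All three arguments are None when the with-block exits
            # cleanly, hence Optional.  Returning Literal[False] rather
            # than bool tells the type checker this context manager never
            # suppresses an exception, which keeps reachability analysis
            # after the with-block precise.
            return False

Declaring the return type as `Literal[False]` instead of `bool` is what allows mypy to accept these classes where a `ContextManager[...]` is expected while still knowing that exceptions raised in the body always propagate.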