Typing for FEC #1245

Merged: 33 commits (merged Dec 30, 2024)

Commits (33):
e590436  mypy.bash files (Christian-B, Nov 13, 2024)
ef743bb  typing (Christian-B, Nov 13, 2024)
e262e48  return after function call not the result of the call (Christian-B, Nov 14, 2024)
be6cf7d  typing (Christian-B, Nov 14, 2024)
0745a8f  typing (Christian-B, Nov 14, 2024)
0d558cc  ApplicationVertex never generate dataSpecs (Christian-B, Nov 14, 2024)
55d12eb  typing (Christian-B, Nov 14, 2024)
06e3040  typing (Christian-B, Nov 15, 2024)
a543036  typing (Christian-B, Nov 18, 2024)
ba27d77  merged in master (Christian-B, Nov 19, 2024)
599b926  pass params not kwarg to job (Christian-B, Nov 27, 2024)
a46191f  typing (Christian-B, Nov 27, 2024)
cc6e3d2  merged in master (Christian-B, Nov 27, 2024)
f5a82e8  merged in master (Christian-B, Nov 27, 2024)
7e04ddd  merged in master (Christian-B, Dec 6, 2024)
ecdbefe  merged in master (Christian-B, Dec 6, 2024)
ee021ca  Merge branch 't_spalloc' into t_fec (Christian-B, Dec 16, 2024)
c9c09bc  typing (Christian-B, Dec 16, 2024)
5b2b81a  return None (Christian-B, Dec 16, 2024)
810c593  remove kwarg methods (Christian-B, Dec 16, 2024)
383bb67  typing (Christian-B, Dec 16, 2024)
0e0c0ce  remove pass through kwargs methods (Christian-B, Dec 16, 2024)
2bf7ead  router_provenance the_value is an int (Christian-B, Dec 16, 2024)
98907c7  flake8 (Christian-B, Dec 16, 2024)
c59eebe  flake8 (Christian-B, Dec 16, 2024)
197f549  flake8 (Christian-B, Dec 16, 2024)
ac16965  fixes found by typing (Christian-B, Dec 16, 2024)
339c0d0  mypy fec_integration_tests (Christian-B, Dec 16, 2024)
461ddc9  pylint fixes (Christian-B, Dec 17, 2024)
b38bd97  mypy-full_packages (Christian-B, Dec 17, 2024)
96eba89  merged in master (Christian-B, Dec 18, 2024)
84fa138  Merge branch 'master' into t_fec (Christian-B, Dec 18, 2024)
984b4ca  merged in remote/t_fec (Christian-B, Dec 18, 2024)
3 changes: 2 additions & 1 deletion .github/workflows/python_actions.yml
@@ -27,5 +27,6 @@ jobs:
coverage-package: spinn_front_end_common
flake8-packages: spinn_front_end_common unittests fec_integration_tests
pylint-packages: spinn_front_end_common
mypy-packages: spinn_front_end_common unittests fec_integration_tests
mypy-packages: unittests fec_integration_tests
mypy-full_packages: spinn_front_end_common
secrets: inherit
2 changes: 1 addition & 1 deletion mypy.bash
@@ -25,4 +25,4 @@ man="../SpiNNMan/spinnman"
pacman="../PACMAN/pacman"
spalloc="../spalloc/spalloc_client"

mypy --python-version 3.8 $$utils $machine $man $pacman $spalloc spinn_front_end_common
mypy --python-version 3.8 $utils $machine $man $pacman $spalloc spinn_front_end_common unittests fec_integration_tests
29 changes: 29 additions & 0 deletions mypyd.bash
@@ -0,0 +1,29 @@
#!/bin/bash

# Copyright (c) 2024 The University of Manchester
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This bash assumes that other repositories are installed in paralled

# requires the latest mypy
# pip install --upgrade mypy

utils="../SpiNNUtils/spinn_utilities"
machine="../SpiNNMachine/spinn_machine"
man="../SpiNNMan/spinnman"
pacman="../PACMAN/pacman"
spalloc="../spalloc/spalloc_client"

mypy --python-version 3.8 --disallow-untyped-defs $utils $machine $man $pacman $spalloc spinn_front_end_common
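
The `--disallow-untyped-defs` flag added in this script is what drives most of the `-> None` annotations in the rest of this PR: under that flag, mypy rejects any function definition whose signature is not fully annotated, even when the function returns nothing. A minimal sketch of the difference, using hypothetical names rather than code from this repository:

```python
class Example:
    """Illustration only; not a class from spinn_front_end_common."""

    # Without --disallow-untyped-defs, mypy accepts this silently.
    # With the flag, it reports an error along the lines of
    # "Function is missing a return type annotation".
    def set_value_loose(self, value: int):
        self._value = value

    # Fully annotated: the explicit "-> None" is exactly the kind of
    # change applied to the setters and abstract methods in this diff.
    def set_value(self, value: int) -> None:
        self._value = value
```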

@@ -14,8 +14,12 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from spinn_utilities.abstract_base import AbstractBase, abstractmethod
from spinn_utilities.overrides import overrides
from spinn_utilities.require_subclass import require_subclass
from pacman.model.graphs.machine import MachineVertex
from pacman.model.placements import Placement
from pacman.model.resources import AbstractSDRAM

from .abstract_has_associated_binary import AbstractHasAssociatedBinary
if TYPE_CHECKING:
from spinn_front_end_common.interface.ds import DataSpecificationGenerator
@@ -30,8 +34,8 @@ class AbstractGeneratesDataSpecification(object, metaclass=AbstractBase):
__slots__ = ()

@abstractmethod
def generate_data_specification(
self, spec: DataSpecificationGenerator, placement: Placement):
def generate_data_specification(self, spec: DataSpecificationGenerator,
placement: Placement) -> None:
"""
Generate a data specification.

@@ -41,3 +45,10 @@ def generate_data_specification(
The placement the vertex is located at
"""
raise NotImplementedError

@property
@overrides(MachineVertex.sdram_required)
@abstractmethod
def sdram_required(self) -> AbstractSDRAM:
# pylint: disable=missing-function-docstring
raise NotImplementedError
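
For context on the new abstract `sdram_required` property above: stacking `@property` and `@abstractmethod` (plus the `@overrides` check from spinn_utilities) forces every concrete vertex to supply a fully typed implementation. A simplified, self-contained sketch of the pattern, using plain `abc` and an `int` in place of `AbstractSDRAM` (all names here are hypothetical, not taken from the real code):

```python
from abc import ABC, abstractmethod


class HasSdram(ABC):
    """Stand-in for the abstract base class; not the real interface."""

    @property
    @abstractmethod
    def sdram_required(self) -> int:
        """Simplified: the real property returns an AbstractSDRAM."""
        raise NotImplementedError


class FixedSdramVertex(HasSdram):
    """A conforming subclass: the property is concrete and fully typed."""

    def __init__(self, n_bytes: int) -> None:
        self._n_bytes = n_bytes

    @property
    def sdram_required(self) -> int:
        return self._n_bytes


print(FixedSdramVertex(1024).sdram_required)  # prints 1024
```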
@@ -32,8 +32,8 @@ class AbstractRewritesDataSpecification(object, metaclass=AbstractBase):
__slots__ = ()

@abstractmethod
def regenerate_data_specification(
self, spec: DataSpecificationReloader, placement: Placement):
def regenerate_data_specification(self, spec: DataSpecificationReloader,
placement: Placement) -> None:
"""
Regenerate the data specification, only generating regions that
have changed and need to be reloaded.
@@ -55,7 +55,7 @@ def reload_required(self) -> bool:
raise NotImplementedError

@abstractmethod
def set_reload_required(self, new_value: bool):
def set_reload_required(self, new_value: bool) -> None:
"""
Indicate that the regions have been reloaded.

@@ -51,7 +51,7 @@ def __init__(self, thread_name: str, hostname: Optional[str] = None,
thread.start()

@abstractmethod
def extend_allocation(self, new_total_run_time: float):
def extend_allocation(self, new_total_run_time: float) -> None:
"""
Extend the allocation of the machine from the original run time.
@@ -191,7 +191,7 @@ def proxying(self) -> bool:
"""
return False

def make_report(self, filename: str):
def make_report(self, filename: str) -> None:
"""
Asks the controller to make a report of details of allocations.
By default, this does nothing.
@@ -32,8 +32,8 @@ class MachineDataSpecableVertex(
__slots__ = ()

@overrides(AbstractGeneratesDataSpecification.generate_data_specification)
def generate_data_specification(
self, spec: DataSpecificationGenerator, placement: Placement):
def generate_data_specification(self, spec: DataSpecificationGenerator,
placement: Placement) -> None:
tags = FecDataView.get_tags()
iptags = tags.get_ip_tags_for_vertex(placement.vertex)
reverse_iptags = tags.get_reverse_ip_tags_for_vertex(placement.vertex)
@@ -44,7 +44,7 @@ def generate_machine_data_specification(
def generate_machine_data_specification(
self, spec: DataSpecificationGenerator, placement: Placement,
iptags: Optional[Iterable[IPTag]],
reverse_iptags: Optional[Iterable[ReverseIPTag]]):
reverse_iptags: Optional[Iterable[ReverseIPTag]]) -> None:
"""
:param ~data_specification.DataSpecificationGenerator spec:
The data specification to write into.
18 changes: 10 additions & 8 deletions spinn_front_end_common/data/fec_data_view.py
@@ -870,7 +870,8 @@ def get_system_provenance_dir_path(cls) -> str:
cls.get_provenance_dir_path(), "system_provenance_data")

@classmethod
def _child_folder(cls, parent, child_name, must_create=False):
def _child_folder(cls, parent: str, child_name: str,
must_create: bool = False) -> str:
"""
:param str parent:
:param str child_name:
@@ -979,7 +980,7 @@ def get_live_packet_recorder_params(cls) -> Dict[
def add_live_packet_gatherer_parameters(
cls, live_packet_gatherer_params: LivePacketGatherParameters,
vertex_to_record_from: ApplicationVertex,
partition_ids: Iterable[str]):
partition_ids: Iterable[str]) -> None:
"""
Adds parameters for a new live packet gatherer (LPG) if needed, or
adds to the tracker for parameters.
@@ -1252,7 +1253,7 @@ def get_n_database_socket_addresses(cls) -> int:

@classmethod
def add_database_socket_address(
cls, database_socket_address: SocketAddress):
cls, database_socket_address: SocketAddress) -> None:
"""
Adds a socket address to the list of known addresses.

@@ -1267,7 +1268,8 @@ def add_database_socket_address(

@classmethod
def add_database_socket_addresses(
cls, database_socket_addresses: Optional[Iterable[SocketAddress]]):
cls, database_socket_addresses: Optional[Iterable[SocketAddress]]
) -> None:
"""
Adds all socket addresses to the list of known addresses.

@@ -1297,7 +1299,7 @@ def get_notification_protocol(cls) -> NotificationProtocol:

@classmethod
def add_live_output_vertex(
cls, vertex: ApplicationVertex, partition_id: str):
cls, vertex: ApplicationVertex, partition_id: str) -> None:
"""
Add a vertex that is to be output live, and so wants its atom IDs
recorded in the database.
@@ -1324,9 +1326,9 @@ def iterate_live_output_vertices(
return iter(cls.__fec_data._live_output_vertices)

@classmethod
def get_next_ds_references(cls, number):
def get_next_ds_references(cls, number: int) -> List[int]:
"""
Get a a list of unique data specification references
Get a list of unique data specification references

These will be unique since the last hard reset

@@ -1339,7 +1341,7 @@ def add_live_output_device(cls, device: LiveOutputDevice):
return list(references)

@classmethod
def add_live_output_device(cls, device: LiveOutputDevice):
def add_live_output_device(cls, device: LiveOutputDevice) -> None:
"""
Add a live output device.

51 changes: 23 additions & 28 deletions spinn_front_end_common/data/fec_data_writer.py
@@ -110,7 +110,7 @@ def _soft_reset(self) -> None:

def __create_run_dir_path(self) -> None:
self.set_run_dir_path(self._child_folder(
self.__fec_data._timestamp_dir_path,
self.get_timestamp_dir_path(),
f"run_{self.__fec_data._run_number}"))

def __create_reports_directory(self) -> None:
@@ -150,7 +150,7 @@ def write_finished_file(self) -> None:
f.writelines(self._get_timestamp())

def set_allocation_controller(self, allocation_controller: Optional[
MachineAllocationController]):
MachineAllocationController]) -> None:
"""
Sets the allocation controller variable.
@@ -170,7 +170,7 @@ def set_allocation_controller(self, allocation_controller: Optional[
"Expecting only the SpallocJobController to be proxying")
self.__fec_data._spalloc_job = allocation_controller.job

def set_buffer_manager(self, buffer_manager: BufferManager):
def set_buffer_manager(self, buffer_manager: BufferManager) -> None:
"""
Sets the Buffer manager variable.
@@ -180,7 +180,8 @@ def set_buffer_manager(self, buffer_manager: BufferManager):
raise TypeError("buffer_manager must be a BufferManager")
self.__fec_data._buffer_manager = buffer_manager

def increment_current_run_timesteps(self, increment: Optional[int]):
def increment_current_run_timesteps(
self, increment: Optional[int]) -> None:
"""
Increment the current_run_timesteps and sets first_machine_time_step.
@@ -224,7 +225,7 @@ def set_current_run_timesteps(self, current_run_timesteps: int) -> None:
"Last run was longer than duration supported by recording")
self.__fec_data._current_run_timesteps = current_run_timesteps

def set_max_run_time_steps(self, max_run_time_steps: int):
def set_max_run_time_steps(self, max_run_time_steps: int) -> None:
"""
Sets the max_run_time_steps value
@@ -240,7 +241,7 @@ def set_max_run_time_steps(self, max_run_time_steps: int):
def set_up_timings(
self, simulation_time_step_us: Optional[int],
time_scale_factor: Optional[float],
default_time_scale_factor: Optional[float] = None):
default_time_scale_factor: Optional[float] = None) -> None:
"""
Set up timings for the simulation.
@@ -276,7 +277,7 @@ def set_up_timings(
raise

def _set_simulation_time_step(
self, simulation_time_step_us: Optional[int]):
self, simulation_time_step_us: Optional[int]) -> None:
"""
:param simulation_time_step_us:
An explicitly specified time step for the simulation. If `None`,
@@ -307,7 +308,7 @@ def _set_simulation_time_step(

def _set_time_scale_factor(
self, time_scale_factor: Optional[float],
default_time_scale_factor: Optional[float]):
default_time_scale_factor: Optional[float]) -> None:
"""
Set up time_scale_factor.
@@ -374,7 +375,7 @@ def _set_hardware_timestep(self) -> None:

def set_system_multicast_routing_data(
self, data: Tuple[
MulticastRoutingTables, Dict[XY, int], Dict[XY, int]]):
MulticastRoutingTables, Dict[XY, int], Dict[XY, int]]) -> None:
"""
Sets the system_multicast_routing_data.
@@ -398,7 +399,7 @@ def set_system_multicast_routing_data(
self.__fec_data._data_in_multicast_routing_tables = routing_tables
self.__fec_data._system_multicast_router_timeout_keys = timeout_keys

def set_ipaddress(self, ip_address: str):
def set_ipaddress(self, ip_address: str) -> None:
"""
:param str ip_address:
"""
@@ -407,7 +408,7 @@ def set_ipaddress(self, ip_address: str):
self.__fec_data._ipaddress = ip_address

def set_fixed_routes(
self, fixed_routes: Dict[Tuple[int, int], RoutingEntry]):
self, fixed_routes: Dict[Tuple[int, int], RoutingEntry]) -> None:
"""
:param fixed_routes:
:type fixed_routes:
@@ -417,7 +418,7 @@ def set_fixed_routes(
raise TypeError("fixed_routes must be a dict")
self.__fec_data._fixed_routes = fixed_routes

def set_java_caller(self, java_caller: JavaCaller):
def set_java_caller(self, java_caller: JavaCaller) -> None:
"""
:param JavaCaller java_caller:
"""
@@ -432,7 +433,7 @@ def set_java_caller(self, java_caller: JavaCaller):
self.__fec_data._next_sync_signal = Signal.SYNC0

def set_executable_types(self, executable_types: Dict[
ExecutableType, CoreSubsets]):
ExecutableType, CoreSubsets]) -> None:
"""
:param executable_types:
:type executable_types: dict(
@@ -443,15 +444,8 @@ def set_executable_types(self, executable_types: Dict[
raise TypeError("executable_types must be a Dict")
self.__fec_data._executable_types = executable_types

def set_live_packet_gatherer_parameters(self, params):
"""
testing method will not work outside of mock
"""
if not self._is_mocked():
raise NotImplementedError("This call is only for testing")
self.__fec_data._live_packet_recorder_params = params

def set_database_file_path(self, database_file_path: Optional[str]):
def set_database_file_path(
self, database_file_path: Optional[str]) -> None:
"""
Sets the database_file_path variable. Possibly to `None`.
@@ -462,7 +456,8 @@ def set_database_file_path(self, database_file_path: Optional[str]):
raise TypeError("database_file_path must be a str or None")
self.__fec_data._database_file_path = database_file_path

def set_executable_targets(self, executable_targets: ExecutableTargets):
def set_executable_targets(
self, executable_targets: ExecutableTargets) -> None:
"""
Sets the executable_targets
@@ -472,7 +467,7 @@ def set_executable_targets(self, executable_targets: ExecutableTargets):
raise TypeError("executable_targets must be a ExecutableTargets")
self.__fec_data._executable_targets = executable_targets

def set_ds_database_path(self, ds_database_path: str):
def set_ds_database_path(self, ds_database_path: str) -> None:
"""
Sets the Data Spec targets database.
@@ -489,7 +484,7 @@ def __gatherer_map_error(self) -> TypeError:
"DataSpeedUpPacketGatherMachineVertex)")

def set_gatherer_map(self, gatherer_map: Dict[
Chip, DataSpeedUpPacketGatherMachineVertex]):
Chip, DataSpeedUpPacketGatherMachineVertex]) -> None:
"""
Sets the map of Chip to Gatherer Vertices.
@@ -516,7 +511,7 @@ def __monitor_map_error(self) -> TypeError:
"ExtraMonitorSupportMachineVertex)")

def set_monitor_map(self, monitor_map: Dict[
Chip, ExtraMonitorSupportMachineVertex]):
Chip, ExtraMonitorSupportMachineVertex]) -> None:
"""
Sets the map of Chip to Monitor Vertices.
@@ -540,7 +535,7 @@ def set_monitor_map(self, monitor_map: Dict[
self.__fec_data._monitor_map = monitor_map

def set_notification_protocol(
self, notification_protocol: NotificationProtocol):
self, notification_protocol: NotificationProtocol) -> None:
"""
Sets the notification_protocol.
@@ -568,7 +563,7 @@ def add_vertex(cls, vertex: ApplicationVertex) -> None:
# Avoid the safety check in FecDataView
PacmanDataWriter.add_vertex(vertex)

def set_n_run_steps(self, n_run_steps: int):
def set_n_run_steps(self, n_run_steps: int) -> None:
"""
Sets the number of expected run-steps
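
Many of the writer setters above pair the new static annotations with an existing runtime `isinstance` guard, so both mypy and untyped callers get a clear failure mode. A condensed sketch of that pattern (hypothetical class; the real setters store into the shared FEC data object):

```python
from typing import Optional


class DataWriterSketch:
    """Illustration of the typed-setter pattern; not the real writer."""

    def __init__(self) -> None:
        self._database_file_path: Optional[str] = None

    def set_database_file_path(
            self, database_file_path: Optional[str]) -> None:
        # The runtime guard mirrors the Optional[str] annotation.
        if database_file_path is not None and not isinstance(
                database_file_path, str):
            raise TypeError("database_file_path must be a str or None")
        self._database_file_path = database_file_path


writer = DataWriterSketch()
writer.set_database_file_path("ds.sqlite3")
```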