Commit: typing

Christian-B committed Dec 16, 2024
1 parent ee021ca commit c9c09bc
Showing 10 changed files with 44 additions and 39 deletions.
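The changes follow one pattern: methods gain explicit return annotations (mostly -> None, plus a few concrete types such as int and List[int]) and previously untyped parameters gain types. As a rough illustration of what the -> None annotations buy under mypy, consider the sketch below; the class and attribute names are invented, not taken from this repository.

    # Invented example (not from spinn_front_end_common) showing the effect
    # of adding "-> None" to a setter-style method, as this commit does.
    class Recorder:
        _reload_required: bool = False

        def set_reload_required(self, new_value: bool) -> None:
            # With the annotation, mypy rejects any caller that tries to use
            # the (non-existent) return value, e.g.
            #     ok = recorder.set_reload_required(True)
            # error: "set_reload_required" of "Recorder" does not return a value
            self._reload_required = new_value

Without the annotation the method returns Any as far as the checker is concerned, so the same mistake passes silently.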
@@ -32,8 +32,8 @@ class AbstractRewritesDataSpecification(object, metaclass=AbstractBase):
__slots__ = ()

@abstractmethod
def regenerate_data_specification(
self, spec: DataSpecificationReloader, placement: Placement):
def regenerate_data_specification(self, spec: DataSpecificationReloader,
placement: Placement) -> None:
"""
Regenerate the data specification, only generating regions that
have changed and need to be reloaded.
@@ -55,7 +55,7 @@ def reload_required(self) -> bool:
raise NotImplementedError

@abstractmethod
def set_reload_required(self, new_value: bool):
def set_reload_required(self, new_value: bool) -> None:
"""
Indicate that the regions have been reloaded.
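A minimal subclass satisfying these now fully typed abstract methods could look like the sketch below. The import paths are assumptions (this diff does not show them) and the region number and size are made up; the test vertex at the end of this commit does the same thing for real.

    # Sketch only; import paths and region details are assumed.
    from spinn_utilities.overrides import overrides
    from pacman.model.placements import Placement
    from spinn_front_end_common.abstract_models import (
        AbstractRewritesDataSpecification)
    from spinn_front_end_common.interface.ds import DataSpecificationReloader

    class ReloadableVertex(AbstractRewritesDataSpecification):
        def __init__(self) -> None:
            self._needs_reload = True

        @overrides(AbstractRewritesDataSpecification.reload_required)
        def reload_required(self) -> bool:
            return self._needs_reload

        @overrides(AbstractRewritesDataSpecification.set_reload_required)
        def set_reload_required(self, new_value: bool) -> None:
            self._needs_reload = new_value

        @overrides(AbstractRewritesDataSpecification.regenerate_data_specification)
        def regenerate_data_specification(self, spec: DataSpecificationReloader,
                                          placement: Placement) -> None:
            # Re-reserve and rewrite a single made-up region; a real vertex
            # would re-emit whatever regions changed since the last load.
            spec.reserve_memory_region(0, 64)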
18 changes: 10 additions & 8 deletions spinn_front_end_common/data/fec_data_view.py
@@ -870,7 +870,8 @@ def get_system_provenance_dir_path(cls) -> str:
cls.get_provenance_dir_path(), "system_provenance_data")

@classmethod
def _child_folder(cls, parent, child_name, must_create=False):
def _child_folder(cls, parent: str, child_name: str,
must_create: bool = False) -> str:
"""
:param str parent:
:param str child_name:
@@ -979,7 +980,7 @@ def get_live_packet_recorder_params(cls) -> Dict[
def add_live_packet_gatherer_parameters(
cls, live_packet_gatherer_params: LivePacketGatherParameters,
vertex_to_record_from: ApplicationVertex,
partition_ids: Iterable[str]):
partition_ids: Iterable[str]) -> None:
"""
Adds parameters for a new live packet gatherer (LPG) if needed, or
adds to the tracker for parameters.
@@ -1252,7 +1253,7 @@ def get_n_database_socket_addresses(cls) -> int:

@classmethod
def add_database_socket_address(
cls, database_socket_address: SocketAddress):
cls, database_socket_address: SocketAddress) -> None:
"""
Adds a socket address to the list of known addresses.
@@ -1267,7 +1268,8 @@ def add_database_socket_address(

@classmethod
def add_database_socket_addresses(
cls, database_socket_addresses: Optional[Iterable[SocketAddress]]):
cls, database_socket_addresses: Optional[Iterable[SocketAddress]]
) -> None:
"""
Adds all socket addresses to the list of known addresses.
@@ -1297,7 +1299,7 @@ def get_notification_protocol(cls) -> NotificationProtocol:

@classmethod
def add_live_output_vertex(
cls, vertex: ApplicationVertex, partition_id: str):
cls, vertex: ApplicationVertex, partition_id: str) -> None:
"""
Add a vertex that is to be output live, and so wants its atom IDs
recorded in the database.
Expand All @@ -1324,9 +1326,9 @@ def iterate_live_output_vertices(
return iter(cls.__fec_data._live_output_vertices)

@classmethod
def get_next_ds_references(cls, number):
def get_next_ds_references(cls, number: int) -> List[int]:
"""
Get a a list of unique data specification references
Get a list of unique data specification references
These will be unique since the last hard reset
@@ -1339,7 +1341,7 @@ def get_next_ds_references(cls, number):
return list(references)

@classmethod
def add_live_output_device(cls, device: LiveOutputDevice):
def add_live_output_device(cls, device: LiveOutputDevice) -> None:
"""
Add a live output device.
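With get_next_ds_references now typed, a checker can see that the result is a List[int] with one entry per requested reference. A small usage sketch, assuming the usual FecDataView import and that the front end has already populated its data:

    # Sketch only; FecDataView must have been set up by the simulator first.
    from spinn_front_end_common.data import FecDataView

    refs = FecDataView.get_next_ds_references(3)
    assert len(refs) == 3 and len(set(refs)) == 3   # three distinct references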
2 changes: 1 addition & 1 deletion spinn_front_end_common/data/fec_data_writer.py
@@ -110,7 +110,7 @@ def _soft_reset(self) -> None:

def __create_run_dir_path(self) -> None:
self.set_run_dir_path(self._child_folder(
self.__fec_data._timestamp_dir_path,
self.get_timestamp_dir_path(),
f"run_{self.__fec_data._run_number}"))

def __create_reports_directory(self) -> None:
8 changes: 4 additions & 4 deletions spinn_front_end_common/interface/ds/ds_sqllite_database.py
@@ -65,7 +65,7 @@ def __init__(self, database_file: Optional[str] = None):
super().__init__(
database_file, ddl_file=_DDL_FILE if self._init_file else None)

def _context_entered(self):
def _context_entered(self) -> None:
super()._context_entered()
if self._init_file:
self.__init_ethernets()
@@ -88,7 +88,7 @@ def __init_ethernets(self) -> None:
for ethernet in eth_chips))

def set_core(self, x: int, y: int, p: int,
vertex: AbstractHasAssociatedBinary):
vertex: AbstractHasAssociatedBinary) -> None:
"""
Creates a database record for the core with this x,y,z
@@ -145,7 +145,7 @@ def get_core_infos(self, is_system: bool) -> List[
row["ethernet_x"], row["ethernet_y"]))
return core_infos

def _set_chip(self, x: int, y: int):
def _set_chip(self, x: int, y: int) -> None:
"""
:param int x:
:param int y:
@@ -167,7 +167,7 @@ def _set_chip(self, x: int, y: int):

def set_memory_region(
self, x: int, y: int, p: int, region_num: int, size: int,
reference: Optional[int], label: Optional[str]):
reference: Optional[int], label: Optional[str]) -> int:
"""
Writes the information to reserve a memory region into the database
14 changes: 8 additions & 6 deletions spinn_front_end_common/interface/provenance/global_provenance.py
@@ -86,7 +86,7 @@ def __init__(
SQLiteDB.__init__(self, database_file, ddl_file=_DDL_FILE,
row_factory=None, text_factory=None)

def insert_version(self, description: str, the_value: str):
def insert_version(self, description: str, the_value: str) -> None:
"""
Inserts data into the version_provenance table
@@ -100,7 +100,8 @@ def insert_version(self, description: str, the_value: str):
VALUES(?, ?)
""", [description, the_value])

def insert_category(self, category: TimerCategory, machine_on: bool):
def insert_category(
self, category: TimerCategory, machine_on: bool) -> int:
"""
Inserts category into the category_timer_provenance returning id
@@ -119,7 +120,8 @@ def insert_category(self, category: TimerCategory, machine_on: bool):
FecDataView.get_run_step()])
return self.lastrowid

def insert_category_timing(self, category_id: int, delta: timedelta):
def insert_category_timing(
self, category_id: int, delta: timedelta) -> None:
"""
Inserts run time into the category
@@ -140,7 +142,7 @@ def insert_category_timing(self, category_id: int, delta: timedelta):

def insert_timing(
self, category: int, algorithm: str, work: TimerWork,
delta: timedelta, skip_reason: Optional[str]):
delta: timedelta, skip_reason: Optional[str]) -> None:
"""
Inserts algorithms run times into the timer_provenance table
@@ -164,7 +166,7 @@ def insert_timing(
[category, algorithm, work.work_name, time_taken, skip_reason])

def store_log(self, level: int, message: str,
timestamp: Optional[datetime] = None):
timestamp: Optional[datetime] = None) -> None:
"""
Stores log messages into the database
@@ -181,7 +183,7 @@ def store_log(self, level: int, message: str,
""",
[timestamp, level, message])

def _test_log_locked(self, text: str):
def _test_log_locked(self, text: str) -> None:
"""
THIS IS A TESTING METHOD.
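The annotations also make the intended call sequence in this file explicit: insert_category returns the integer row id that insert_category_timing later consumes. A rough sketch, assuming the class defined here is GlobalProvenance, that it can be used as a context manager, and that TimerCategory has a member along the lines of RUN_OTHER; none of those names are confirmed by this diff.

    from datetime import timedelta

    # Sketch only; names outside the signatures shown above are assumptions.
    with GlobalProvenance() as db:
        db.insert_version("tools_version", "1.0.0")                   # -> None
        category_id = db.insert_category(
            TimerCategory.RUN_OTHER, machine_on=True)                 # -> int
        # ... the timed work happens here ...
        db.insert_category_timing(category_id, timedelta(seconds=2))  # -> None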
@@ -106,8 +106,8 @@ def insert_monitor(self, x: int, y: int, description: str,
if get_config_bool("Reports", "write_provenance"):
self.insert_monitor_value(x, y, description, the_value)

def insert_monitor_value(
self, x: int, y: int, description: str, the_value: _SqliteTypes):
def insert_monitor_value(self, x: int, y: int, description: str,
the_value: _SqliteTypes) -> None:
"""
Inserts data into the `monitor_provenance` table.
2 changes: 1 addition & 1 deletion spinn_front_end_common/utilities/base_database.py
@@ -26,7 +26,7 @@
_SqliteTypes: TypeAlias = Union[str, int, float, bytes, None]


def _timestamp():
def _timestamp() -> int:
return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION)


19 changes: 10 additions & 9 deletions spinn_front_end_common/utilities/scp/reinjector_control_process.py
@@ -34,7 +34,7 @@ class ReinjectorControlProcess(AbstractMultiConnectionProcess):
"""
__slots__ = ()

def clear_queue(self, core_subsets: CoreSubsets):
def clear_queue(self, core_subsets: CoreSubsets) -> None:
"""
Clear the reinjection queue.
@@ -46,7 +46,7 @@ def clear_queue(self, core_subsets: CoreSubsets):
self._send_request(ClearReinjectionQueueMessage(
core_subset.x, core_subset.y, processor_id))

def reset_counters(self, core_subsets: CoreSubsets):
def reset_counters(self, core_subsets: CoreSubsets) -> None:
"""
Reset the packet counters.
@@ -61,7 +61,7 @@ def reset_counters(self, core_subsets: CoreSubsets):
@staticmethod
def __handle_response(
result: Dict[Chip, ReInjectionStatus],
response: GetReinjectionStatusMessageResponse):
response: GetReinjectionStatusMessageResponse) -> None:
"""
:param dict result:
:param GetReinjectionStatusMessageResponse response:
@@ -113,7 +113,8 @@ def get_reinjection_status_for_core_subsets(

def set_packet_types(
self, core_subsets: CoreSubsets, point_to_point: bool,
multicast: bool, nearest_neighbour: bool, fixed_route: bool):
multicast: bool, nearest_neighbour: bool,
fixed_route: bool) -> None:
"""
Set what types of packets should be reinjected.
@@ -131,8 +132,8 @@ def set_packet_types(
core_subset.x, core_subset.y, processor_id, multicast,
point_to_point, fixed_route, nearest_neighbour))

def set_wait1_timeout(
self, mantissa: int, exponent: int, core_subsets: CoreSubsets):
def set_wait1_timeout(self, mantissa: int, exponent: int,
core_subsets: CoreSubsets) -> None:
"""
The wait1 timeout is the time from when a packet is received to
when emergency routing becomes enabled.
@@ -147,8 +148,8 @@ def set_wait1_timeout(
self.__set_timeout(
core_subset, processor_id, mantissa, exponent, wait=1)

def set_wait2_timeout(
self, mantissa: int, exponent: int, core_subsets: CoreSubsets):
def set_wait2_timeout(self, mantissa: int, exponent: int,
core_subsets: CoreSubsets) -> None:
"""
The wait2 timeout is the time from when a packet has emergency
routing enabled for it to when it is dropped.
@@ -165,7 +166,7 @@ def set_wait2_timeout(

def __set_timeout(
self, core: CoreSubset, processor_id: int,
mantissa: int, exponent: int, *, wait: int):
mantissa: int, exponent: int, *, wait: int) -> None:
"""
Set a timeout for a router controlled by an extra monitor on a core.
This is not a parallelised operation in order to aid debugging when
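Putting the reinjector calls together, an illustrative sequence over a set of extra-monitor cores could look like this. The process object is assumed to be an already constructed ReinjectorControlProcess (building one needs a connection selector, which this diff does not show), and the timeout mantissa/exponent values are arbitrary.

    from spinn_machine import CoreSubsets

    # Sketch only; `process` is an existing ReinjectorControlProcess.
    cores = CoreSubsets()
    cores.add_processor(0, 0, 1)   # extra-monitor core on chip (0, 0)

    process.reset_counters(cores)
    process.set_packet_types(cores, point_to_point=True, multicast=True,
                             nearest_neighbour=False, fixed_route=False)
    # wait1: delay before emergency routing; wait2: delay before dropping
    process.set_wait1_timeout(1, 4, cores)
    process.set_wait2_timeout(1, 4, cores)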
4 changes: 2 additions & 2 deletions spinn_front_end_common/utilities/utility_calls.py
@@ -38,7 +38,7 @@
T = TypeVar("T")


def _mkdir(directory: str):
def _mkdir(directory: str) -> None:
"""
Make a directory if it doesn't exist.
@@ -141,7 +141,7 @@ def parse_old_spalloc(
def retarget_tag(
connection: Union[SpallocEIEIOListener, SpallocEIEIOConnection,
SCAMPConnection], x: int, y: int, tag: int,
ip_address: Optional[str] = None, strip: bool = True):
ip_address: Optional[str] = None, strip: bool = True) -> None:
"""
Make a tag deliver to the given connection.
@@ -68,12 +68,12 @@ def reload_required(self) -> bool:
return self._requires_regions_to_be_reloaded

@overrides(AbstractRewritesDataSpecification.set_reload_required)
def set_reload_required(self, new_value: bool):
def set_reload_required(self, new_value: bool) -> None:
self._requires_regions_to_be_reloaded = new_value

@overrides(AbstractRewritesDataSpecification.regenerate_data_specification)
def regenerate_data_specification(
self, spec: DataSpecificationReloader, placement: Placement):
def regenerate_data_specification(self, spec: DataSpecificationReloader,
placement: Placement) -> None:
global regenerate_call_count
for region_id, size, data in reload_region_data[placement.p]:
spec.reserve_memory_region(region_id, size)