From 0ce1c1e5f5a4cbf0a9bd8441c548efa94a86fbd2 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Fri, 5 Apr 2024 10:06:35 -0700 Subject: [PATCH 01/23] WIP --- performance-metrics/Makefile | 6 +- performance-metrics/Pipfile | 4 +- performance-metrics/Pipfile.lock | 21 ++- .../src/performance_metrics/function_timer.py | 67 +++++++ .../test_function_timer.py | 165 ++++++++++++++++++ 5 files changed, 257 insertions(+), 6 deletions(-) create mode 100644 performance-metrics/src/performance_metrics/function_timer.py create mode 100644 performance-metrics/tests/performance_metrics/test_function_timer.py diff --git a/performance-metrics/Makefile b/performance-metrics/Makefile index cce4fd7d93a..4e5e7780782 100644 --- a/performance-metrics/Makefile +++ b/performance-metrics/Makefile @@ -25,4 +25,8 @@ clean: .PHONY: wheel wheel: $(python) setup.py $(wheel_opts) bdist_wheel - rm -rf build \ No newline at end of file + rm -rf build + +.PHONY: test +test: + $(pytest) tests -vvv \ No newline at end of file diff --git a/performance-metrics/Pipfile b/performance-metrics/Pipfile index df5a3de89d6..a71db703e33 100644 --- a/performance-metrics/Pipfile +++ b/performance-metrics/Pipfile @@ -5,15 +5,17 @@ name = "pypi" [packages] opentrons-shared-data = {file = "../shared-data/python", editable = true} +performance-metrics = {file = ".", editable = true} [dev-packages] -pytest = "==7.2.2" +pytest = "==7.4.4" mypy = "==1.8.0" flake8 = "==7.0.0" flake8-annotations = "~=3.0.1" flake8-docstrings = "~=1.7.0" flake8-noqa = "~=1.4.0" black = "==22.3.0" +pytest-asyncio = "~=0.23.0" [requires] python_version = "3.10" diff --git a/performance-metrics/Pipfile.lock b/performance-metrics/Pipfile.lock index 61556f3dee9..bc430c83d8e 100644 --- a/performance-metrics/Pipfile.lock +++ b/performance-metrics/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fa95804888e2d45ce401c98bafc9b543cb6e1afe0a36713660d3f5517ac02b8e" + "sha256": "93a35b59bb63ce1a4141b485ea63a81ce03448af4b50ca68b952ed0ee448220c" }, "pipfile-spec": 6, "requires": { @@ -37,6 +37,10 @@ "file": "../shared-data/python", "markers": "python_version >= '3.8'" }, + "performance-metrics": { + "editable": true, + "file": "." 
+ }, "pydantic": { "hashes": [ "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de", @@ -333,12 +337,21 @@ }, "pytest": { "hashes": [ - "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e", - "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4" + "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280", + "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.4.4" + }, + "pytest-asyncio": { + "hashes": [ + "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a", + "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==0.23.6" }, "snowballstemmer": { "hashes": [ diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py new file mode 100644 index 00000000000..8416838c6d9 --- /dev/null +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -0,0 +1,67 @@ +"""A module that contains a context manager to track the start and end time of a function.""" + +from contextlib import ContextDecorator, AsyncContextDecorator +from datetime import datetime, timezone +from dataclasses import dataclass +from typing import Type +from types import TracebackType + + +@dataclass +class FunctionTime: + """A dataclass to store the start and end time of a function. + + The dates are in UTC timezone. + """ + + start_time: datetime + end_time: datetime + + +class FunctionTimer(ContextDecorator, AsyncContextDecorator): + """A context manager that tracks the start and end time of a function. + + Handles both synchronous and asynchronous functions. 
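+
+    Illustrative usage (`work` and `async_work` are hypothetical callables, not
+    part of this change):
+
+        with FunctionTimer() as timer:
+            work()
+        print(timer.get_time())
+
+        async with FunctionTimer() as timer:
+            await async_work()
+        print(timer.get_time())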
+ """ + + def __init__(self) -> None: + self._start_time: datetime | None = None + self._end_time: datetime | None = None + + def __enter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._start_time = self._get_datetime() + return self + + def __exit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._end_time = self._get_datetime() + + async def __aenter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._start_time = self._get_datetime() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._end_time = self._get_datetime() + + def _get_datetime(self) -> datetime: + """Get the current datetime in UTC timezone.""" + return datetime.now(timezone.utc) + + def get_time(self) -> FunctionTime: + """Return a FunctionTime object with the start and end time of the function.""" + assert self._start_time is not None + assert self._end_time is not None + return FunctionTime(start_time=self._start_time, end_time=self._end_time) diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py new file mode 100644 index 00000000000..9d047c69df2 --- /dev/null +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -0,0 +1,165 @@ +"""This module contains tests for timing the execution of synchronous and asynchronous functions using the FunctionTimer class. + +It includes functions and their variants that raise exceptions to simulate errors during execution. Each test function is designed +to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous +calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios. 
+""" + +import time +import asyncio +import pytest +from performance_metrics.function_timer import FunctionTimer + + +def synchronous_function() -> None: + """Prints a message indicating a synchronous function is running.""" + print("synchronous_function") + + +def synchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in synchronous execution.""" + print("synchronous_function_with_exception") + raise Exception("An exception") + + +async def asynchronous_function() -> None: + """Prints a message indicating an asynchronous function is running.""" + print("asynchronous_function") + + +async def asynchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in asynchronous execution.""" + print("asynchronous_function_with_exception") + raise Exception("An exception") + + +async def long_running_task() -> None: + """Simulates a longer running asynchronous task.""" + await asyncio.sleep(2) + + +async def short_running_task() -> None: + """Simulates a shorter running asynchronous task.""" + await asyncio.sleep(1) + + +def test_synchronous_function() -> None: + """Tests that the synchronous function is timed correctly by FunctionTimer.""" + with FunctionTimer() as tracker: + synchronous_function() + + time = tracker.get_time() + assert time.start_time < time.end_time + + +def test_synchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in synchronous functions correctly.""" + with pytest.raises(Exception): + with FunctionTimer() as tracker: + synchronous_function_with_exception() + time = tracker.get_time() + assert time.start_time < time.end_time + + +async def test_asynchronous_function() -> None: + """Tests that the asynchronous function is timed correctly by FunctionTimer.""" + async with FunctionTimer() as tracker: + await asynchronous_function() + + time = tracker.get_time() + assert time.start_time < time.end_time + + +async def test_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in asynchronous functions correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as tracker: + await asynchronous_function_with_exception() + + time = tracker.get_time() + assert time.start_time < time.end_time + + +async def test_asynchronous_and_synchronous_function() -> None: + """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" + async with FunctionTimer() as tracker: + synchronous_function() + await asynchronous_function() + + time = tracker.get_time() + assert time.start_time < time.end_time + + +async def test_synchronous_and_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle a mixed sequence of functions, including an exception, correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as tracker: + synchronous_function_with_exception() + await asynchronous_function() + + time = tracker.get_time() + assert time.start_time < time.end_time + + +def test_instantaneous_function() -> None: + """Tests that the FunctionTimer can measure the time of an almost instantaneous function.""" + + def instantaneous_function() -> None: + """A function that executes almost instantaneously.""" + pass + + with FunctionTimer() as tracker: + instantaneous_function() + + time = tracker.get_time() + assert time.start_time <= time.end_time + + +def test_known_duration_function() -> None: + """Tests the 
FunctionTimer's accuracy by comparing with a known sleep duration.""" + sleep_duration = 0.5 + + def known_duration_function() -> None: + time.sleep(sleep_duration) + + with FunctionTimer() as tracker: + known_duration_function() + + time_info = tracker.get_time() + measured_duration = (time_info.end_time - time_info.start_time).total_seconds() + assert abs(measured_duration - sleep_duration) < 0.05 + + +async def test_async_functions_in_parallel() -> None: + """Tests timing of multiple asynchronous functions executed in parallel.""" + + async def async_sleep_function(duration: float) -> None: + await asyncio.sleep(duration) + + async with FunctionTimer() as tracker: + await asyncio.gather( + async_sleep_function(0.5), + async_sleep_function(1), + async_sleep_function(1.5), + ) + + time = tracker.get_time() + assert time.start_time < time.end_time + + +async def test_function_timer_with_async_contexts() -> None: + """Tests that the FunctionTimer context manager correctly times overlapping asynchronous tasks.""" + + async with FunctionTimer() as f1_timer: + await long_running_task() + + # Within the same block, start a short running task + async with FunctionTimer() as f2_timer: + await short_running_task() + + f1_time = f1_timer.get_time() + f2_time = f2_timer.get_time() + + # Assertions to ensure correct timing relationships + assert f1_time.start_time < f2_time.start_time, "f1 should start before f2" + assert f1_time.end_time > f2_time.end_time, "f1 should end after f2 has ended" From 6a745d8865b66690d57eebff08e917563fa7a378 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Mon, 8 Apr 2024 09:30:48 -0700 Subject: [PATCH 02/23] test: add more test cases --- .../src/performance_metrics/function_timer.py | 5 +- .../test_function_timer.py | 73 +++++++++++++++++-- 2 files changed, 68 insertions(+), 10 deletions(-) diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index 8416838c6d9..c3c2ff10dcf 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -22,6 +22,7 @@ class FunctionTimer(ContextDecorator, AsyncContextDecorator): """A context manager that tracks the start and end time of a function. Handles both synchronous and asynchronous functions. + Handles nested usage of the context manager. """ def __init__(self) -> None: @@ -62,6 +63,6 @@ def _get_datetime(self) -> datetime: def get_time(self) -> FunctionTime: """Return a FunctionTime object with the start and end time of the function.""" - assert self._start_time is not None - assert self._end_time is not None + assert self._start_time is not None, "The start time is not set." + assert self._end_time is not None, "The end time is not set." return FunctionTime(start_time=self._start_time, end_time=self._end_time) diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index 9d047c69df2..fa38fd9e0c2 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -4,7 +4,6 @@ to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios. 
""" - import time import asyncio import pytest @@ -40,7 +39,12 @@ async def long_running_task() -> None: async def short_running_task() -> None: """Simulates a shorter running asynchronous task.""" - await asyncio.sleep(1) + await asyncio.sleep(0.5) + + +################## +# TEST FUNCTIONS # +################## def test_synchronous_function() -> None: @@ -100,6 +104,36 @@ async def test_synchronous_and_asynchronous_function_with_exception() -> None: time = tracker.get_time() assert time.start_time < time.end_time +async def test_nested_synchronous_functions() -> None: + """Tests that the FunctionTimer correctly times nested synchronous functions.""" + with FunctionTimer() as outer_tracker: + synchronous_function() + with FunctionTimer() as inner_tracker: + synchronous_function() + + outer_time = outer_tracker.get_time() + inner_time = inner_tracker.get_time() + + assert outer_time.start_time < outer_time.end_time + assert inner_time.start_time < inner_time.end_time + assert outer_time.start_time < inner_time.start_time + assert outer_time.end_time > inner_time.end_time + +async def test_timing_sychronous_function_nested_inside_async_function() -> None: + """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager.""" + async with FunctionTimer() as async_tracker: + await asynchronous_function() + with FunctionTimer() as sync_tracker: + synchronous_function() + + async_time = async_tracker.get_time() + sync_time = sync_tracker.get_time() + + assert async_time.start_time < async_time.end_time + assert sync_time.start_time < sync_time.end_time + assert async_time.start_time < sync_time.start_time + assert async_time.end_time > sync_time.end_time + def test_instantaneous_function() -> None: """Tests that the FunctionTimer can measure the time of an almost instantaneous function.""" @@ -149,17 +183,40 @@ async def async_sleep_function(duration: float) -> None: async def test_function_timer_with_async_contexts() -> None: """Tests that the FunctionTimer context manager correctly times overlapping asynchronous tasks.""" + # 1. Start long_running_task + # 2. __aenter__ will be called on long_running_task + # 3. Start short_running_task + # 4. __aenter__ will be called on short_running_task + # 5. Finish short_running_task + # 6. __aexit__ will be called on short_running_task + # 7. Finish long_running_task + # 8. 
__aexit__ will be called on long_running_task

     async with FunctionTimer() as f1_timer:
         await long_running_task()

-        # Within the same block, start a short running task
         async with FunctionTimer() as f2_timer:
             await short_running_task()

-    f1_time = f1_timer.get_time()
-    f2_time = f2_timer.get_time()
+    f1_time = f1_timer.get_time()
+    f2_time = f2_timer.get_time()
+
+    assert f1_time.start_time < f1_time.end_time
+    assert f2_time.start_time < f2_time.end_time
+    assert f1_time.start_time < f2_time.start_time
+    assert f1_time.end_time > f2_time.end_time
+
+def test_direct_use_without_context_manager():
+    timer = FunctionTimer()
+    assert timer._start_time is None
+    assert timer._end_time is None
+
+    with pytest.raises(AssertionError):
+        timer.get_time()
+
+def test_calling_get_time_before_context_manager_finishes():
+    with pytest.raises(AssertionError):
+        with FunctionTimer() as timer:
+            synchronous_function()
+            timer.get_time()
-
-    # Assertions to ensure correct timing relationships
-    assert f1_time.start_time < f2_time.start_time, "f1 should start before f2"
-    assert f1_time.end_time > f2_time.end_time, "f1 should end after f2 has ended"

From 6fa0d1cc05460643a0e6617c823aaba97b266226 Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Mon, 8 Apr 2024 09:31:10 -0700
Subject: [PATCH 03/23] chore: don't allow decorator usage

---
 performance-metrics/src/performance_metrics/function_timer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py
index c3c2ff10dcf..92bbad3335b 100644
--- a/performance-metrics/src/performance_metrics/function_timer.py
+++ b/performance-metrics/src/performance_metrics/function_timer.py
@@ -1,6 +1,6 @@
 """A module that contains a context manager to track the start and end time of a function."""

-from contextlib import ContextDecorator, AsyncContextDecorator
+from contextlib import AbstractContextManager, AbstractAsyncContextManager
 from datetime import datetime, timezone
 from dataclasses import dataclass
 from typing import Type
 from types import TracebackType
@@ -18,7 +18,7 @@ class FunctionTime:
     end_time: datetime

-class FunctionTimer(ContextDecorator, AsyncContextDecorator):
+class FunctionTimer(AbstractContextManager, AbstractAsyncContextManager):
     """A context manager that tracks the start and end time of a function.

     Handles both synchronous and asynchronous functions.
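For context, dropping the ContextDecorator and AsyncContextDecorator bases in
favor of AbstractContextManager and AbstractAsyncContextManager removes the
implicit ability to apply a FunctionTimer instance as a decorator; only
explicit with / async with usage remains. A minimal sketch of the difference
(`work` is a hypothetical function, not part of the changeset):

    # Possible while FunctionTimer subclassed ContextDecorator; disallowed after this patch:
    @FunctionTimer()
    def work() -> None:
        ...

    # The supported usage after this patch:
    with FunctionTimer() as timer:
        work()
    print(timer.get_time())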
From 38c13fab0b9226625b7ec4edae16bb93dc4fde08 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Mon, 8 Apr 2024 09:40:18 -0700 Subject: [PATCH 04/23] test: allow end times to be the same on nested context managers --- .../tests/performance_metrics/test_function_timer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index fa38fd9e0c2..a8bb551d6dd 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -117,7 +117,7 @@ async def test_nested_synchronous_functions() -> None: assert outer_time.start_time < outer_time.end_time assert inner_time.start_time < inner_time.end_time assert outer_time.start_time < inner_time.start_time - assert outer_time.end_time > inner_time.end_time + assert outer_time.end_time >= inner_time.end_time async def test_timing_sychronous_function_nested_inside_async_function() -> None: """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager.""" @@ -132,7 +132,7 @@ async def test_timing_sychronous_function_nested_inside_async_function() -> None assert async_time.start_time < async_time.end_time assert sync_time.start_time < sync_time.end_time assert async_time.start_time < sync_time.start_time - assert async_time.end_time > sync_time.end_time + assert async_time.end_time >= sync_time.end_time def test_instantaneous_function() -> None: @@ -204,7 +204,7 @@ async def test_function_timer_with_async_contexts() -> None: assert f1_time.start_time < f1_time.end_time assert f2_time.start_time < f2_time.end_time assert f1_time.start_time < f2_time.start_time - assert f1_time.end_time > f2_time.end_time + assert f1_time.end_time >= f2_time.end_time def test_direct_use_without_context_manager(): timer = FunctionTimer() From bc4adc24fc016b4a39c073bc1537d17b9aa5589e Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Mon, 8 Apr 2024 09:44:20 -0700 Subject: [PATCH 05/23] chore: don't talk so much --- performance-metrics/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/performance-metrics/Makefile b/performance-metrics/Makefile index 4e5e7780782..fd4dd421ad2 100644 --- a/performance-metrics/Makefile +++ b/performance-metrics/Makefile @@ -29,4 +29,4 @@ wheel: .PHONY: test test: - $(pytest) tests -vvv \ No newline at end of file + $(pytest) tests \ No newline at end of file From e7e87df0859b754ee4dc6409d9063c22151be9c3 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Mon, 8 Apr 2024 09:56:58 -0700 Subject: [PATCH 06/23] formatting and linting --- .../src/performance_metrics/function_timer.py | 5 +++- .../test_function_timer.py | 27 ++++++++++++++----- 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index 92bbad3335b..a953272347a 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -18,7 +18,10 @@ class FunctionTime: end_time: datetime -class FunctionTimer(AbstractContextManager, AbstractAsyncContextManager): +class FunctionTimer( + AbstractContextManager["FunctionTimer"], + AbstractAsyncContextManager["FunctionTimer"], +): """A context manager that tracks the start and end time of a function. 
Handles both synchronous and asynchronous functions. diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index a8bb551d6dd..27146197cbd 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -104,6 +104,7 @@ async def test_synchronous_and_asynchronous_function_with_exception() -> None: time = tracker.get_time() assert time.start_time < time.end_time + async def test_nested_synchronous_functions() -> None: """Tests that the FunctionTimer correctly times nested synchronous functions.""" with FunctionTimer() as outer_tracker: @@ -119,6 +120,7 @@ async def test_nested_synchronous_functions() -> None: assert outer_time.start_time < inner_time.start_time assert outer_time.end_time >= inner_time.end_time + async def test_timing_sychronous_function_nested_inside_async_function() -> None: """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager.""" async with FunctionTimer() as async_tracker: @@ -206,17 +208,30 @@ async def test_function_timer_with_async_contexts() -> None: assert f1_time.start_time < f2_time.start_time assert f1_time.end_time >= f2_time.end_time -def test_direct_use_without_context_manager(): + +def test_direct_use_without_context_manager() -> None: + """Tests the behavior of FunctionTimer when used directly without a context manager block. + + Verifies that the start and end times are not set and that an appropriate assertion is raised when attempting to access them. + """ timer = FunctionTimer() - assert timer._start_time is None - assert timer._end_time is None - + assert ( + timer._start_time is None + ), "Start time should be None when not used within a context manager" + assert ( + timer._end_time is None + ), "End time should be None when not used within a context manager" + with pytest.raises(AssertionError): timer.get_time() -def test_calling_get_time_before_context_manager_finishes(): + +def test_calling_get_time_before_context_manager_finishes() -> None: + """Tests that attempting to call get_time before the context manager has properly finished (exited) results in an assertion error. + + This simulates the scenario where get_time is called prematurely, ensuring the timer enforces correct usage patterns. 
+ """ with pytest.raises(AssertionError): with FunctionTimer() as timer: synchronous_function() timer.get_time() - From be8587bf31cddd4d2db68937c90e6e965a7e3fa6 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Mon, 8 Apr 2024 14:15:18 -0700 Subject: [PATCH 07/23] Starting to address PR feedback --- .../src/performance_metrics/function_timer.py | 149 +++++----- .../test_function_timer.py | 272 ++++-------------- 2 files changed, 140 insertions(+), 281 deletions(-) diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index a953272347a..3e960c587bf 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -1,71 +1,78 @@ -"""A module that contains a context manager to track the start and end time of a function.""" - -from contextlib import AbstractContextManager, AbstractAsyncContextManager -from datetime import datetime, timezone -from dataclasses import dataclass -from typing import Type -from types import TracebackType - - -@dataclass -class FunctionTime: - """A dataclass to store the start and end time of a function. - - The dates are in UTC timezone. - """ - - start_time: datetime - end_time: datetime - - -class FunctionTimer( - AbstractContextManager["FunctionTimer"], - AbstractAsyncContextManager["FunctionTimer"], -): - """A context manager that tracks the start and end time of a function. - - Handles both synchronous and asynchronous functions. - Handles nested usage of the context manager. - """ - - def __init__(self) -> None: - self._start_time: datetime | None = None - self._end_time: datetime | None = None - - def __enter__(self) -> "FunctionTimer": - """Set the start time of the function.""" - self._start_time = self._get_datetime() - return self - - def __exit__( - self, - exc_type: Type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - """Set the end time of the function.""" - self._end_time = self._get_datetime() - - async def __aenter__(self) -> "FunctionTimer": - """Set the start time of the function.""" - self._start_time = self._get_datetime() - return self - - async def __aexit__( - self, - exc_type: Type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - """Set the end time of the function.""" - self._end_time = self._get_datetime() - - def _get_datetime(self) -> datetime: - """Get the current datetime in UTC timezone.""" - return datetime.now(timezone.utc) - - def get_time(self) -> FunctionTime: - """Return a FunctionTime object with the start and end time of the function.""" - assert self._start_time is not None, "The start time is not set." - assert self._end_time is not None, "The end time is not set." - return FunctionTime(start_time=self._start_time, end_time=self._end_time) +"""This module offers a mechanism for measuring and storing the execution durations of both synchronous and asynchronous functions. + +The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. 
+""" + +from time import perf_counter_ns +from typing import Protocol, Callable, TypeVar +from typing_extensions import ParamSpec +import inspect + +P = ParamSpec("P") +R = TypeVar("R") + + +class StoreDuration(Protocol): + """Protocol for a callback function that stores the duration between two timestamps.""" + + def __call__(self, start_time: int, end_time: int) -> None: + """Stores the duration of an operation. + + Args: + start_time (int): The start timestamp in nanoseconds. + end_time (int): The end timestamp in nanoseconds. + """ + pass + + +class FunctionTimer: + """A class designed to measure and store the execution duration of functions, both synchronous and asynchronous.""" + + def __init__(self, store_duration: StoreDuration) -> None: + """Initializes the FunctionTimer with a specified storage mechanism for the execution duration. + + Args: + store_duration: A callback function that stores the execution duration. + """ + self._store_duration = store_duration + + def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: + """Creates a wrapper around a given function to measure its execution duration. + + The wrapper calculates the duration of function execution and stores it using the provided + storage mechanism. Supports both synchronous and asynchronous functions. + + Args: + func: The function whose execution duration is to be measured. + + Returns: + A wrapped version of the input function with duration measurement capability. + """ + if inspect.iscoroutinefunction(func): + + async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + """An asynchronous wrapper function for measuring execution duration.""" + start_time = perf_counter_ns() + try: + result: R = await func(*args, **kwargs) + except Exception as e: + raise e + finally: + self._store_duration(start_time, perf_counter_ns()) + return result + + return async_wrapper # type: ignore + else: + + def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + """A synchronous wrapper function for measuring execution duration.""" + start_time = perf_counter_ns() + try: + result: R = func(*args, **kwargs) + except Exception as e: + raise e + finally: + self._store_duration(start_time, perf_counter_ns()) + return result + + return sync_wrapper diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index 27146197cbd..2df2dfe33e2 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -1,237 +1,89 @@ -"""This module contains tests for timing the execution of synchronous and asynchronous functions using the FunctionTimer class. +"""This module contains tests for the FunctionTimer class, focusing on its ability to accurately measure and record the execution times of synchronous and asynchronous functions, including when exceptions are raised.""" -It includes functions and their variants that raise exceptions to simulate errors during execution. Each test function is designed -to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous -calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios. 
-""" -import time import asyncio +from typing import List import pytest -from performance_metrics.function_timer import FunctionTimer +from time import sleep +from performance_metrics.function_timer import FunctionTimer, StoreDuration -def synchronous_function() -> None: - """Prints a message indicating a synchronous function is running.""" - print("synchronous_function") +@pytest.fixture +def durations() -> List[int]: + """Fixture that provides an empty list to store durations. This list is reset before each test.""" + return [] -def synchronous_function_with_exception() -> None: - """Prints a message then raises an exception to simulate error in synchronous execution.""" - print("synchronous_function_with_exception") - raise Exception("An exception") +@pytest.fixture +def store_duration_mock(durations: List[int]) -> StoreDuration: + """Provides a mock function for storing duration measurements. It appends durations to the provided list.""" + def _mock(start_time: int, end_time: int) -> None: + durations.append(end_time - start_time) -async def asynchronous_function() -> None: - """Prints a message indicating an asynchronous function is running.""" - print("asynchronous_function") + return _mock -async def asynchronous_function_with_exception() -> None: - """Prints a message then raises an exception to simulate error in asynchronous execution.""" - print("asynchronous_function_with_exception") - raise Exception("An exception") +@pytest.fixture +def function_timer(store_duration_mock: StoreDuration) -> FunctionTimer: + """Creates a FunctionTimer instance with a mock storage function for testing.""" + return FunctionTimer(store_duration=store_duration_mock) -async def long_running_task() -> None: - """Simulates a longer running asynchronous task.""" - await asyncio.sleep(2) +def test_sync_function(function_timer: FunctionTimer, durations: List[int]) -> None: + """Tests accurate measurement of a synchronous function's execution time.""" + @function_timer.measure_duration + def sync_test() -> None: + sleep(0.01) -async def short_running_task() -> None: - """Simulates a shorter running asynchronous task.""" - await asyncio.sleep(0.5) + sync_test() + assert len(durations) == 1 + assert durations[0] > 0 -################## -# TEST FUNCTIONS # -################## +@pytest.mark.asyncio +async def test_async_function( + function_timer: FunctionTimer, durations: List[int] +) -> None: + """Tests accurate measurement of an asynchronous function's execution time.""" + @function_timer.measure_duration + async def async_test() -> None: + await asyncio.sleep(0.01) -def test_synchronous_function() -> None: - """Tests that the synchronous function is timed correctly by FunctionTimer.""" - with FunctionTimer() as tracker: - synchronous_function() + await async_test() + assert len(durations) == 1 + assert durations[0] > 0 - time = tracker.get_time() - assert time.start_time < time.end_time +def test_sync_function_exception( + function_timer: FunctionTimer, durations: List[int] +) -> None: + """Tests duration measurement of a synchronous function that raises an exception.""" -def test_synchronous_function_with_exception() -> None: - """Tests that FunctionTimer can handle exceptions in synchronous functions correctly.""" - with pytest.raises(Exception): - with FunctionTimer() as tracker: - synchronous_function_with_exception() - time = tracker.get_time() - assert time.start_time < time.end_time + @function_timer.measure_duration + def sync_test_exception() -> None: + sleep(0.01) + raise ValueError("Intentional 
Error") + with pytest.raises(ValueError): + sync_test_exception() + assert len(durations) == 1 + assert durations[0] > 0 -async def test_asynchronous_function() -> None: - """Tests that the asynchronous function is timed correctly by FunctionTimer.""" - async with FunctionTimer() as tracker: - await asynchronous_function() - time = tracker.get_time() - assert time.start_time < time.end_time +@pytest.mark.asyncio +async def test_async_function_exception( + function_timer: FunctionTimer, durations: List[int] +) -> None: + """Tests duration measurement of an asynchronous function that raises an exception.""" + @function_timer.measure_duration + async def async_test_exception() -> None: + await asyncio.sleep(0.01) + raise ValueError("Intentional Error") -async def test_asynchronous_function_with_exception() -> None: - """Tests that FunctionTimer can handle exceptions in asynchronous functions correctly.""" - with pytest.raises(Exception): - async with FunctionTimer() as tracker: - await asynchronous_function_with_exception() - - time = tracker.get_time() - assert time.start_time < time.end_time - - -async def test_asynchronous_and_synchronous_function() -> None: - """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" - async with FunctionTimer() as tracker: - synchronous_function() - await asynchronous_function() - - time = tracker.get_time() - assert time.start_time < time.end_time - - -async def test_synchronous_and_asynchronous_function_with_exception() -> None: - """Tests that FunctionTimer can handle a mixed sequence of functions, including an exception, correctly.""" - with pytest.raises(Exception): - async with FunctionTimer() as tracker: - synchronous_function_with_exception() - await asynchronous_function() - - time = tracker.get_time() - assert time.start_time < time.end_time - - -async def test_nested_synchronous_functions() -> None: - """Tests that the FunctionTimer correctly times nested synchronous functions.""" - with FunctionTimer() as outer_tracker: - synchronous_function() - with FunctionTimer() as inner_tracker: - synchronous_function() - - outer_time = outer_tracker.get_time() - inner_time = inner_tracker.get_time() - - assert outer_time.start_time < outer_time.end_time - assert inner_time.start_time < inner_time.end_time - assert outer_time.start_time < inner_time.start_time - assert outer_time.end_time >= inner_time.end_time - - -async def test_timing_sychronous_function_nested_inside_async_function() -> None: - """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager.""" - async with FunctionTimer() as async_tracker: - await asynchronous_function() - with FunctionTimer() as sync_tracker: - synchronous_function() - - async_time = async_tracker.get_time() - sync_time = sync_tracker.get_time() - - assert async_time.start_time < async_time.end_time - assert sync_time.start_time < sync_time.end_time - assert async_time.start_time < sync_time.start_time - assert async_time.end_time >= sync_time.end_time - - -def test_instantaneous_function() -> None: - """Tests that the FunctionTimer can measure the time of an almost instantaneous function.""" - - def instantaneous_function() -> None: - """A function that executes almost instantaneously.""" - pass - - with FunctionTimer() as tracker: - instantaneous_function() - - time = tracker.get_time() - assert time.start_time <= time.end_time - - -def test_known_duration_function() -> None: - """Tests the FunctionTimer's accuracy by 
comparing with a known sleep duration.""" - sleep_duration = 0.5 - - def known_duration_function() -> None: - time.sleep(sleep_duration) - - with FunctionTimer() as tracker: - known_duration_function() - - time_info = tracker.get_time() - measured_duration = (time_info.end_time - time_info.start_time).total_seconds() - assert abs(measured_duration - sleep_duration) < 0.05 - - -async def test_async_functions_in_parallel() -> None: - """Tests timing of multiple asynchronous functions executed in parallel.""" - - async def async_sleep_function(duration: float) -> None: - await asyncio.sleep(duration) - - async with FunctionTimer() as tracker: - await asyncio.gather( - async_sleep_function(0.5), - async_sleep_function(1), - async_sleep_function(1.5), - ) - - time = tracker.get_time() - assert time.start_time < time.end_time - - -async def test_function_timer_with_async_contexts() -> None: - """Tests that the FunctionTimer context manager correctly times overlapping asynchronous tasks.""" - # 1. Start long_running_task - # 2. __aenter__ will be called on long_running_task - # 3. Start short_running_task - # 4. __aenter__ will be called on short_running_task - # 5. Finish short_running_task - # 6. __aexit__ will be called on short_running_task - # 7. Finish long_running_task - # 8. __aexit__ will be called on long_running_task - - async with FunctionTimer() as f1_timer: - await long_running_task() - - async with FunctionTimer() as f2_timer: - await short_running_task() - - f1_time = f1_timer.get_time() - f2_time = f2_timer.get_time() - - assert f1_time.start_time < f1_time.end_time - assert f2_time.start_time < f2_time.end_time - assert f1_time.start_time < f2_time.start_time - assert f1_time.end_time >= f2_time.end_time - - -def test_direct_use_without_context_manager() -> None: - """Tests the behavior of FunctionTimer when used directly without a context manager block. - - Verifies that the start and end times are not set and that an appropriate assertion is raised when attempting to access them. - """ - timer = FunctionTimer() - assert ( - timer._start_time is None - ), "Start time should be None when not used within a context manager" - assert ( - timer._end_time is None - ), "End time should be None when not used within a context manager" - - with pytest.raises(AssertionError): - timer.get_time() - - -def test_calling_get_time_before_context_manager_finishes() -> None: - """Tests that attempting to call get_time before the context manager has properly finished (exited) results in an assertion error. - - This simulates the scenario where get_time is called prematurely, ensuring the timer enforces correct usage patterns. 
- """ - with pytest.raises(AssertionError): - with FunctionTimer() as timer: - synchronous_function() - timer.get_time() + with pytest.raises(ValueError): + await async_test_exception() + assert len(durations) == 1 + assert durations[0] > 0 From 4b9b27b01878b01a657ed94ffce2db8655aaa5f5 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Tue, 9 Apr 2024 07:31:25 -0700 Subject: [PATCH 08/23] Forgot to add begin time --- .../src/performance_metrics/function_timer.py | 91 ++++++++++++++++--- .../test_function_timer.py | 84 +++++++++++------ 2 files changed, 133 insertions(+), 42 deletions(-) diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index 3e960c587bf..b9496296f79 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -3,8 +3,8 @@ The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. """ -from time import perf_counter_ns -from typing import Protocol, Callable, TypeVar +from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME +from typing import Iterator, Protocol, Callable, TypeVar, List, Tuple from typing_extensions import ParamSpec import inspect @@ -12,29 +12,80 @@ R = TypeVar("R") -class StoreDuration(Protocol): - """Protocol for a callback function that stores the duration between two timestamps.""" +class CanStoreTimingResult(Protocol): + """A protocol for a function that can store the result of a timing operation.""" - def __call__(self, start_time: int, end_time: int) -> None: + def store( + self, + function_start_time: int, + duration_measurement_start_time: int, + duration_measurement_end_time: int, + ) -> None: """Stores the duration of an operation. Args: - start_time (int): The start timestamp in nanoseconds. - end_time (int): The end timestamp in nanoseconds. + function_start_time: The time at which the function started executing. + duration_measurement_start_time: The time at which the duration measurement started. + duration_measurement_end_time: The time at which the duration measurement ended. """ pass +class TimingResultStore(CanStoreTimingResult): + """A class that stores the result of a timing operation.""" + + def __init__(self) -> None: + """Initializes the TimingResultStore with a storage method. + + Args: + storage_method: A method that stores the result of a timing operation. + """ + self._storage: List[Tuple[int, int, int]] = [] + + def __len__(self) -> int: + """Returns the number of stored timing results.""" + return len(self._storage) + + def __getitem__(self, index: int) -> Tuple[int, int, int]: + """Returns the timing result at the specified index.""" + return self._storage[index] + + def __iter__(self) -> Iterator[Tuple[int, int, int]]: + """Returns an iterator over the stored timing results.""" + return iter(self._storage) + + def store( + self, + function_start_time: int, + duration_measurement_start_time: int, + duration_measurement_end_time: int, + ) -> None: + """Stores the duration of an operation. + + Args: + function_start_time: The time at which the function started executing. + duration_measurement_start_time: The time at which the duration measurement started. + duration_measurement_end_time: The time at which the duration measurement ended. 
+ """ + self._storage.append( + ( + function_start_time, + duration_measurement_start_time, + duration_measurement_end_time, + ) + ) + + class FunctionTimer: """A class designed to measure and store the execution duration of functions, both synchronous and asynchronous.""" - def __init__(self, store_duration: StoreDuration) -> None: + def __init__(self, can_store: CanStoreTimingResult) -> None: """Initializes the FunctionTimer with a specified storage mechanism for the execution duration. Args: - store_duration: A callback function that stores the execution duration. + can_store: A callback function that stores the execution duration. """ - self._store_duration = store_duration + self._can_store = can_store def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: """Creates a wrapper around a given function to measure its execution duration. @@ -52,13 +103,19 @@ def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: """An asynchronous wrapper function for measuring execution duration.""" - start_time = perf_counter_ns() + function_start_time = clock_gettime_ns(CLOCK_REALTIME) + duration_measurement_start_time = perf_counter_ns() try: result: R = await func(*args, **kwargs) except Exception as e: raise e finally: - self._store_duration(start_time, perf_counter_ns()) + duration_measurement_end_time = perf_counter_ns() + self._can_store.store( + function_start_time, + duration_measurement_start_time, + duration_measurement_end_time, + ) return result return async_wrapper # type: ignore @@ -66,13 +123,19 @@ async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: """A synchronous wrapper function for measuring execution duration.""" - start_time = perf_counter_ns() + function_start_time = clock_gettime_ns(CLOCK_REALTIME) + duration_measurement_start_time = perf_counter_ns() try: result: R = func(*args, **kwargs) except Exception as e: raise e finally: - self._store_duration(start_time, perf_counter_ns()) + duration_measurement_end_time = perf_counter_ns() + self._can_store.store( + function_start_time, + duration_measurement_start_time, + duration_measurement_end_time, + ) return result return sync_wrapper diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index 2df2dfe33e2..eb1c22fabb1 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -1,35 +1,30 @@ """This module contains tests for the FunctionTimer class, focusing on its ability to accurately measure and record the execution times of synchronous and asynchronous functions, including when exceptions are raised.""" import asyncio -from typing import List import pytest from time import sleep -from performance_metrics.function_timer import FunctionTimer, StoreDuration +from performance_metrics.function_timer import ( + FunctionTimer, + CanStoreTimingResult, + TimingResultStore, +) @pytest.fixture -def durations() -> List[int]: +def timing_result_store() -> TimingResultStore: """Fixture that provides an empty list to store durations. This list is reset before each test.""" - return [] + return TimingResultStore() @pytest.fixture -def store_duration_mock(durations: List[int]) -> StoreDuration: - """Provides a mock function for storing duration measurements. 
It appends durations to the provided list.""" - - def _mock(start_time: int, end_time: int) -> None: - durations.append(end_time - start_time) - - return _mock - - -@pytest.fixture -def function_timer(store_duration_mock: StoreDuration) -> FunctionTimer: +def function_timer(timing_result_store: CanStoreTimingResult) -> FunctionTimer: """Creates a FunctionTimer instance with a mock storage function for testing.""" - return FunctionTimer(store_duration=store_duration_mock) + return FunctionTimer(can_store=timing_result_store) -def test_sync_function(function_timer: FunctionTimer, durations: List[int]) -> None: +def test_sync_function( + function_timer: FunctionTimer, timing_result_store: TimingResultStore +) -> None: """Tests accurate measurement of a synchronous function's execution time.""" @function_timer.measure_duration @@ -37,13 +32,13 @@ def sync_test() -> None: sleep(0.01) sync_test() - assert len(durations) == 1 - assert durations[0] > 0 + assert len(timing_result_store) == 1 + assert timing_result_store[0][1] < timing_result_store[0][2] @pytest.mark.asyncio async def test_async_function( - function_timer: FunctionTimer, durations: List[int] + function_timer: FunctionTimer, timing_result_store: TimingResultStore ) -> None: """Tests accurate measurement of an asynchronous function's execution time.""" @@ -52,12 +47,12 @@ async def async_test() -> None: await asyncio.sleep(0.01) await async_test() - assert len(durations) == 1 - assert durations[0] > 0 + assert len(timing_result_store) == 1 + assert timing_result_store[0][1] < timing_result_store[0][2] def test_sync_function_exception( - function_timer: FunctionTimer, durations: List[int] + function_timer: FunctionTimer, timing_result_store: TimingResultStore ) -> None: """Tests duration measurement of a synchronous function that raises an exception.""" @@ -68,13 +63,13 @@ def sync_test_exception() -> None: with pytest.raises(ValueError): sync_test_exception() - assert len(durations) == 1 - assert durations[0] > 0 + assert len(timing_result_store) == 1 + assert timing_result_store[0][1] < timing_result_store[0][2] @pytest.mark.asyncio async def test_async_function_exception( - function_timer: FunctionTimer, durations: List[int] + function_timer: FunctionTimer, timing_result_store: TimingResultStore ) -> None: """Tests duration measurement of an asynchronous function that raises an exception.""" @@ -85,5 +80,38 @@ async def async_test_exception() -> None: with pytest.raises(ValueError): await async_test_exception() - assert len(durations) == 1 - assert durations[0] > 0 + assert len(timing_result_store) == 1 + assert timing_result_store[0][1] < timing_result_store[0][2] + + +def test_sync_function_multiple_calls( + function_timer: FunctionTimer, timing_result_store: TimingResultStore +) -> None: + """Tests duration measurement of multiple calls to a synchronous function.""" + + @function_timer.measure_duration + def sync_test_multiple_calls() -> None: + sleep(0.01) + + sync_test_multiple_calls() + sync_test_multiple_calls() + assert len(timing_result_store) == 2 + assert timing_result_store[0][0] < timing_result_store[1][0] + assert all(start < end for _, start, end in timing_result_store) + + +@pytest.mark.asyncio +async def test_async_function_multiple_calls( + function_timer: FunctionTimer, timing_result_store: TimingResultStore +) -> None: + """Tests duration measurement of multiple calls to an asynchronous function.""" + + @function_timer.measure_duration + async def async_test_multiple_calls() -> None: + await asyncio.sleep(0.01) + 
+ await async_test_multiple_calls() + await async_test_multiple_calls() + assert len(timing_result_store) == 2 + assert timing_result_store[0][0] < timing_result_store[1][0] + assert all(start < end for _, start, end in timing_result_store) From 640a3b4920cca20805db5612062e5a9846829c56 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 10 Apr 2024 08:11:23 -0700 Subject: [PATCH 09/23] cleanup --- .../src/performance_metrics/function_timer.py | 156 ++++++++++++------ 1 file changed, 104 insertions(+), 52 deletions(-) diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index b9496296f79..fb83d913a9f 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -1,10 +1,11 @@ """This module offers a mechanism for measuring and storing the execution durations of both synchronous and asynchronous functions. -The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. +The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. It utilizes `perf_counter_ns` for high-resolution performance counter measurements and `clock_gettime_ns(CLOCK_REALTIME)` for real-time clock measurements. The use of `perf_counter_ns` ensures the highest possible resolution timer, which is essential for accurate duration measurement, especially for short-running functions. `clock_gettime_ns(CLOCK_REALTIME)` is used to capture the actual start time in real-world time, which is useful for correlating events or logs with other time-based data. + """ from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME -from typing import Iterator, Protocol, Callable, TypeVar, List, Tuple +from typing import Awaitable, Iterator, Protocol, Callable, TypeVar, List, Tuple from typing_extensions import ParamSpec import inspect @@ -13,7 +14,10 @@ class CanStoreTimingResult(Protocol): - """A protocol for a function that can store the result of a timing operation.""" + """Protocol for a class that can store the result of a timing operation. + + Implementing classes must provide a `store` method. + """ def store( self, @@ -32,14 +36,13 @@ def store( class TimingResultStore(CanStoreTimingResult): - """A class that stores the result of a timing operation.""" + """A class that stores the result of a timing operation. - def __init__(self) -> None: - """Initializes the TimingResultStore with a storage method. + Specifically captures the start, measurement start, and end times of function executions. + """ - Args: - storage_method: A method that stores the result of a timing operation. - """ + def __init__(self) -> None: + """Initializes the TimingResultStore.""" self._storage: List[Tuple[int, int, int]] = [] def __len__(self) -> int: @@ -60,7 +63,7 @@ def store( duration_measurement_start_time: int, duration_measurement_end_time: int, ) -> None: - """Stores the duration of an operation. + """Stores timing information of an operation in nanoseconds. Args: function_start_time: The time at which the function started executing. @@ -77,22 +80,107 @@ def store( class FunctionTimer: - """A class designed to measure and store the execution duration of functions, both synchronous and asynchronous.""" + """A decorator class for measuring and storing the execution duration of functions. + + It supports both synchronous and asynchronous functions. 
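+
+    Illustrative usage (assumes the TimingResultStore defined in this module;
+    `work` is a hypothetical function):
+
+        store = TimingResultStore()
+        timer = FunctionTimer(can_store=store)
+
+        @timer.measure_duration
+        def work() -> None:
+            ...
+
+        work()
+        # len(store) == 1; the stored tuple is (function_start_time,
+        # duration_measurement_start_time, duration_measurement_end_time)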
+ """ def __init__(self, can_store: CanStoreTimingResult) -> None: - """Initializes the FunctionTimer with a specified storage mechanism for the execution duration. + """Initializes the FunctionTimer with a specified storage function. Args: - can_store: A callback function that stores the execution duration. + can_store: A function that stores the execution duration. """ self._can_store = can_store + def _begin_timing(self) -> Tuple[int, int]: + """Starts the timing process, capturing both the current real-time and a high-resolution performance counter. + + Returns: + A tuple containing the current real-time (`clock_gettime_ns(CLOCK_REALTIME)`) and an initial performance counter (`perf_counter_ns()`). Both values are measured in nanoseconds. The combination of these counters allows us to accurately measure execution durations while also correlating these measurements to real-world time. + """ + return clock_gettime_ns(CLOCK_REALTIME), perf_counter_ns() + + def _end_timing(self) -> int: + """Ends the timing process, capturing the final high-resolution performance counter. + + Returns: + The final performance counter, measured in nanoseconds. This value is captured using `perf_counter_ns()` to ensure consistency with the initial performance counter, providing an accurate duration measurement. + """ + return perf_counter_ns() + + def _async_wrapper( + self, func: Callable[P, Awaitable[R]] + ) -> Callable[P, Awaitable[R]]: + """Wraps an asynchronous function for duration measurement. + + Args: + func: The asynchronous function to be wrapped. + + Returns: + A wrapped version of the input function with duration measurement capability. + """ + + async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + """An asynchronous wrapper function for measuring execution duration. + + If an exception is raised during the execution of the function, it is re-raised after + the duration measurement is stored. + """ + function_start_time, duration_measurement_start_time = self._begin_timing() + try: + result: R = await func(*args, **kwargs) + except Exception as e: + raise e + finally: + self._can_store.store( + function_start_time, + duration_measurement_start_time, + self._end_timing(), + ) + return result + + return async_wrapper + + def _sync_wrapper(self, func: Callable[P, R]) -> Callable[P, R]: + """Wraps a synchronous function for duration measurement. + + Args: + func: The synchronous function to be wrapped. + + Returns: + A wrapped version of the input function with duration measurement capability. + """ + + def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + """A synchronous wrapper function for measuring execution duration. + + If an exception is raised during the execution of the function, it is re-raised after + the duration measurement is stored. + """ + function_start_time, duration_measurement_start_time = self._begin_timing() + try: + result: R = func(*args, **kwargs) + except Exception as e: + raise e + finally: + self._can_store.store( + function_start_time, + duration_measurement_start_time, + self._end_timing(), + ) + return result + + return sync_wrapper + def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: - """Creates a wrapper around a given function to measure its execution duration. + """Creates a wrapper around a given function to measure the execution duration. The wrapper calculates the duration of function execution and stores it using the provided storage mechanism. Supports both synchronous and asynchronous functions. 
+ This method is intended to be used as a decorator. + Args: func: The function whose execution duration is to be measured. @@ -100,42 +188,6 @@ def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: A wrapped version of the input function with duration measurement capability. """ if inspect.iscoroutinefunction(func): - - async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - """An asynchronous wrapper function for measuring execution duration.""" - function_start_time = clock_gettime_ns(CLOCK_REALTIME) - duration_measurement_start_time = perf_counter_ns() - try: - result: R = await func(*args, **kwargs) - except Exception as e: - raise e - finally: - duration_measurement_end_time = perf_counter_ns() - self._can_store.store( - function_start_time, - duration_measurement_start_time, - duration_measurement_end_time, - ) - return result - - return async_wrapper # type: ignore + return self._async_wrapper(func) # type: ignore else: - - def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - """A synchronous wrapper function for measuring execution duration.""" - function_start_time = clock_gettime_ns(CLOCK_REALTIME) - duration_measurement_start_time = perf_counter_ns() - try: - result: R = func(*args, **kwargs) - except Exception as e: - raise e - finally: - duration_measurement_end_time = perf_counter_ns() - self._can_store.store( - function_start_time, - duration_measurement_start_time, - duration_measurement_end_time, - ) - return result - - return sync_wrapper + return self._sync_wrapper(func) From 3fd539093807481341a03fff1b6ac47c647c0fa0 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 11 Apr 2024 10:54:02 -0700 Subject: [PATCH 10/23] chore: go back to context manager --- performance-metrics/Pipfile.lock | 2 +- .../src/performance_metrics/datashapes.py | 18 ++ .../src/performance_metrics/function_timer.py | 203 +++--------- .../test_function_timer.py | 299 +++++++++++++----- 4 files changed, 280 insertions(+), 242 deletions(-) create mode 100644 performance-metrics/src/performance_metrics/datashapes.py diff --git a/performance-metrics/Pipfile.lock b/performance-metrics/Pipfile.lock index bc430c83d8e..5c836231b7e 100644 --- a/performance-metrics/Pipfile.lock +++ b/performance-metrics/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "93a35b59bb63ce1a4141b485ea63a81ce03448af4b50ca68b952ed0ee448220c" + "sha256": "d811fa2b7dca8a5be8b2dba79ab7200243b2e10fb65f9ee221623f2710b24372" }, "pipfile-spec": 6, "requires": { diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py new file mode 100644 index 00000000000..6c855680886 --- /dev/null +++ b/performance-metrics/src/performance_metrics/datashapes.py @@ -0,0 +1,18 @@ +"""Defines data classes and enums used in the performance metrics module.""" + +import dataclasses + + +@dataclasses.dataclass(frozen=True) +class RawDurationData: + """Represents raw duration data for a process or function. + + Attributes: + - function_start_time (int): The start time of the function. + - duration_measurement_start_time (int): The start time for duration measurement. + - duration_measurement_end_time (int): The end time for duration measurement. 
+ """ + + func_start: int + duration_start: int + duration_end: int diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py index fb83d913a9f..2cda334b797 100644 --- a/performance-metrics/src/performance_metrics/function_timer.py +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -5,93 +5,25 @@ """ from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME -from typing import Awaitable, Iterator, Protocol, Callable, TypeVar, List, Tuple -from typing_extensions import ParamSpec -import inspect +from types import TracebackType +from typing import ( + Type, + Tuple, +) +from performance_metrics.datashapes import RawDurationData +from contextlib import AbstractAsyncContextManager, AbstractContextManager -P = ParamSpec("P") -R = TypeVar("R") - -class CanStoreTimingResult(Protocol): - """Protocol for a class that can store the result of a timing operation. - - Implementing classes must provide a `store` method. - """ - - def store( - self, - function_start_time: int, - duration_measurement_start_time: int, - duration_measurement_end_time: int, - ) -> None: - """Stores the duration of an operation. - - Args: - function_start_time: The time at which the function started executing. - duration_measurement_start_time: The time at which the duration measurement started. - duration_measurement_end_time: The time at which the duration measurement ended. - """ - pass - - -class TimingResultStore(CanStoreTimingResult): - """A class that stores the result of a timing operation. - - Specifically captures the start, measurement start, and end times of function executions. - """ - - def __init__(self) -> None: - """Initializes the TimingResultStore.""" - self._storage: List[Tuple[int, int, int]] = [] - - def __len__(self) -> int: - """Returns the number of stored timing results.""" - return len(self._storage) - - def __getitem__(self, index: int) -> Tuple[int, int, int]: - """Returns the timing result at the specified index.""" - return self._storage[index] - - def __iter__(self) -> Iterator[Tuple[int, int, int]]: - """Returns an iterator over the stored timing results.""" - return iter(self._storage) - - def store( - self, - function_start_time: int, - duration_measurement_start_time: int, - duration_measurement_end_time: int, - ) -> None: - """Stores timing information of an operation in nanoseconds. - - Args: - function_start_time: The time at which the function started executing. - duration_measurement_start_time: The time at which the duration measurement started. - duration_measurement_end_time: The time at which the duration measurement ended. - """ - self._storage.append( - ( - function_start_time, - duration_measurement_start_time, - duration_measurement_end_time, - ) - ) - - -class FunctionTimer: +class FunctionTimer(AbstractAsyncContextManager, AbstractContextManager): # type: ignore """A decorator class for measuring and storing the execution duration of functions. It supports both synchronous and asynchronous functions. """ - def __init__(self, can_store: CanStoreTimingResult) -> None: - """Initializes the FunctionTimer with a specified storage function. - - Args: - can_store: A function that stores the execution duration. 
- """ - self._can_store = can_store + def __init__(self) -> None: + self._func_start_time: int | None = None + self._duration_start_time: int | None = None + self._duration_end_time: int | None = None def _begin_timing(self) -> Tuple[int, int]: """Starts the timing process, capturing both the current real-time and a high-resolution performance counter. @@ -109,85 +41,46 @@ def _end_timing(self) -> int: """ return perf_counter_ns() - def _async_wrapper( - self, func: Callable[P, Awaitable[R]] - ) -> Callable[P, Awaitable[R]]: - """Wraps an asynchronous function for duration measurement. - - Args: - func: The asynchronous function to be wrapped. - - Returns: - A wrapped version of the input function with duration measurement capability. - """ - - async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - """An asynchronous wrapper function for measuring execution duration. - - If an exception is raised during the execution of the function, it is re-raised after - the duration measurement is stored. - """ - function_start_time, duration_measurement_start_time = self._begin_timing() - try: - result: R = await func(*args, **kwargs) - except Exception as e: - raise e - finally: - self._can_store.store( - function_start_time, - duration_measurement_start_time, - self._end_timing(), - ) - return result - - return async_wrapper - - def _sync_wrapper(self, func: Callable[P, R]) -> Callable[P, R]: - """Wraps a synchronous function for duration measurement. - - Args: - func: The synchronous function to be wrapped. + def __enter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._func_start_time, self._duration_start_time = self._begin_timing() + return self - Returns: - A wrapped version of the input function with duration measurement capability. - """ - - def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - """A synchronous wrapper function for measuring execution duration. - - If an exception is raised during the execution of the function, it is re-raised after - the duration measurement is stored. - """ - function_start_time, duration_measurement_start_time = self._begin_timing() - try: - result: R = func(*args, **kwargs) - except Exception as e: - raise e - finally: - self._can_store.store( - function_start_time, - duration_measurement_start_time, - self._end_timing(), - ) - return result - - return sync_wrapper - - def measure_duration(self, func: Callable[P, R]) -> Callable[P, R]: - """Creates a wrapper around a given function to measure the execution duration. + def __exit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._duration_end_time = self._end_timing() - The wrapper calculates the duration of function execution and stores it using the provided - storage mechanism. Supports both synchronous and asynchronous functions. + async def __aenter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._func_start_time, self._duration_start_time = self._begin_timing() + return self - This method is intended to be used as a decorator. + async def __aexit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._duration_end_time = self._end_timing() - Args: - func: The function whose execution duration is to be measured. 
+ def get_data(self) -> RawDurationData: + """Returns the duration data for the function. Returns: - A wrapped version of the input function with duration measurement capability. + RawDurationData: The duration data for the function. """ - if inspect.iscoroutinefunction(func): - return self._async_wrapper(func) # type: ignore - else: - return self._sync_wrapper(func) + assert self._func_start_time is not None + assert self._duration_start_time is not None + assert self._duration_end_time is not None + + return RawDurationData( + func_start=self._func_start_time, + duration_start=self._duration_start_time, + duration_end=self._duration_end_time, + ) diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index eb1c22fabb1..8a42c1ac203 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -1,117 +1,244 @@ -"""This module contains tests for the FunctionTimer class, focusing on its ability to accurately measure and record the execution times of synchronous and asynchronous functions, including when exceptions are raised.""" +"""This module contains tests for timing the execution of synchronous and asynchronous functions using the FunctionTimer class. +It includes functions and their variants that raise exceptions to simulate errors during execution. Each test function is designed +to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous +calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios. +""" +import time import asyncio import pytest -from time import sleep -from performance_metrics.function_timer import ( - FunctionTimer, - CanStoreTimingResult, - TimingResultStore, -) +from performance_metrics.function_timer import FunctionTimer -@pytest.fixture -def timing_result_store() -> TimingResultStore: - """Fixture that provides an empty list to store durations. 
This list is reset before each test.""" - return TimingResultStore() +def synchronous_function() -> None: + """Prints a message indicating a synchronous function is running.""" + print("synchronous_function") -@pytest.fixture -def function_timer(timing_result_store: CanStoreTimingResult) -> FunctionTimer: - """Creates a FunctionTimer instance with a mock storage function for testing.""" - return FunctionTimer(can_store=timing_result_store) +def synchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in synchronous execution.""" + print("synchronous_function_with_exception") + raise Exception("An exception") -def test_sync_function( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests accurate measurement of a synchronous function's execution time.""" +async def asynchronous_function() -> None: + """Prints a message indicating an asynchronous function is running.""" + print("asynchronous_function") - @function_timer.measure_duration - def sync_test() -> None: - sleep(0.01) - sync_test() - assert len(timing_result_store) == 1 - assert timing_result_store[0][1] < timing_result_store[0][2] +async def asynchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in asynchronous execution.""" + print("asynchronous_function_with_exception") + raise Exception("An exception") -@pytest.mark.asyncio -async def test_async_function( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests accurate measurement of an asynchronous function's execution time.""" +async def long_running_task() -> None: + """Simulates a longer running asynchronous task.""" + await asyncio.sleep(2) - @function_timer.measure_duration - async def async_test() -> None: - await asyncio.sleep(0.01) - await async_test() - assert len(timing_result_store) == 1 - assert timing_result_store[0][1] < timing_result_store[0][2] +async def short_running_task() -> None: + """Simulates a shorter running asynchronous task.""" + await asyncio.sleep(0.5) -def test_sync_function_exception( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests duration measurement of a synchronous function that raises an exception.""" +################## +# TEST FUNCTIONS # +################## - @function_timer.measure_duration - def sync_test_exception() -> None: - sleep(0.01) - raise ValueError("Intentional Error") - with pytest.raises(ValueError): - sync_test_exception() - assert len(timing_result_store) == 1 - assert timing_result_store[0][1] < timing_result_store[0][2] +def test_synchronous_function() -> None: + """Tests that the synchronous function is timed correctly by FunctionTimer.""" + with FunctionTimer() as timer: + synchronous_function() + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end -@pytest.mark.asyncio -async def test_async_function_exception( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests duration measurement of an asynchronous function that raises an exception.""" - @function_timer.measure_duration - async def async_test_exception() -> None: - await asyncio.sleep(0.01) - raise ValueError("Intentional Error") +def test_synchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in synchronous functions correctly.""" + with pytest.raises(Exception): + with FunctionTimer() as timer: + 
synchronous_function_with_exception() - with pytest.raises(ValueError): - await async_test_exception() - assert len(timing_result_store) == 1 - assert timing_result_store[0][1] < timing_result_store[0][2] + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end -def test_sync_function_multiple_calls( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests duration measurement of multiple calls to a synchronous function.""" +async def test_asynchronous_function() -> None: + """Tests that the asynchronous function is timed correctly by FunctionTimer.""" + async with FunctionTimer() as timer: + await asynchronous_function() - @function_timer.measure_duration - def sync_test_multiple_calls() -> None: - sleep(0.01) + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end - sync_test_multiple_calls() - sync_test_multiple_calls() - assert len(timing_result_store) == 2 - assert timing_result_store[0][0] < timing_result_store[1][0] - assert all(start < end for _, start, end in timing_result_store) +async def test_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in asynchronous functions correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as timer: + await asynchronous_function_with_exception() -@pytest.mark.asyncio -async def test_async_function_multiple_calls( - function_timer: FunctionTimer, timing_result_store: TimingResultStore -) -> None: - """Tests duration measurement of multiple calls to an asynchronous function.""" + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end - @function_timer.measure_duration - async def async_test_multiple_calls() -> None: - await asyncio.sleep(0.01) - await async_test_multiple_calls() - await async_test_multiple_calls() - assert len(timing_result_store) == 2 - assert timing_result_store[0][0] < timing_result_store[1][0] - assert all(start < end for _, start, end in timing_result_store) +async def test_asynchronous_and_synchronous_function() -> None: + """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" + async with FunctionTimer() as timer: + synchronous_function() + await asynchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_synchronous_and_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle a mixed sequence of functions, including an exception, correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as timer: + synchronous_function_with_exception() + await asynchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_nested_synchronous_functions() -> None: + """Tests that the FunctionTimer correctly times nested synchronous functions.""" + with FunctionTimer() as outer_timer: + synchronous_function() + with FunctionTimer() as inner_timer: + synchronous_function() + + outer_duration_data = outer_timer.get_data() + inner_duration_data = inner_timer.get_data() + + assert outer_duration_data.duration_start < outer_duration_data.duration_end + assert inner_duration_data.duration_start < inner_duration_data.duration_end + assert outer_duration_data.duration_start < inner_duration_data.duration_start + assert 
outer_duration_data.duration_end >= inner_duration_data.duration_end
+
+
+async def test_timing_synchronous_function_nested_inside_async_function() -> None:
+    """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager."""
+    async with FunctionTimer() as async_timer:
+        await asynchronous_function()
+        with FunctionTimer() as sync_timer:
+            synchronous_function()
+
+    async_duration_data = async_timer.get_data()
+    sync_duration_data = sync_timer.get_data()
+
+    assert async_duration_data.duration_start < async_duration_data.duration_end
+    assert sync_duration_data.duration_start < sync_duration_data.duration_end
+    assert async_duration_data.duration_start < sync_duration_data.duration_start
+    assert async_duration_data.duration_end >= sync_duration_data.duration_end
+
+
+def test_instantaneous_function() -> None:
+    """Tests that the FunctionTimer can measure the time of an almost instantaneous function."""
+
+    def instantaneous_function() -> None:
+        """A function that executes almost instantaneously."""
+        pass
+
+    with FunctionTimer() as timer:
+        instantaneous_function()
+
+    duration_data = timer.get_data()
+    assert duration_data.duration_start <= duration_data.duration_end
+
+
+def test_known_duration_function() -> None:
+    """Tests the FunctionTimer's accuracy by comparing with a known sleep duration."""
+    sleep_duration = 0.5
+
+    def known_duration_function() -> None:
+        time.sleep(sleep_duration)
+
+    with FunctionTimer() as timer:
+        known_duration_function()
+
+    duration_data = timer.get_data()
+    measured_duration_nanoseconds = abs(
+        duration_data.duration_start - duration_data.duration_end
+    )
+    measured_duration_seconds = measured_duration_nanoseconds / 1_000_000_000
+    assert abs(measured_duration_seconds - sleep_duration) < 0.05
+
+
+async def test_async_functions_in_parallel() -> None:
+    """Tests timing of multiple asynchronous functions executed in parallel."""
+
+    async def async_sleep_function(duration: float) -> None:
+        await asyncio.sleep(duration)
+
+    async with FunctionTimer() as timer:
+        await asyncio.gather(
+            async_sleep_function(0.5),
+            async_sleep_function(1),
+            async_sleep_function(1.5),
+        )
+
+    duration_data = timer.get_data()
+    assert duration_data.duration_start < duration_data.duration_end
+
+
+async def test_function_timer_with_async_contexts() -> None:
+    """Tests that the FunctionTimer context manager correctly times overlapping asynchronous tasks."""
+    # 1. Start long_running_task
+    # 2. __aenter__ will be called on long_running_task
+    # 3. Start short_running_task
+    # 4. __aenter__ will be called on short_running_task
+    # 5. Finish short_running_task
+    # 6. __aexit__ will be called on short_running_task
+    # 7. Finish long_running_task
+    # 8. __aexit__ will be called on long_running_task
+
+    async with FunctionTimer() as f1_timer:
+        await long_running_task()
+
+        async with FunctionTimer() as f2_timer:
+            await short_running_task()
+
+    f1_duration_data = f1_timer.get_data()
+    f2_duration_data = f2_timer.get_data()
+
+    assert f1_duration_data.duration_start < f1_duration_data.duration_end
+    assert f2_duration_data.duration_start < f2_duration_data.duration_end
+    assert f1_duration_data.duration_start < f2_duration_data.duration_start
+    assert f1_duration_data.duration_end >= f2_duration_data.duration_end
+
+
+def test_direct_use_without_context_manager() -> None:
+    """Tests the behavior of FunctionTimer when used directly without a context manager block.
+ + Verifies that the start and end times are not set and that an appropriate assertion is raised when attempting to access them. + """ + timer = FunctionTimer() + assert ( + timer._func_start_time is None + ), "_func_start_time should be None when not used within a context manager" + assert ( + timer._duration_start_time is None + ), "_duration_start_time should be None when not used within a context manager" + assert ( + timer._duration_end_time is None + ), "_duration_end_time should be None when not used within a context manager" + + with pytest.raises(AssertionError): + timer.get_data() + + +def test_calling_get_data_before_context_manager_finishes() -> None: + """Tests that attempting to call get_data before the context manager has properly finished (exited) results in an assertion error. + + This simulates the scenario where get_data is called prematurely, ensuring the timer enforces correct usage patterns. + """ + with pytest.raises(AssertionError): + with FunctionTimer() as timer: + synchronous_function() + timer.get_data() From 5e3e8cdca5326de8a0d1c1b6d82643df61f9107c Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 10 Apr 2024 12:08:10 -0700 Subject: [PATCH 11/23] init --- .../src/performance_metrics/datashapes.py | 41 +++++++---- .../robot_context_tracker.py | 35 +++++++++ .../test_function_timer.py | 2 + .../test_robot_context_tracker.py | 71 +++++++++++++++++++ .../opentrons_shared_data/robot/dev_types.py | 2 +- 5 files changed, 138 insertions(+), 13 deletions(-) create mode 100644 performance-metrics/src/performance_metrics/robot_context_tracker.py create mode 100644 performance-metrics/tests/performance_metrics/test_robot_context_tracker.py diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py index 6c855680886..467a6bbbcd5 100644 --- a/performance-metrics/src/performance_metrics/datashapes.py +++ b/performance-metrics/src/performance_metrics/datashapes.py @@ -1,18 +1,35 @@ -"""Defines data classes and enums used in the performance metrics module.""" - +import datetime +import enum import dataclasses +from typing import Literal + + +class RobotContextStates(enum.Enum): + STARTING_UP = (0, Literal["STARTING_UP"]) + CALIBRATING = (1, Literal["CALIBRATING"]) + ANALYZING_PROTOCOL = (2, Literal["ANALYZING_PROTOCOL"]) + RUNNING_PROTOCOL = (3, Literal["RUNNING_PROTOCOL"]) + SHUTTING_DOWN = (4, Literal["SHUTTING_DOWN"]) + + def __init__(self, state_id: str, state_name: str) -> None: + self.state_id = state_id + self.state_name = state_name + + @classmethod + def from_id(cls, state_id: int) -> "RobotContextStates": + for state in RobotContextStates: + if state.state_id == state_id: + return state + raise ValueError(f"Invalid state id: {state_id}") -@dataclasses.dataclass(frozen=True) +@dataclasses.dataclass class RawDurationData: - """Represents raw duration data for a process or function. + function_start_time: int + duration_measurement_start_time: int + duration_measurement_end_time: int - Attributes: - - function_start_time (int): The start time of the function. - - duration_measurement_start_time (int): The start time for duration measurement. - - duration_measurement_end_time (int): The end time for duration measurement. 
- """ - func_start: int - duration_start: int - duration_end: int +@dataclasses.dataclass +class RawContextData(RawDurationData): + state: int diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py new file mode 100644 index 00000000000..57f9ad8fe54 --- /dev/null +++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py @@ -0,0 +1,35 @@ +from functools import wraps, partial +from performance_metrics.datashapes import RawContextData, RobotContextStates, RawDurationData +from performance_metrics.function_timer import FunctionTimer + + +class RobotContextTracker: + def __init__(self): + self._storage = [] + + def _store( + self, state: RobotContextStates, raw_duration_data: RawDurationData + ) -> None: + self._storage.append( + RawContextData( + function_start_time=raw_duration_data.function_start_time, + duration_measurement_start_time=raw_duration_data.duration_measurement_start_time, + duration_measurement_end_time=raw_duration_data.duration_measurement_end_time, + state=state.state_id, + ) + ) + + def track(self, state: RobotContextStates): + def inner_decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + # Can't set state=state here for some reason + # It gets overwritten when partial(raw_duration_data) is called + partial_store_func = partial(self._store, state) + timer = FunctionTimer(callback=partial_store_func) + result = timer.measure_duration(func)(*args, **kwargs) + return result + + return wrapper + + return inner_decorator diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index 8a42c1ac203..b617e242d65 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -6,6 +6,8 @@ """ import time import asyncio +from typing import List +from performance_metrics.datashapes import RawDurationData import pytest from performance_metrics.function_timer import FunctionTimer diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py new file mode 100644 index 00000000000..ae20e7bfc08 --- /dev/null +++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -0,0 +1,71 @@ +import pytest +from performance_metrics.robot_context_tracker import RobotContextTracker +from performance_metrics.datashapes import RobotContextStates +from time import sleep + +# In milliseconds +STARTING_TIME = 1 // 1000.0 +CALIBRATING_TIME = 2 // 1000.0 +ANALYZING_TIME = 3 // 1000.0 +RUNNING_TIME = 4 // 1000.0 +SHUTTING_DOWN_TIME = 5 // 1000.0 + + +@pytest.fixture +def robot_context_tracker() -> RobotContextTracker: + return RobotContextTracker() + + +def test_robot_context_tracker(robot_context_tracker: RobotContextTracker) -> None: + @robot_context_tracker.track(state=RobotContextStates.STARTING_UP) + def starting_robot() -> None: + sleep(STARTING_TIME) + + @robot_context_tracker.track(state=RobotContextStates.CALIBRATING) + def calibrating_robot() -> None: + sleep(CALIBRATING_TIME) + + @robot_context_tracker.track(state=RobotContextStates.ANALYZING_PROTOCOL) + def analyzing_protocol() -> None: + sleep(ANALYZING_TIME) + + @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL) + def running_protocol() -> None: + sleep(RUNNING_TIME) + + 
@robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN) + def shutting_down_robot() -> None: + sleep(SHUTTING_DOWN_TIME) + + assert len(robot_context_tracker._storage) == 0 + starting_robot() + assert len(robot_context_tracker._storage) == 1 + calibrating_robot() + assert len(robot_context_tracker._storage) == 2 + analyzing_protocol() + assert len(robot_context_tracker._storage) == 3 + running_protocol() + assert len(robot_context_tracker._storage) == 4 + shutting_down_robot() + + assert len(robot_context_tracker._storage) == 5 + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[0].state) + == RobotContextStates.STARTING_UP + ) + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[1].state) + == RobotContextStates.CALIBRATING + ) + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[2].state) + == RobotContextStates.ANALYZING_PROTOCOL + ) + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[3].state) + == RobotContextStates.RUNNING_PROTOCOL + ) + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[4].state) + == RobotContextStates.SHUTTING_DOWN + ) diff --git a/shared-data/python/opentrons_shared_data/robot/dev_types.py b/shared-data/python/opentrons_shared_data/robot/dev_types.py index 555ec6008ba..e3d2d080070 100644 --- a/shared-data/python/opentrons_shared_data/robot/dev_types.py +++ b/shared-data/python/opentrons_shared_data/robot/dev_types.py @@ -34,4 +34,4 @@ class RobotDefinition(TypedDict): displayName: str robotType: RobotType - models: List[str] + models: List[str] \ No newline at end of file From a0873facfa13c337cf60d38afe79250d4c9981f6 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 10 Apr 2024 13:02:54 -0700 Subject: [PATCH 12/23] clean clean clean --- .../src/performance_metrics/datashapes.py | 49 ++++++++++---- .../src/performance_metrics/function_timer.py | 11 ++-- .../robot_context_tracker.py | 48 +++++++++++--- .../test_robot_context_tracker.py | 64 +++++++++---------- 4 files changed, 115 insertions(+), 57 deletions(-) diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py index 467a6bbbcd5..b9c43a98e61 100644 --- a/performance-metrics/src/performance_metrics/datashapes.py +++ b/performance-metrics/src/performance_metrics/datashapes.py @@ -1,22 +1,35 @@ -import datetime -import enum +"""Defines data classes and enums used in the performance metrics module.""" + +from enum import Enum import dataclasses -from typing import Literal -class RobotContextStates(enum.Enum): - STARTING_UP = (0, Literal["STARTING_UP"]) - CALIBRATING = (1, Literal["CALIBRATING"]) - ANALYZING_PROTOCOL = (2, Literal["ANALYZING_PROTOCOL"]) - RUNNING_PROTOCOL = (3, Literal["RUNNING_PROTOCOL"]) - SHUTTING_DOWN = (4, Literal["SHUTTING_DOWN"]) +class RobotContextStates(Enum): + """Enum representing different states of a robot's operation context.""" + + STARTING_UP = 0, "STARTING_UP" + CALIBRATING = 1, "CALIBRATING" + ANALYZING_PROTOCOL = 2, "ANALYZING_PROTOCOL" + RUNNING_PROTOCOL = 3, "RUNNING_PROTOCOL" + SHUTTING_DOWN = 4, "SHUTTING_DOWN" - def __init__(self, state_id: str, state_name: str) -> None: + def __init__(self, state_id: int, state_name: str) -> None: self.state_id = state_id self.state_name = state_name @classmethod def from_id(cls, state_id: int) -> "RobotContextStates": + """Returns the enum member matching the given state ID. + + Args: + state_id: The ID of the state to retrieve. 
+
+        Returns:
+            RobotContextStates: The enum member corresponding to the given ID.
+
+        Raises:
+            ValueError: If no matching state is found.
+        """
         for state in RobotContextStates:
             if state.state_id == state_id:
                 return state
@@ -25,6 +38,14 @@ def from_id(cls, state_id: int) -> "RobotContextStates":
 
 @dataclasses.dataclass
 class RawDurationData:
+    """Represents raw duration data for a process or function.
+
+    Attributes:
+    - function_start_time (int): The start time of the function.
+    - duration_measurement_start_time (int): The start time for duration measurement.
+    - duration_measurement_end_time (int): The end time for duration measurement.
+    """
+
     function_start_time: int
     duration_measurement_start_time: int
     duration_measurement_end_time: int
@@ -32,4 +53,10 @@ class RawDurationData:
 
 @dataclasses.dataclass
 class RawContextData(RawDurationData):
-    state: int
+    """Extends RawDurationData with context state information.
+
+    Attributes:
+    - state (RobotContextStates): The current state of the context.
+    """
+
+    state: RobotContextStates
diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py
index 2cda334b797..d9c8f95922a 100644
--- a/performance-metrics/src/performance_metrics/function_timer.py
+++ b/performance-metrics/src/performance_metrics/function_timer.py
@@ -1,7 +1,8 @@
-"""This module offers a mechanism for measuring and storing the execution durations of both synchronous and asynchronous functions.
-
-The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. It utilizes `perf_counter_ns` for high-resolution performance counter measurements and `clock_gettime_ns(CLOCK_REALTIME)` for real-time clock measurements. The use of `perf_counter_ns` ensures the highest possible resolution timer, which is essential for accurate duration measurement, especially for short-running functions. `clock_gettime_ns(CLOCK_REALTIME)` is used to capture the actual start time in real-world time, which is useful for correlating events or logs with other time-based data.
+"""Module for measuring and storing execution durations of functions.
 
+Provides a `FunctionTimer` class that can be used as a context manager to measure
+the execution time of both synchronous and asynchronous functions, utilizing high-resolution
+performance counters for accuracy.
 """
 
 from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME
@@ -29,7 +30,7 @@ def _begin_timing(self) -> Tuple[int, int]:
         """Starts the timing process, capturing both the current real-time and a high-resolution performance counter.
 
         Returns:
-            A tuple containing the current real-time (`clock_gettime_ns(CLOCK_REALTIME)`) and an initial performance counter (`perf_counter_ns()`). Both values are measured in nanoseconds. The combination of these counters allows us to accurately measure execution durations while also correlating these measurements to real-world time.
+            A tuple containing the current real-time (`clock_gettime_ns(CLOCK_REALTIME)`) and an initial performance counter (`perf_counter_ns()`). Both values are measured in nanoseconds.
         """
         return clock_gettime_ns(CLOCK_REALTIME), perf_counter_ns()
 
@@ -37,7 +38,7 @@ def _end_timing(self) -> int:
         """Ends the timing process, capturing the final high-resolution performance counter.
 
         Returns:
-            The final performance counter, measured in nanoseconds. 
This value is captured using `perf_counter_ns()` to ensure consistency with the initial performance counter, providing an accurate duration measurement. + The final performance counter, measured in nanoseconds. """ return perf_counter_ns() diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py index 57f9ad8fe54..49aa1302e18 100644 --- a/performance-metrics/src/performance_metrics/robot_context_tracker.py +++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py @@ -1,32 +1,62 @@ +"""Module for tracking robot context and execution duration for different operations.""" + from functools import wraps, partial -from performance_metrics.datashapes import RawContextData, RobotContextStates, RawDurationData +from typing import Callable, TypeVar, List +from typing_extensions import ParamSpec +from performance_metrics.datashapes import ( + RawContextData, + RobotContextStates, + RawDurationData, +) from performance_metrics.function_timer import FunctionTimer +P = ParamSpec("P") +R = TypeVar("R") + class RobotContextTracker: - def __init__(self): - self._storage = [] + """Tracks and stores robot context and execution duration for different operations.""" + + def __init__(self) -> None: + """Initializes the RobotContextTracker with an empty storage list.""" + self._storage: List[RawContextData] = [] def _store( self, state: RobotContextStates, raw_duration_data: RawDurationData ) -> None: + """Stores the context and duration data for a robot operation. + + Args: + state : The state of the robot during the operation. + raw_duration_data : The duration data for the operation. + """ self._storage.append( RawContextData( function_start_time=raw_duration_data.function_start_time, duration_measurement_start_time=raw_duration_data.duration_measurement_start_time, duration_measurement_end_time=raw_duration_data.duration_measurement_end_time, - state=state.state_id, + state=state, ) ) - def track(self, state: RobotContextStates): - def inner_decorator(func): + def track(self, state: RobotContextStates) -> Callable: # type: ignore + """Decorator factory for tracking the execution duration and state of robot operations. + + Args: + state: The state to track for the decorated function. + + Returns: + Callable: A decorator that wraps a function to track its execution duration and state. 
+ """ + + def inner_decorator(func: Callable[P, R]) -> Callable[P, R]: @wraps(func) - def wrapper(*args, **kwargs): - # Can't set state=state here for some reason - # It gets overwritten when partial(raw_duration_data) is called + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + # Create a partially filled function with the current state pre-filled partial_store_func = partial(self._store, state) + # Initialize the FunctionTimer with the partial function as a callback timer = FunctionTimer(callback=partial_store_func) + # Measure and store the duration of the function call result = timer.measure_duration(func)(*args, **kwargs) return result diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py index ae20e7bfc08..ac96faf9756 100644 --- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py +++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -1,22 +1,27 @@ +"""Tests for the RobotContextTracker class in performance_metrics.robot_context_tracker.""" + import pytest from performance_metrics.robot_context_tracker import RobotContextTracker from performance_metrics.datashapes import RobotContextStates from time import sleep -# In milliseconds -STARTING_TIME = 1 // 1000.0 -CALIBRATING_TIME = 2 // 1000.0 -ANALYZING_TIME = 3 // 1000.0 -RUNNING_TIME = 4 // 1000.0 -SHUTTING_DOWN_TIME = 5 // 1000.0 +# Corrected times in seconds +STARTING_TIME = 0.001 +CALIBRATING_TIME = 0.002 +ANALYZING_TIME = 0.003 +RUNNING_TIME = 0.004 +SHUTTING_DOWN_TIME = 0.005 @pytest.fixture def robot_context_tracker() -> RobotContextTracker: + """Fixture to provide a fresh instance of RobotContextTracker for each test.""" return RobotContextTracker() def test_robot_context_tracker(robot_context_tracker: RobotContextTracker) -> None: + """Tests the tracking of various robot context states through RobotContextTracker.""" + @robot_context_tracker.track(state=RobotContextStates.STARTING_UP) def starting_robot() -> None: sleep(STARTING_TIME) @@ -37,35 +42,30 @@ def running_protocol() -> None: def shutting_down_robot() -> None: sleep(SHUTTING_DOWN_TIME) - assert len(robot_context_tracker._storage) == 0 + # Ensure storage is initially empty + assert ( + len(robot_context_tracker._storage) == 0 + ), "Storage should be initially empty." + starting_robot() - assert len(robot_context_tracker._storage) == 1 calibrating_robot() - assert len(robot_context_tracker._storage) == 2 analyzing_protocol() - assert len(robot_context_tracker._storage) == 3 running_protocol() - assert len(robot_context_tracker._storage) == 4 shutting_down_robot() - assert len(robot_context_tracker._storage) == 5 - assert ( - RobotContextStates.from_id(robot_context_tracker._storage[0].state) - == RobotContextStates.STARTING_UP - ) - assert ( - RobotContextStates.from_id(robot_context_tracker._storage[1].state) - == RobotContextStates.CALIBRATING - ) - assert ( - RobotContextStates.from_id(robot_context_tracker._storage[2].state) - == RobotContextStates.ANALYZING_PROTOCOL - ) - assert ( - RobotContextStates.from_id(robot_context_tracker._storage[3].state) - == RobotContextStates.RUNNING_PROTOCOL - ) - assert ( - RobotContextStates.from_id(robot_context_tracker._storage[4].state) - == RobotContextStates.SHUTTING_DOWN - ) + # Verify that all states were tracked + assert len(robot_context_tracker._storage) == 5, "All states should be tracked." 
+ + # Validate the sequence and accuracy of tracked states + expected_states = [ + RobotContextStates.STARTING_UP, + RobotContextStates.CALIBRATING, + RobotContextStates.ANALYZING_PROTOCOL, + RobotContextStates.RUNNING_PROTOCOL, + RobotContextStates.SHUTTING_DOWN, + ] + for i, state in enumerate(expected_states): + assert ( + RobotContextStates.from_id(robot_context_tracker._storage[i].state.state_id) + == state + ), f"State at index {i} should be {state}." From ea49b1efce2f8f8cf577ea653570029bbcf7dd99 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Wed, 10 Apr 2024 13:13:25 -0700 Subject: [PATCH 13/23] more test cases --- .../test_robot_context_tracker.py | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py index ac96faf9756..cb35c4bc827 100644 --- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py +++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -1,5 +1,6 @@ """Tests for the RobotContextTracker class in performance_metrics.robot_context_tracker.""" +import asyncio import pytest from performance_metrics.robot_context_tracker import RobotContextTracker from performance_metrics.datashapes import RobotContextStates @@ -69,3 +70,139 @@ def shutting_down_robot() -> None: RobotContextStates.from_id(robot_context_tracker._storage[i].state.state_id) == state ), f"State at index {i} should be {state}." + + +def test_multiple_operations_single_state( + robot_context_tracker: RobotContextTracker, +) -> None: + """Tests tracking multiple operations within a single robot context state.""" + + @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL) + def first_operation() -> None: + sleep(RUNNING_TIME) + + @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL) + def second_operation() -> None: + sleep(RUNNING_TIME) + + first_operation() + second_operation() + + assert ( + len(robot_context_tracker._storage) == 2 + ), "Both operations should be tracked." + assert ( + robot_context_tracker._storage[0].state + == robot_context_tracker._storage[1].state + == RobotContextStates.RUNNING_PROTOCOL + ), "Both operations should have the same state." + + +def test_exception_handling_in_tracked_function( + robot_context_tracker: RobotContextTracker, +) -> None: + """Ensures exceptions in tracked operations are handled correctly.""" + + @robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN) + def error_prone_operation() -> None: + sleep(SHUTTING_DOWN_TIME) + raise RuntimeError("Simulated operation failure") + + with pytest.raises(RuntimeError): + error_prone_operation() + + assert ( + len(robot_context_tracker._storage) == 1 + ), "Failed operation should still be tracked." + assert ( + robot_context_tracker._storage[0].state == RobotContextStates.SHUTTING_DOWN + ), "State should be correctly logged despite the exception." + + +@pytest.mark.asyncio +async def test_async_operation_tracking( + robot_context_tracker: RobotContextTracker, +) -> None: + """Tests tracking of an asynchronous operation.""" + + @robot_context_tracker.track(state=RobotContextStates.ANALYZING_PROTOCOL) + async def async_analyzing_operation() -> None: + await asyncio.sleep(ANALYZING_TIME) + + await async_analyzing_operation() + + assert ( + len(robot_context_tracker._storage) == 1 + ), "Async operation should be tracked." 
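+    # The single stored record should carry the state the decorator was registered with.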
+ assert ( + robot_context_tracker._storage[0].state == RobotContextStates.ANALYZING_PROTOCOL + ), "State should be ANALYZING_PROTOCOL." + + +@pytest.mark.asyncio +async def test_async_operation_timing_accuracy( + robot_context_tracker: RobotContextTracker, +) -> None: + """Tests the timing accuracy of an async operation tracking.""" + + @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL) + async def async_running_operation() -> None: + await asyncio.sleep(RUNNING_TIME) + + await async_running_operation() + + duration_data = robot_context_tracker._storage[0] + measured_duration = ( + duration_data.duration_measurement_end_time + - duration_data.duration_measurement_start_time + ) + assert ( + abs(measured_duration - RUNNING_TIME * 1e9) < 1e7 + ), "Measured duration for async operation should closely match the expected duration." + + +@pytest.mark.asyncio +async def test_exception_in_async_operation( + robot_context_tracker: RobotContextTracker, +) -> None: + """Ensures exceptions in tracked async operations are correctly handled.""" + + @robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN) + async def async_error_prone_operation() -> None: + await asyncio.sleep(SHUTTING_DOWN_TIME) + raise RuntimeError("Simulated async operation failure") + + with pytest.raises(RuntimeError): + await async_error_prone_operation() + + assert ( + len(robot_context_tracker._storage) == 1 + ), "Failed async operation should still be tracked." + assert ( + robot_context_tracker._storage[0].state == RobotContextStates.SHUTTING_DOWN + ), "State should be SHUTTING_DOWN despite the exception." + + +@pytest.mark.asyncio +async def test_concurrent_async_operations( + robot_context_tracker: RobotContextTracker, +) -> None: + """Tests tracking of concurrent async operations.""" + + @robot_context_tracker.track(state=RobotContextStates.CALIBRATING) + async def first_async_calibrating() -> None: + await asyncio.sleep(CALIBRATING_TIME) + + @robot_context_tracker.track(state=RobotContextStates.CALIBRATING) + async def second_async_calibrating() -> None: + await asyncio.sleep(CALIBRATING_TIME) + + await asyncio.gather(first_async_calibrating(), second_async_calibrating()) + + assert ( + len(robot_context_tracker._storage) == 2 + ), "Both concurrent async operations should be tracked." + assert all( + data.state == RobotContextStates.CALIBRATING + for data in robot_context_tracker._storage + ), "All tracked operations should be in CALIBRATING state." 
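
The concurrent-calibration test above works because `track()` constructs a fresh `FunctionTimer` for every wrapped call, so interleaved coroutines record independent start/end pairs instead of sharing mutable timing state. A minimal sketch of that per-call pattern, using only the standard library (every name below is illustrative; none of it comes from these patches):

    import asyncio
    from time import perf_counter_ns
    from types import TracebackType
    from typing import List, Optional, Type


    class ToyTimer:
        """Async context manager that records one start/end pair per use."""

        async def __aenter__(self) -> "ToyTimer":
            self.start = perf_counter_ns()
            return self

        async def __aexit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc_val: Optional[BaseException],
            exc_tb: Optional[TracebackType],
        ) -> None:
            self.end = perf_counter_ns()


    async def tracked_sleep(results: List[int], seconds: float) -> None:
        # A fresh ToyTimer per call keeps concurrent measurements independent;
        # one shared instance would be clobbered by overlapping tasks.
        async with ToyTimer() as timer:
            await asyncio.sleep(seconds)
        results.append(timer.end - timer.start)


    async def main() -> None:
        results: List[int] = []
        await asyncio.gather(tracked_sleep(results, 0.002), tracked_sleep(results, 0.002))
        assert len(results) == 2 and all(d > 0 for d in results)


    asyncio.run(main())
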
From cd130ac46dea762f9eda5a03a93f50610f565c1a Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 11 Apr 2024 08:30:51 -0700 Subject: [PATCH 14/23] oops --- shared-data/python/opentrons_shared_data/robot/dev_types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared-data/python/opentrons_shared_data/robot/dev_types.py b/shared-data/python/opentrons_shared_data/robot/dev_types.py index e3d2d080070..555ec6008ba 100644 --- a/shared-data/python/opentrons_shared_data/robot/dev_types.py +++ b/shared-data/python/opentrons_shared_data/robot/dev_types.py @@ -34,4 +34,4 @@ class RobotDefinition(TypedDict): displayName: str robotType: RobotType - models: List[str] \ No newline at end of file + models: List[str] From 0d4f3a6d19f3890c5c3545ecbc1eb28f661a0287 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 11 Apr 2024 10:05:51 -0700 Subject: [PATCH 15/23] Don't use function timer as a decorator --- .../src/performance_metrics/datashapes.py | 10 +++++----- .../robot_context_tracker.py | 18 +++++++++--------- .../performance_metrics/test_function_timer.py | 11 +++++++++-- .../test_robot_context_tracker.py | 4 ++-- 4 files changed, 25 insertions(+), 18 deletions(-) diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py index b9c43a98e61..21cf79f7c4e 100644 --- a/performance-metrics/src/performance_metrics/datashapes.py +++ b/performance-metrics/src/performance_metrics/datashapes.py @@ -36,7 +36,7 @@ def from_id(cls, state_id: int) -> "RobotContextStates": raise ValueError(f"Invalid state id: {state_id}") -@dataclasses.dataclass +@dataclasses.dataclass(frozen=True) class RawDurationData: """Represents raw duration data for a process or function. @@ -46,12 +46,12 @@ class RawDurationData: - duration_measurement_end_time (int): The end time for duration measurement. """ - function_start_time: int - duration_measurement_start_time: int - duration_measurement_end_time: int + func_start: int + duration_start: int + duration_end: int -@dataclasses.dataclass +@dataclasses.dataclass(frozen=True) class RawContextData(RawDurationData): """Extends RawDurationData with context state information. 
diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py index 49aa1302e18..463a55ae1e4 100644 --- a/performance-metrics/src/performance_metrics/robot_context_tracker.py +++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py @@ -32,9 +32,9 @@ def _store( """ self._storage.append( RawContextData( - function_start_time=raw_duration_data.function_start_time, - duration_measurement_start_time=raw_duration_data.duration_measurement_start_time, - duration_measurement_end_time=raw_duration_data.duration_measurement_end_time, + func_start=raw_duration_data.func_start, + duration_start=raw_duration_data.duration_start, + duration_end=raw_duration_data.duration_end, state=state, ) ) @@ -52,12 +52,12 @@ def track(self, state: RobotContextStates) -> Callable: # type: ignore def inner_decorator(func: Callable[P, R]) -> Callable[P, R]: @wraps(func) def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - # Create a partially filled function with the current state pre-filled - partial_store_func = partial(self._store, state) - # Initialize the FunctionTimer with the partial function as a callback - timer = FunctionTimer(callback=partial_store_func) - # Measure and store the duration of the function call - result = timer.measure_duration(func)(*args, **kwargs) + + try: + with FunctionTimer() as timer: + result = func(*args, **kwargs) + finally: + self._store(state, timer.get_data()) return result return wrapper diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index b617e242d65..08287d4f061 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -6,8 +6,6 @@ """ import time import asyncio -from typing import List -from performance_metrics.datashapes import RawDurationData import pytest from performance_metrics.function_timer import FunctionTimer @@ -67,6 +65,10 @@ def test_synchronous_function_with_exception() -> None: duration_data = timer.get_data() assert duration_data.duration_start < duration_data.duration_end +async def test_asynchronous_function() -> None: + """Tests that the asynchronous function is timed correctly by FunctionTimer.""" + async with FunctionTimer() as timer: + await asynchronous_function() async def test_asynchronous_function() -> None: """Tests that the asynchronous function is timed correctly by FunctionTimer.""" @@ -86,6 +88,11 @@ async def test_asynchronous_function_with_exception() -> None: duration_data = timer.get_data() assert duration_data.duration_start < duration_data.duration_end +async def test_asynchronous_and_synchronous_function() -> None: + """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" + async with FunctionTimer() as timer: + synchronous_function() + await asynchronous_function() async def test_asynchronous_and_synchronous_function() -> None: """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py index cb35c4bc827..566ced1f05a 100644 --- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py +++ 
b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -153,8 +153,8 @@ async def async_running_operation() -> None: duration_data = robot_context_tracker._storage[0] measured_duration = ( - duration_data.duration_measurement_end_time - - duration_data.duration_measurement_start_time + duration_data.duration_end + - duration_data.duration_start ) assert ( abs(measured_duration - RUNNING_TIME * 1e9) < 1e7 From 2f95d163178e35d4f91f691927c16a4a02f442d9 Mon Sep 17 00:00:00 2001 From: Derek Maggio Date: Thu, 11 Apr 2024 10:07:35 -0700 Subject: [PATCH 16/23] formatting and linting --- .../src/performance_metrics/robot_context_tracker.py | 2 +- .../tests/performance_metrics/test_function_timer.py | 1 + .../tests/performance_metrics/test_robot_context_tracker.py | 5 +---- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py index 463a55ae1e4..4f90823641e 100644 --- a/performance-metrics/src/performance_metrics/robot_context_tracker.py +++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py @@ -1,6 +1,6 @@ """Module for tracking robot context and execution duration for different operations.""" -from functools import wraps, partial +from functools import wraps from typing import Callable, TypeVar, List from typing_extensions import ParamSpec from performance_metrics.datashapes import ( diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py index 08287d4f061..e9393442b9b 100644 --- a/performance-metrics/tests/performance_metrics/test_function_timer.py +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -65,6 +65,7 @@ def test_synchronous_function_with_exception() -> None: duration_data = timer.get_data() assert duration_data.duration_start < duration_data.duration_end + async def test_asynchronous_function() -> None: """Tests that the asynchronous function is timed correctly by FunctionTimer.""" async with FunctionTimer() as timer: diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py index 566ced1f05a..3b1604b1840 100644 --- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py +++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py @@ -152,10 +152,7 @@ async def async_running_operation() -> None: await async_running_operation() duration_data = robot_context_tracker._storage[0] - measured_duration = ( - duration_data.duration_end - - duration_data.duration_start - ) + measured_duration = duration_data.duration_end - duration_data.duration_start assert ( abs(measured_duration - RUNNING_TIME * 1e9) < 1e7 ), "Measured duration for async operation should closely match the expected duration." 
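
A note on the tolerance in `test_async_operation_timing_accuracy`, whose assertion was just reformatted above: with `RUNNING_TIME = 0.004` seconds the expected duration is 4,000,000 ns, while the allowed error of 1e7 ns is 10 ms, more than twice the sleep itself. The bound therefore guards against gross scheduler stalls rather than verifying timer precision. Spelled out in plain Python, with both values taken straight from the test module:

    RUNNING_TIME = 0.004               # seconds, as defined in the test module
    expected_ns = RUNNING_TIME * 1e9   # 4_000_000 ns == 4 ms
    tolerance_ns = 1e7                 # 10_000_000 ns == 10 ms
    assert tolerance_ns > expected_ns  # the allowed error exceeds the sleep itself

Tightening the bound, or lengthening the sleep, would make this a stronger accuracy check at the cost of flakiness on loaded CI machines.
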
From a6b7f1d4c8aaffb0df1ea96656525da4eeb38caa Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Thu, 11 Apr 2024 10:29:19 -0700
Subject: [PATCH 17/23] chore: add ability to turn off tracking

---
 .../robot_context_tracker.py                  |  6 ++++--
 .../test_robot_context_tracker.py             | 17 ++++++++++++++++-
 2 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py
index 4f90823641e..4829279e7c5 100644
--- a/performance-metrics/src/performance_metrics/robot_context_tracker.py
+++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py
@@ -17,9 +17,10 @@
 class RobotContextTracker:
     """Tracks and stores robot context and execution duration for different operations."""
 
-    def __init__(self) -> None:
+    def __init__(self, should_track: bool = False) -> None:
         """Initializes the RobotContextTracker with an empty storage list."""
         self._storage: List[RawContextData] = []
+        self._should_track = should_track
 
     def _store(
         self, state: RobotContextStates, raw_duration_data: RawDurationData
@@ -52,7 +53,8 @@ def inner_decorator(func: Callable[P, R]) -> Callable[P, R]:
             @wraps(func)
             def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-
+                if not self._should_track:
+                    return func(*args, **kwargs)
                 try:
                     with FunctionTimer() as timer:
                         result = func(*args, **kwargs)
diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
index 3b1604b1840..8d520246530 100644
--- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
+++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
@@ -17,7 +17,7 @@
 @pytest.fixture
 def robot_context_tracker() -> RobotContextTracker:
     """Fixture to provide a fresh instance of RobotContextTracker for each test."""
-    return RobotContextTracker()
+    return RobotContextTracker(should_track=True)
 
 
 def test_robot_context_tracker(robot_context_tracker: RobotContextTracker) -> None:
@@ -203,3 +203,18 @@ async def second_async_calibrating() -> None:
         data.state == RobotContextStates.CALIBRATING
         for data in robot_context_tracker._storage
     ), "All tracked operations should be in CALIBRATING state."
+
+
+def test_no_tracking() -> None:
+    """Tests that operations are not tracked when tracking is disabled."""
+    robot_context_tracker = RobotContextTracker(should_track=False)
+
+    @robot_context_tracker.track(state=RobotContextStates.STARTING_UP)
+    def operation_without_tracking() -> None:
+        sleep(STARTING_TIME)
+
+    operation_without_tracking()
+
+    assert (
+        len(robot_context_tracker._storage) == 0
+    ), "Operation should not be tracked when tracking is disabled."
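With the `should_track` flag in place, the decorator can be left on hot code paths and disabled at construction time; at this point in the series the check happens on every call, so the wrapped function always runs and only the bookkeeping is skipped. A short usage sketch against the API as it stands here (a later patch in the series also adds a storage-path argument to the constructor):

```python
from performance_metrics.robot_context_tracker import RobotContextTracker
from performance_metrics.datashapes import RobotContextStates

tracker = RobotContextTracker(should_track=False)  # tracking disabled


@tracker.track(state=RobotContextStates.ANALYZING_PROTOCOL)
def analyze() -> None:
    ...  # the wrapped function still runs normally


analyze()
assert len(tracker._storage) == 0  # but nothing was recorded
```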
From 18e0b15c25ac4719f809f4ad208c8b840746ce92 Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Thu, 11 Apr 2024 10:49:23 -0700
Subject: [PATCH 18/23] rename

---
 .../src/performance_metrics/datashapes.py |  8 +--
 .../robot_context_tracker.py              |  6 +--
 .../test_robot_context_tracker.py         | 52 +++++++++----------
 3 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py
index 21cf79f7c4e..f8c62bb39b2 100644
--- a/performance-metrics/src/performance_metrics/datashapes.py
+++ b/performance-metrics/src/performance_metrics/datashapes.py
@@ -4,7 +4,7 @@
 import dataclasses
 
 
-class RobotContextStates(Enum):
+class RobotContextState(Enum):
     """Enum representing different states of a robot's operation context."""
 
     STARTING_UP = 0, "STARTING_UP"
@@ -18,7 +18,7 @@ def __init__(self, state_id: int, state_name: str) -> None:
         self.state_name = state_name
 
     @classmethod
-    def from_id(cls, state_id: int) -> "RobotContextStates":
+    def from_id(cls, state_id: int) -> "RobotContextState":
         """Returns the enum member matching the given state ID.
 
         Args:
@@ -30,7 +30,7 @@ def from_id(cls, state_id: int) -> "RobotContextStates":
         Raises:
             ValueError: If no matching state is found.
         """
-        for state in RobotContextStates:
+        for state in RobotContextState:
             if state.state_id == state_id:
                 return state
         raise ValueError(f"Invalid state id: {state_id}")
@@ -59,4 +59,4 @@ class RawContextData(RawDurationData):
     - state (RobotContextStates): The current state of the context.
     """
 
-    state: RobotContextStates
+    state: RobotContextState
diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py
index 4829279e7c5..23c5b434dff 100644
--- a/performance-metrics/src/performance_metrics/robot_context_tracker.py
+++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py
@@ -5,7 +5,7 @@
 from typing_extensions import ParamSpec
 from performance_metrics.datashapes import (
     RawContextData,
-    RobotContextStates,
+    RobotContextState,
     RawDurationData,
 )
 from performance_metrics.function_timer import FunctionTimer
@@ -23,7 +23,7 @@ def __init__(self, should_track: bool = False) -> None:
         self._should_track = should_track
 
     def _store(
-        self, state: RobotContextStates, raw_duration_data: RawDurationData
+        self, state: RobotContextState, raw_duration_data: RawDurationData
     ) -> None:
         """Stores the context and duration data for a robot operation.
 
@@ -40,7 +40,7 @@ def _store(
             )
         )
 
-    def track(self, state: RobotContextStates) -> Callable:  # type: ignore
+    def track(self, state: RobotContextState) -> Callable:  # type: ignore
         """Decorator factory for tracking the execution duration and state of robot operations.
 
         Args:
diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
index 8d520246530..0a145a480b6 100644
--- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
+++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
@@ -3,7 +3,7 @@
 import asyncio
 import pytest
 from performance_metrics.robot_context_tracker import RobotContextTracker
-from performance_metrics.datashapes import RobotContextStates
+from performance_metrics.datashapes import RobotContextState
 from time import sleep
 
 # Corrected times in seconds
@@ -23,23 +23,23 @@ def robot_context_tracker() -> RobotContextTracker:
 def test_robot_context_tracker(robot_context_tracker: RobotContextTracker) -> None:
     """Tests the tracking of various robot context states through RobotContextTracker."""
 
-    @robot_context_tracker.track(state=RobotContextStates.STARTING_UP)
+    @robot_context_tracker.track(state=RobotContextState.STARTING_UP)
     def starting_robot() -> None:
         sleep(STARTING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.CALIBRATING)
+    @robot_context_tracker.track(state=RobotContextState.CALIBRATING)
     def calibrating_robot() -> None:
         sleep(CALIBRATING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.ANALYZING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.ANALYZING_PROTOCOL)
     def analyzing_protocol() -> None:
         sleep(ANALYZING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.RUNNING_PROTOCOL)
     def running_protocol() -> None:
         sleep(RUNNING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN)
+    @robot_context_tracker.track(state=RobotContextState.SHUTTING_DOWN)
     def shutting_down_robot() -> None:
         sleep(SHUTTING_DOWN_TIME)
 
@@ -59,15 +59,15 @@ def shutting_down_robot() -> None:
 
     # Validate the sequence and accuracy of tracked states
     expected_states = [
-        RobotContextStates.STARTING_UP,
-        RobotContextStates.CALIBRATING,
-        RobotContextStates.ANALYZING_PROTOCOL,
-        RobotContextStates.RUNNING_PROTOCOL,
-        RobotContextStates.SHUTTING_DOWN,
+        RobotContextState.STARTING_UP,
+        RobotContextState.CALIBRATING,
+        RobotContextState.ANALYZING_PROTOCOL,
+        RobotContextState.RUNNING_PROTOCOL,
+        RobotContextState.SHUTTING_DOWN,
     ]
     for i, state in enumerate(expected_states):
         assert (
-            RobotContextStates.from_id(robot_context_tracker._storage[i].state.state_id)
+            RobotContextState.from_id(robot_context_tracker._storage[i].state.state_id)
             == state
         ), f"State at index {i} should be {state}."
 
@@ -77,11 +77,11 @@ def test_multiple_operations_single_state(
 ) -> None:
     """Tests tracking multiple operations within a single robot context state."""
 
-    @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.RUNNING_PROTOCOL)
     def first_operation() -> None:
         sleep(RUNNING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.RUNNING_PROTOCOL)
     def second_operation() -> None:
         sleep(RUNNING_TIME)
 
@@ -94,7 +94,7 @@ def second_operation() -> None:
     assert (
         robot_context_tracker._storage[0].state
         == robot_context_tracker._storage[1].state
-        == RobotContextStates.RUNNING_PROTOCOL
+        == RobotContextState.RUNNING_PROTOCOL
     ), "Both operations should have the same state."
@@ -103,7 +103,7 @@ def test_exception_handling_in_tracked_function(
 ) -> None:
     """Ensures exceptions in tracked operations are handled correctly."""
 
-    @robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN)
+    @robot_context_tracker.track(state=RobotContextState.SHUTTING_DOWN)
     def error_prone_operation() -> None:
         sleep(SHUTTING_DOWN_TIME)
         raise RuntimeError("Simulated operation failure")
@@ -115,7 +115,7 @@ def error_prone_operation() -> None:
         len(robot_context_tracker._storage) == 1
     ), "Failed operation should still be tracked."
     assert (
-        robot_context_tracker._storage[0].state == RobotContextStates.SHUTTING_DOWN
+        robot_context_tracker._storage[0].state == RobotContextState.SHUTTING_DOWN
     ), "State should be correctly logged despite the exception."
@@ -125,7 +125,7 @@ async def test_async_operation_tracking(
 ) -> None:
     """Tests tracking of an asynchronous operation."""
 
-    @robot_context_tracker.track(state=RobotContextStates.ANALYZING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.ANALYZING_PROTOCOL)
     async def async_analyzing_operation() -> None:
         await asyncio.sleep(ANALYZING_TIME)
 
@@ -135,7 +135,7 @@ async def async_analyzing_operation() -> None:
         len(robot_context_tracker._storage) == 1
     ), "Async operation should be tracked."
     assert (
-        robot_context_tracker._storage[0].state == RobotContextStates.ANALYZING_PROTOCOL
+        robot_context_tracker._storage[0].state == RobotContextState.ANALYZING_PROTOCOL
     ), "State should be ANALYZING_PROTOCOL."
@@ -145,7 +145,7 @@ async def test_async_operation_timing_accuracy(
 ) -> None:
     """Tests the timing accuracy of an async operation tracking."""
 
-    @robot_context_tracker.track(state=RobotContextStates.RUNNING_PROTOCOL)
+    @robot_context_tracker.track(state=RobotContextState.RUNNING_PROTOCOL)
     async def async_running_operation() -> None:
         await asyncio.sleep(RUNNING_TIME)
 
@@ -164,7 +164,7 @@ async def test_exception_in_async_operation(
 ) -> None:
     """Ensures exceptions in tracked async operations are correctly handled."""
 
-    @robot_context_tracker.track(state=RobotContextStates.SHUTTING_DOWN)
+    @robot_context_tracker.track(state=RobotContextState.SHUTTING_DOWN)
     async def async_error_prone_operation() -> None:
         await asyncio.sleep(SHUTTING_DOWN_TIME)
         raise RuntimeError("Simulated async operation failure")
@@ -176,7 +176,7 @@ async def async_error_prone_operation() -> None:
         len(robot_context_tracker._storage) == 1
     ), "Failed async operation should still be tracked."
    assert (
-        robot_context_tracker._storage[0].state == RobotContextStates.SHUTTING_DOWN
+        robot_context_tracker._storage[0].state == RobotContextState.SHUTTING_DOWN
     ), "State should be SHUTTING_DOWN despite the exception."
@@ -186,11 +186,11 @@ async def test_concurrent_async_operations(
 ) -> None:
     """Tests tracking of concurrent async operations."""
 
-    @robot_context_tracker.track(state=RobotContextStates.CALIBRATING)
+    @robot_context_tracker.track(state=RobotContextState.CALIBRATING)
     async def first_async_calibrating() -> None:
         await asyncio.sleep(CALIBRATING_TIME)
 
-    @robot_context_tracker.track(state=RobotContextStates.CALIBRATING)
+    @robot_context_tracker.track(state=RobotContextState.CALIBRATING)
     async def second_async_calibrating() -> None:
         await asyncio.sleep(CALIBRATING_TIME)
 
@@ -200,7 +200,7 @@ async def second_async_calibrating() -> None:
     assert (
         len(robot_context_tracker._storage) == 2
     ), "Both concurrent async operations should be tracked."
     assert all(
-        data.state == RobotContextStates.CALIBRATING
+        data.state == RobotContextState.CALIBRATING
         for data in robot_context_tracker._storage
     ), "All tracked operations should be in CALIBRATING state."
 
@@ -209,7 +209,7 @@ def test_no_tracking() -> None:
     """Tests that operations are not tracked when tracking is disabled."""
     robot_context_tracker = RobotContextTracker(should_track=False)
 
-    @robot_context_tracker.track(state=RobotContextStates.STARTING_UP)
+    @robot_context_tracker.track(state=RobotContextState.STARTING_UP)
     def operation_without_tracking() -> None:
         sleep(STARTING_TIME)
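Beyond the mechanical rename, it is worth noting the idiom the renamed enum relies on: each member's value is a `(state_id, state_name)` tuple, and `Enum` unpacks that tuple into the custom `__init__`, so every member carries both attributes. A minimal standalone sketch of the same pattern (the `State` class here is illustrative, not the package's):

```python
from enum import Enum


class State(Enum):
    """Each member's value is an (id, name) tuple, unpacked into __init__."""

    STARTING_UP = 0, "STARTING_UP"
    CALIBRATING = 1, "CALIBRATING"

    def __init__(self, state_id: int, state_name: str) -> None:
        # Enum calls __init__ once per member, with the tuple unpacked.
        self.state_id = state_id
        self.state_name = state_name


assert State.STARTING_UP.state_id == 0
assert State.CALIBRATING.state_name == "CALIBRATING"
assert State((1, "CALIBRATING")) is State.CALIBRATING  # lookup by full value
```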
From 161a405dc0493c3f4e91fc0e4c5a7b0cdfec3f3a Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Thu, 11 Apr 2024 10:59:52 -0700
Subject: [PATCH 19/23] missed this in the merge

---
 .../tests/performance_metrics/test_function_timer.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py
index e9393442b9b..8a42c1ac203 100644
--- a/performance-metrics/tests/performance_metrics/test_function_timer.py
+++ b/performance-metrics/tests/performance_metrics/test_function_timer.py
@@ -66,11 +66,6 @@ def test_synchronous_function_with_exception() -> None:
     assert duration_data.duration_start < duration_data.duration_end
 
 
-async def test_asynchronous_function() -> None:
-    """Tests that the asynchronous function is timed correctly by FunctionTimer."""
-    async with FunctionTimer() as timer:
-        await asynchronous_function()
-
 async def test_asynchronous_function() -> None:
     """Tests that the asynchronous function is timed correctly by FunctionTimer."""
     async with FunctionTimer() as timer:
@@ -89,11 +84,6 @@ async def test_asynchronous_function_with_exception() -> None:
     duration_data = timer.get_data()
     assert duration_data.duration_start < duration_data.duration_end
 
-async def test_asynchronous_and_synchronous_function() -> None:
-    """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer."""
-    async with FunctionTimer() as timer:
-        synchronous_function()
-        await asynchronous_function()
 
 async def test_asynchronous_and_synchronous_function() -> None:
     """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer."""
From 8768ed66d05320009d9a7fd7e63c90ee267b71ff Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Thu, 11 Apr 2024 12:12:33 -0700
Subject: [PATCH 20/23] chore: remove FunctionTimer

---
 .../src/performance_metrics/function_timer.py |  87 -------
 .../robot_context_tracker.py                  |  41 ++--
 .../test_function_timer.py                    | 244 ------------------
 3 files changed, 16 insertions(+), 356 deletions(-)
 delete mode 100644 performance-metrics/src/performance_metrics/function_timer.py
 delete mode 100644 performance-metrics/tests/performance_metrics/test_function_timer.py

diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py
deleted file mode 100644
index d9c8f95922a..00000000000
--- a/performance-metrics/src/performance_metrics/function_timer.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""Module for measuring and storing execution durations of functions.
-
-Provides a `FunctionTimer` class that can be used as a decorator to measure
-the execution time of both synchronous and asynchronous functions, utilizing high-resolution
-performance counters for accuracy.
-"""
-
-from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME
-from types import TracebackType
-from typing import (
-    Type,
-    Tuple,
-)
-from performance_metrics.datashapes import RawDurationData
-from contextlib import AbstractAsyncContextManager, AbstractContextManager
-
-
-class FunctionTimer(AbstractAsyncContextManager, AbstractContextManager):  # type: ignore
-    """A decorator class for measuring and storing the execution duration of functions.
-
-    It supports both synchronous and asynchronous functions.
-    """
-
-    def __init__(self) -> None:
-        self._func_start_time: int | None = None
-        self._duration_start_time: int | None = None
-        self._duration_end_time: int | None = None
-
-    def _begin_timing(self) -> Tuple[int, int]:
-        """Starts the timing process, capturing both the current real-time and a high-resolution performance counter.
-
-        Returns:
-            A tuple containing the current real-time (`clock_gettime_ns(CLOCK_REALTIME)`) and an initial performance counter (`perf_counter_ns()`). Both values are measured in nanoseconds.
-        """
-        return clock_gettime_ns(CLOCK_REALTIME), perf_counter_ns()
-
-    def _end_timing(self) -> int:
-        """Ends the timing process, capturing the final high-resolution performance counter.
-
-        Returns:
-            The final performance counter, measured in nanoseconds.
-        """
-        return perf_counter_ns()
-
-    def __enter__(self) -> "FunctionTimer":
-        """Set the start time of the function."""
-        self._func_start_time, self._duration_start_time = self._begin_timing()
-        return self
-
-    def __exit__(
-        self,
-        exc_type: Type[BaseException] | None,
-        exc_val: BaseException | None,
-        exc_tb: TracebackType | None,
-    ) -> None:
-        """Set the end time of the function."""
-        self._duration_end_time = self._end_timing()
-
-    async def __aenter__(self) -> "FunctionTimer":
-        """Set the start time of the function."""
-        self._func_start_time, self._duration_start_time = self._begin_timing()
-        return self
-
-    async def __aexit__(
-        self,
-        exc_type: Type[BaseException] | None,
-        exc_val: BaseException | None,
-        exc_tb: TracebackType | None,
-    ) -> None:
-        """Set the end time of the function."""
-        self._duration_end_time = self._end_timing()
-
-    def get_data(self) -> RawDurationData:
-        """Returns the duration data for the function.
-
-        Returns:
-            RawDurationData: The duration data for the function.
-        """
-        assert self._func_start_time is not None
-        assert self._duration_start_time is not None
-        assert self._duration_end_time is not None
-
-        return RawDurationData(
-            func_start=self._func_start_time,
-            duration_start=self._duration_start_time,
-            duration_end=self._duration_end_time,
-        )
diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py
index 23c5b434dff..bb2cf01f97b 100644
--- a/performance-metrics/src/performance_metrics/robot_context_tracker.py
+++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py
@@ -1,14 +1,13 @@
 """Module for tracking robot context and execution duration for different operations."""
 
 from functools import wraps
+from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME
 from typing import Callable, TypeVar, List
 from typing_extensions import ParamSpec
 from performance_metrics.datashapes import (
     RawContextData,
     RobotContextState,
-    RawDurationData,
 )
-from performance_metrics.function_timer import FunctionTimer
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -22,24 +21,6 @@ def __init__(self, should_track: bool = False) -> None:
         self._storage: List[RawContextData] = []
         self._should_track = should_track
 
-    def _store(
-        self, state: RobotContextState, raw_duration_data: RawDurationData
-    ) -> None:
-        """Stores the context and duration data for a robot operation.
-
-        Args:
-            state : The state of the robot during the operation.
-            raw_duration_data : The duration data for the operation.
-        """
-        self._storage.append(
-            RawContextData(
-                func_start=raw_duration_data.func_start,
-                duration_start=raw_duration_data.duration_start,
-                duration_end=raw_duration_data.duration_end,
-                state=state,
-            )
-        )
-
     def track(self, state: RobotContextState) -> Callable:  # type: ignore
         """Decorator factory for tracking the execution duration and state of robot operations.
 
@@ -51,15 +32,25 @@ def track(self, state: RobotContextState) -> Callable:  # type: ignore
         """
 
         def inner_decorator(func: Callable[P, R]) -> Callable[P, R]:
+            if not self._should_track:
+                return func
+
             @wraps(func)
             def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-                if not self._should_track:
-                    return func(*args, **kwargs)
+                function_start_time = clock_gettime_ns(CLOCK_REALTIME)
+                duration_start_time = perf_counter_ns()
                 try:
-                    with FunctionTimer() as timer:
-                        result = func(*args, **kwargs)
+                    result = func(*args, **kwargs)
                 finally:
-                    self._store(state, timer.get_data())
+                    duration_end_time = perf_counter_ns()
+                    self._storage.append(
+                        RawContextData(
+                            function_start_time,
+                            duration_start_time,
+                            duration_end_time,
+                            state,
+                        )
+                    )
                 return result
 
             return wrapper
diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py
deleted file mode 100644
index 8a42c1ac203..00000000000
--- a/performance-metrics/tests/performance_metrics/test_function_timer.py
+++ /dev/null
@@ -1,244 +0,0 @@
-"""This module contains tests for timing the execution of synchronous and asynchronous functions using the FunctionTimer class.
-
-It includes functions and their variants that raise exceptions to simulate errors during execution. Each test function is designed
-to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous
-calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios.
-"""
-import time
-import asyncio
-import pytest
-from performance_metrics.function_timer import FunctionTimer
-
-
-def synchronous_function() -> None:
-    """Prints a message indicating a synchronous function is running."""
-    print("synchronous_function")
-
-
-def synchronous_function_with_exception() -> None:
-    """Prints a message then raises an exception to simulate error in synchronous execution."""
-    print("synchronous_function_with_exception")
-    raise Exception("An exception")
-
-
-async def asynchronous_function() -> None:
-    """Prints a message indicating an asynchronous function is running."""
-    print("asynchronous_function")
-
-
-async def asynchronous_function_with_exception() -> None:
-    """Prints a message then raises an exception to simulate error in asynchronous execution."""
-    print("asynchronous_function_with_exception")
-    raise Exception("An exception")
-
-
-async def long_running_task() -> None:
-    """Simulates a longer running asynchronous task."""
-    await asyncio.sleep(2)
-
-
-async def short_running_task() -> None:
-    """Simulates a shorter running asynchronous task."""
-    await asyncio.sleep(0.5)
-
-
-##################
-# TEST FUNCTIONS #
-##################
-
-
-def test_synchronous_function() -> None:
-    """Tests that the synchronous function is timed correctly by FunctionTimer."""
-    with FunctionTimer() as timer:
-        synchronous_function()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-def test_synchronous_function_with_exception() -> None:
-    """Tests that FunctionTimer can handle exceptions in synchronous functions correctly."""
-    with pytest.raises(Exception):
-        with FunctionTimer() as timer:
-            synchronous_function_with_exception()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_asynchronous_function() -> None:
-    """Tests that the asynchronous function is timed correctly by FunctionTimer."""
-    async with FunctionTimer() as timer:
-        await asynchronous_function()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_asynchronous_function_with_exception() -> None:
-    """Tests that FunctionTimer can handle exceptions in asynchronous functions correctly."""
-    with pytest.raises(Exception):
-        async with FunctionTimer() as timer:
-            await asynchronous_function_with_exception()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_asynchronous_and_synchronous_function() -> None:
-    """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer."""
-    async with FunctionTimer() as timer:
-        synchronous_function()
-        await asynchronous_function()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_synchronous_and_asynchronous_function_with_exception() -> None:
-    """Tests that FunctionTimer can handle a mixed sequence of functions, including an exception, correctly."""
-    with pytest.raises(Exception):
-        async with FunctionTimer() as timer:
-            synchronous_function_with_exception()
-            await asynchronous_function()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_nested_synchronous_functions() -> None:
-    """Tests that the FunctionTimer correctly times nested synchronous functions."""
-    with FunctionTimer() as outer_timer:
-        synchronous_function()
-        with FunctionTimer() as inner_timer:
-            synchronous_function()
-
-    outer_duration_data = outer_timer.get_data()
-    inner_duration_data = inner_timer.get_data()
-
-    assert outer_duration_data.duration_start < outer_duration_data.duration_end
-    assert inner_duration_data.duration_start < inner_duration_data.duration_end
-    assert outer_duration_data.duration_start < inner_duration_data.duration_start
-    assert outer_duration_data.duration_end >= inner_duration_data.duration_end
-
-
-async def test_timing_sychronous_function_nested_inside_async_function() -> None:
-    """Tests that the FunctionTimer correctly times a synchronous function inside an asynchronous context manager."""
-    async with FunctionTimer() as async_timer:
-        await asynchronous_function()
-        with FunctionTimer() as sync_timer:
-            synchronous_function()
-
-    async_duration_data = async_timer.get_data()
-    sync_duration_data = sync_timer.get_data()
-
-    assert async_duration_data.duration_start < async_duration_data.duration_end
-    assert sync_duration_data.duration_start < sync_duration_data.duration_end
-    assert async_duration_data.duration_start < sync_duration_data.duration_start
-    assert async_duration_data.duration_end >= sync_duration_data.duration_end
-
-
-def test_instantaneous_function() -> None:
-    """Tests that the FunctionTimer can measure the time of an almost instantaneous function."""
-
-    def instantaneous_function() -> None:
-        """A function that executes almost instantaneously."""
-        pass
-
-    with FunctionTimer() as timer:
-        instantaneous_function()
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start <= duration_data.duration_end
-
-
-def test_known_duration_function() -> None:
-    """Tests the FunctionTimer's accuracy by comparing with a known sleep duration."""
-    sleep_duration = 0.5
-
-    def known_duration_function() -> None:
-        time.sleep(sleep_duration)
-
-    with FunctionTimer() as timer:
-        known_duration_function()
-
-    duration_data = timer.get_data()
-    measured_duration_nanoseconds = abs(
-        duration_data.duration_start - duration_data.duration_end
-    )
-    measure_duration_seconds = measured_duration_nanoseconds / 1_000_000_000
-    assert abs(measure_duration_seconds - sleep_duration) < 0.05
-
-
-async def test_async_functions_in_parallel() -> None:
-    """Tests timing of multiple asynchronous functions executed in parallel."""
-
-    async def async_sleep_function(duration: float) -> None:
-        await asyncio.sleep(duration)
-
-    async with FunctionTimer() as timer:
-        await asyncio.gather(
-            async_sleep_function(0.5),
-            async_sleep_function(1),
-            async_sleep_function(1.5),
-        )
-
-    duration_data = timer.get_data()
-    assert duration_data.duration_start < duration_data.duration_end
-
-
-async def test_function_timer_with_async_contexts() -> None:
-    """Tests that the FunctionTimer context manager correctly times overlapping asynchronous tasks."""
-    # 1. Start long_running_task
-    # 2. __aenter__ will be called on long_running_task
-    # 3. Start short_running_task
-    # 4. __aenter__ will be called on short_running_task
-    # 5. Finish short_running_task
-    # 6. __aexit__ will be called on short_running_task
-    # 7. Finish long_running_task
-    # 8. __aexit__ will be called on long_running_task
-
-    async with FunctionTimer() as f1_timer:
-        await long_running_task()
-
-        async with FunctionTimer() as f2_timer:
-            await short_running_task()
-
-    f1_duration_data = f1_timer.get_data()
-    f2_duration_data = f2_timer.get_data()
-
-    assert f1_duration_data.duration_start < f1_duration_data.duration_end
-    assert f2_duration_data.duration_start < f2_duration_data.duration_end
-    assert f1_duration_data.duration_start < f2_duration_data.duration_start
-    assert f1_duration_data.duration_end >= f2_duration_data.duration_end
-
-
-def test_direct_use_without_context_manager() -> None:
-    """Tests the behavior of FunctionTimer when used directly without a context manager block.
-
-    Verifies that the start and end times are not set and that an appropriate assertion is raised when attempting to access them.
-    """
-    timer = FunctionTimer()
-    assert (
-        timer._func_start_time is None
-    ), "_func_start_time should be None when not used within a context manager"
-    assert (
-        timer._duration_start_time is None
-    ), "_duration_start_time should be None when not used within a context manager"
-    assert (
-        timer._duration_end_time is None
-    ), "_duration_end_time should be None when not used within a context manager"
-
-    with pytest.raises(AssertionError):
-        timer.get_data()
-
-
-def test_calling_get_data_before_context_manager_finishes() -> None:
-    """Tests that attempting to call get_data before the context manager has properly finished (exited) results in an assertion error.
-
-    This simulates the scenario where get_data is called prematurely, ensuring the timer enforces correct usage patterns.
-    """
-    with pytest.raises(AssertionError):
-        with FunctionTimer() as timer:
-            synchronous_function()
-            timer.get_data()
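Although the FunctionTimer class is gone, the inlined code keeps its two-clock approach: `clock_gettime_ns(CLOCK_REALTIME)` supplies a wall-clock timestamp for when the operation started (meaningful as an absolute time), while `perf_counter_ns()` supplies a monotonic high-resolution counter for how long it took (differences are immune to system clock adjustments). A freestanding sketch of the same measurement, assuming a POSIX platform since `clock_gettime_ns` with `CLOCK_REALTIME` is Unix-only:

```python
from time import CLOCK_REALTIME, clock_gettime_ns, perf_counter_ns, sleep

# Wall-clock anchor: an absolute timestamp, but it can jump if NTP or the
# user adjusts the system clock, so it is not used for the duration itself.
started_at_ns = clock_gettime_ns(CLOCK_REALTIME)

# Monotonic counter: meaningless as an absolute value, but differences are
# reliable for elapsed time.
t0 = perf_counter_ns()
sleep(0.25)
elapsed_ns = perf_counter_ns() - t0

print(f"started at {started_at_ns} ns since the epoch")
print(f"took roughly {elapsed_ns / 1e9:.3f} s")
```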
From 73c819757dd8b8125e60ac67f47d468a45dee538 Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Fri, 12 Apr 2024 07:41:18 -0700
Subject: [PATCH 21/23] chore: use double ended queue instead of a list

---
 .../src/performance_metrics/robot_context_tracker.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py
index bb2cf01f97b..8cb68384fcd 100644
--- a/performance-metrics/src/performance_metrics/robot_context_tracker.py
+++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py
@@ -2,8 +2,9 @@
 
 from functools import wraps
 from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME
-from typing import Callable, TypeVar, List
+from typing import Callable, TypeVar
 from typing_extensions import ParamSpec
+from collections import deque
 from performance_metrics.datashapes import (
     RawContextData,
     RobotContextState,
@@ -18,7 +19,7 @@ class RobotContextTracker:
 
     def __init__(self, should_track: bool = False) -> None:
         """Initializes the RobotContextTracker with an empty storage list."""
-        self._storage: List[RawContextData] = []
+        self._storage: deque[RawContextData] = deque()
         self._should_track = should_track
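A deque keeps appends O(1) like a list, but also makes it cheap to drain records from the front when they are eventually flushed, and it can be bounded so a long-running process does not accumulate unbounded data. A sketch of the drain pattern this enables (the `maxlen` bound is an illustration; this patch does not set one):

```python
from collections import deque

storage: deque[int] = deque(maxlen=1000)  # oldest entries drop when full

for record in range(5):
    storage.append(record)

# Drain from the front in insertion order, leaving the deque empty.
while storage:
    print(storage.popleft())
```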
From 023d3a58e6a347d854f5aa44e47156a1a5ec6c27 Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Fri, 12 Apr 2024 07:52:21 -0700
Subject: [PATCH 22/23] chore(performance-metrics): add CI testing and linting
 (#14877)

# Overview

Add CI testing and lint checks
---
 .../performance-metrics-test-lint.yaml | 54 +++++++++++++++++++
 1 file changed, 54 insertions(+)
 create mode 100644 .github/workflows/performance-metrics-test-lint.yaml

diff --git a/.github/workflows/performance-metrics-test-lint.yaml b/.github/workflows/performance-metrics-test-lint.yaml
new file mode 100644
index 00000000000..e57df828caf
--- /dev/null
+++ b/.github/workflows/performance-metrics-test-lint.yaml
@@ -0,0 +1,54 @@
+# This workflow runs lint on pull requests that touch anything in the performance-metrics directory
+
+name: 'performance-metrics test & lint'
+
+on:
+  pull_request:
+    paths:
+      - 'performance-metrics/**'
+      - '.github/workflows/performance-metrics-test-lint.yaml'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  lint:
+    name: 'performance-metrics test & lint'
+    timeout-minutes: 5
+    runs-on: 'ubuntu-latest'
+    steps:
+      - name: Checkout opentrons repo
+        uses: 'actions/checkout@v4'
+
+      - name: Setup Python
+        uses: 'actions/setup-python@v5'
+        with:
+          python-version: '3.10'
+          cache: 'pipenv'
+          cache-dependency-path: performance-metrics/Pipfile.lock
+
+      - name: "Install Python deps"
+        uses: './.github/actions/python/setup'
+        with:
+          project: 'performance-metrics'
+
+      - name: Setup
+        id: install
+        working-directory: ./performance-metrics
+        run: make setup
+
+      - name: Test
+        if: always() && steps.install.outcome == 'success' || steps.install.outcome == 'skipped'
+        working-directory: ./performance-metrics
+        run: make test
+
+      - name: Lint
+        if: always() && steps.install.outcome == 'success' || steps.install.outcome == 'skipped'
+        working-directory: ./performance-metrics
+        run: make lint
From 40092d97ae0f0aba622dc7531881549007268a0b Mon Sep 17 00:00:00 2001
From: Derek Maggio
Date: Fri, 12 Apr 2024 07:57:40 -0700
Subject: [PATCH 23/23] feat(performance-metrics): store to file (#14882)

# Overview

Add functionality to store CSV data in a file

# Test Plan

- Added test cases to verify storing to a file correctly

# Changelog

- Added `store` method to RobotContextTracker
- Added data shaping to RawContextData
- Tests

# Review requests

None

# Risk assessment

Low
---
 .../src/performance_metrics/datashapes.py | 29 ++++++++------
 .../robot_context_tracker.py              | 18 ++++++++-
 .../test_robot_context_tracker.py         | 39 +++++++++++++++++--
 3 files changed, 69 insertions(+), 17 deletions(-)

diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py
index f8c62bb39b2..81b0234a723 100644
--- a/performance-metrics/src/performance_metrics/datashapes.py
+++ b/performance-metrics/src/performance_metrics/datashapes.py
@@ -2,6 +2,7 @@
 
 from enum import Enum
 import dataclasses
+from typing import Tuple
 
 
 class RobotContextState(Enum):
@@ -37,26 +38,30 @@ def from_id(cls, state_id: int) -> "RobotContextState":
 
 
 @dataclasses.dataclass(frozen=True)
-class RawDurationData:
-    """Represents raw duration data for a process or function.
+class RawContextData:
+    """Represents raw duration data with context state information.
 
     Attributes:
-    - function_start_time (int): The start time of the function.
-    - duration_measurement_start_time (int): The start time for duration measurement.
-    - duration_measurement_end_time (int): The end time for duration measurement.
+    - state (RobotContextStates): The current state of the context.
     """
 
     func_start: int
     duration_start: int
     duration_end: int
-
-
-@dataclasses.dataclass(frozen=True)
-class RawContextData(RawDurationData):
-    """Extends RawDurationData with context state information.
-
-    Attributes:
-    - state (RobotContextStates): The current state of the context.
-    """
-
     state: RobotContextState
+
+    @classmethod
+    def headers(cls) -> Tuple[str, str, str]:
+        """Returns the headers for the raw context data."""
+        return ("state_id", "function_start_time", "duration")
+
+    def csv_row(self) -> Tuple[int, int, int]:
+        """Returns the raw context data as a CSV row."""
+        return (
+            self.state.state_id,
+            self.func_start,
+            self.duration_end - self.duration_start,
+        )
diff --git a/performance-metrics/src/performance_metrics/robot_context_tracker.py b/performance-metrics/src/performance_metrics/robot_context_tracker.py
index 8cb68384fcd..188129046ff 100644
--- a/performance-metrics/src/performance_metrics/robot_context_tracker.py
+++ b/performance-metrics/src/performance_metrics/robot_context_tracker.py
@@ -1,5 +1,9 @@
 """Module for tracking robot context and execution duration for different operations."""
 
+import csv
+from pathlib import Path
+import os
+
 from functools import wraps
 from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME
 from typing import Callable, TypeVar
@@ -17,9 +21,10 @@ class RobotContextTracker:
     """Tracks and stores robot context and execution duration for different operations."""
 
-    def __init__(self, should_track: bool = False) -> None:
+    def __init__(self, storage_file_path: Path, should_track: bool = False) -> None:
         """Initializes the RobotContextTracker with an empty storage list."""
         self._storage: deque[RawContextData] = deque()
+        self._storage_file_path = storage_file_path
         self._should_track = should_track
 
     def track(self, state: RobotContextState) -> Callable:  # type: ignore
@@ -57,3 +62,14 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
             return wrapper
 
         return inner_decorator
+
+    def store(self) -> None:
+        """Writes the stored context data to the storage file and clears the storage list."""
+        stored_data = self._storage.copy()
+        self._storage.clear()
+        rows_to_write = [context_data.csv_row() for context_data in stored_data]
+        os.makedirs(self._storage_file_path.parent, exist_ok=True)
+        with open(self._storage_file_path, "a") as storage_file:
+            writer = csv.writer(storage_file)
+            writer.writerow(RawContextData.headers())
+            writer.writerows(rows_to_write)
diff --git a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
index 0a145a480b6..d78d5054fe6 100644
--- a/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
+++ b/performance-metrics/tests/performance_metrics/test_robot_context_tracker.py
@@ -1,6 +1,7 @@
 """Tests for the RobotContextTracker class in performance_metrics.robot_context_tracker."""
 
 import asyncio
+from pathlib import Path
 import pytest
 from performance_metrics.robot_context_tracker import RobotContextTracker
 from performance_metrics.datashapes import RobotContextState
@@ -15,9 +16,9 @@
 
 @pytest.fixture
-def robot_context_tracker() -> RobotContextTracker:
+def robot_context_tracker(tmp_path: Path) -> RobotContextTracker:
     """Fixture to provide a fresh instance of RobotContextTracker for each test."""
-    return RobotContextTracker(should_track=True)
+    return RobotContextTracker(storage_file_path=tmp_path, should_track=True)
 
 
 def test_robot_context_tracker(robot_context_tracker: RobotContextTracker) -> None:
@@ -205,9 +206,9 @@ async def second_async_calibrating() -> None:
     ), "All tracked operations should be in CALIBRATING state."
 
 
-def test_no_tracking() -> None:
+def test_no_tracking(tmp_path: Path) -> None:
     """Tests that operations are not tracked when tracking is disabled."""
-    robot_context_tracker = RobotContextTracker(should_track=False)
+    robot_context_tracker = RobotContextTracker(tmp_path, should_track=False)
 
     @robot_context_tracker.track(state=RobotContextState.STARTING_UP)
     def operation_without_tracking() -> None:
@@ -218,3 +219,33 @@ def operation_without_tracking() -> None:
     assert (
         len(robot_context_tracker._storage) == 0
     ), "Operation should not be tracked when tracking is disabled."
+
+
+async def test_storing_to_file(tmp_path: Path) -> None:
+    """Tests storing the tracked data to a file."""
+    file_path = tmp_path / "test_file.csv"
+    robot_context_tracker = RobotContextTracker(file_path, should_track=True)
+
+    @robot_context_tracker.track(state=RobotContextState.STARTING_UP)
+    def starting_robot() -> None:
+        sleep(STARTING_TIME)
+
+    @robot_context_tracker.track(state=RobotContextState.CALIBRATING)
+    def calibrating_robot() -> None:
+        sleep(CALIBRATING_TIME)
+
+    @robot_context_tracker.track(state=RobotContextState.ANALYZING_PROTOCOL)
+    def analyzing_protocol() -> None:
+        sleep(ANALYZING_TIME)
+
+    starting_robot()
+    calibrating_robot()
+    analyzing_protocol()
+
+    robot_context_tracker.store()
+
+    with open(file_path, "r") as file:
+        lines = file.readlines()
+        assert (
+            len(lines) == 4
+        ), "All stored data + header should be written to the file."
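Taken together, the end of the series leaves a tracker that buffers RawContextData rows and flushes them to CSV on demand. A hedged end-to-end sketch of the intended usage once this final patch lands (the file path is illustrative; `run_protocol` is a hypothetical stand-in). Note that `store()` opens the file in append mode and writes `headers()` unconditionally, so calling it repeatedly interleaves header rows with data rows, which is also why the test above expects four lines for one header plus three tracked operations:

```python
from pathlib import Path
from time import sleep

from performance_metrics.robot_context_tracker import RobotContextTracker
from performance_metrics.datashapes import RobotContextState

tracker = RobotContextTracker(Path("/tmp/context.csv"), should_track=True)


@tracker.track(state=RobotContextState.RUNNING_PROTOCOL)
def run_protocol() -> None:
    sleep(0.1)  # stand-in for real work


run_protocol()
tracker.store()  # writes ("state_id", "function_start_time", "duration") plus one row

print(Path("/tmp/context.csv").read_text())
```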