From 5160d11dc8a91e6d54560c0d4df2dd279168b217 Mon Sep 17 00:00:00 2001 From: Dustin Kaiser Date: Wed, 18 Sep 2024 16:11:58 +0200 Subject: [PATCH 001/104] Fix aiohttp server autoinstrumentation with a hack --- .../src/servicelib/aiohttp/tracing.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/tracing.py b/packages/service-library/src/servicelib/aiohttp/tracing.py index 686112eaedd..b8394618be2 100644 --- a/packages/service-library/src/servicelib/aiohttp/tracing.py +++ b/packages/service-library/src/servicelib/aiohttp/tracing.py @@ -9,7 +9,10 @@ OTLPSpanExporter as OTLPSpanExporterHTTP, ) from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor -from opentelemetry.instrumentation.aiohttp_server import AioHttpServerInstrumentor +from opentelemetry.instrumentation.aiohttp_server import ( + AioHttpServerInstrumentor, + middleware, +) from opentelemetry.instrumentation.aiopg import AiopgInstrumentor from opentelemetry.instrumentation.requests import RequestsInstrumentor from opentelemetry.sdk.resources import Resource @@ -21,7 +24,7 @@ def setup_tracing( - app: web.Application, # pylint: disable=unused-argument + app: web.Application, tracing_settings: TracingSettings, service_name: str, instrument_aiopg: bool = False, # noqa: FBT001, FBT002 @@ -37,9 +40,8 @@ def setup_tracing( log.warning("Skipping opentelemetry tracing setup") return if not opentelemetry_collector_endpoint or not opentelemetry_collector_port: - raise RuntimeError( - f"Variable opentelemetry_collector_endpoint [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] or opentelemetry_collector_port [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] unset. Tracing options incomplete." - ) + msg = f"Variable opentelemetry_collector_endpoint [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] or opentelemetry_collector_port [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] unset. Tracing options incomplete." 
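        # building the message in a variable keeps the raise itself free of an
        # f-string literal (a common lint-driven pattern, cf. ruff EM102)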
+ raise RuntimeError(msg) resource = Resource(attributes={"service.name": service_name}) trace.set_tracer_provider(TracerProvider(resource=resource)) tracer_provider: trace.TracerProvider = trace.get_tracer_provider() @@ -61,6 +63,7 @@ def setup_tracing( tracer_provider.add_span_processor(BatchSpanProcessor(otlp_exporter)) # type: ignore[attr-defined] # https://github.com/open-telemetry/opentelemetry-python/issues/3713 # Instrument aiohttp server and client AioHttpServerInstrumentor().instrument() + app.middlewares.append(middleware) AioHttpClientInstrumentor().instrument() if instrument_aiopg: AiopgInstrumentor().instrument() From 61366a3844465713454ec746ddc8e26d0cf9076e Mon Sep 17 00:00:00 2001 From: Dustin Kaiser Date: Wed, 2 Oct 2024 08:28:41 +0200 Subject: [PATCH 002/104] minor: fix typo --- services/datcore-adapter/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/datcore-adapter/Dockerfile b/services/datcore-adapter/Dockerfile index fc4c2fcd403..af314327054 100644 --- a/services/datcore-adapter/Dockerfile +++ b/services/datcore-adapter/Dockerfile @@ -4,7 +4,7 @@ FROM python:${PYTHON_VERSION}-slim-bookworm as base # # USAGE: -# cd sercices/datcore-adapter +# cd services/datcore-adapter # docker build -f Dockerfile -t datcore-adapter:prod --target production ../../ # docker run datcore-adapter:prod # From d1a70da6638ca0386d082a224cb073fa2c817e78 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:10:04 +0200 Subject: [PATCH 003/104] =?UTF-8?q?=F0=9F=8E=A8=20backend=20pulls=20inputs?= =?UTF-8?q?=20before=20iframe=20is=20displayed=20(#6392)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../dynamic_sidecar/scheduler/_core/_events_user_services.py | 3 +++ .../client/source/class/osparc/data/model/IframeHandler.js | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py index d9c9815b8d5..f708c1cb22c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py @@ -223,6 +223,9 @@ async def progress_create_containers( scheduler_data.dynamic_sidecar.were_containers_created = True + # NOTE: user services are already in running state, meaning it is safe to pull inputs + await sidecars_client.pull_service_input_ports(dynamic_sidecar_endpoint) + start_duration = ( scheduler_data.dynamic_sidecar.instrumentation.elapsed_since_start_request() ) diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 57f42a800df..3260b28bc26 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -364,7 +364,9 @@ qx.Class.define("osparc.data.model.IframeHandler", { }; node.fireDataEvent("showInLogger", msgData); this.__restartIFrame(); - node.callRetrieveInputs(); + if (!node.isDynamicV2()) { + node.callRetrieveInputs(); + } }, __restartIFrame: function() { 
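Note on the patch above: director-v2 now asks the dynamic-sidecar to pull the service input ports as soon as the user-service containers are in running state, and the frontend correspondingly skips callRetrieveInputs() for dynamic-v2 services, so the iframe is only shown once its input data is already in place. A minimal sketch of that sequencing, reusing the names from the diff above; the underscore-prefixed helpers are illustrative stand-ins, not the actual director-v2 API:

    async def _start_user_services(sidecars_client, dynamic_sidecar_endpoint, scheduler_data):
        # at this point the user-service containers are up and report healthy
        scheduler_data.dynamic_sidecar.were_containers_created = True
        # the services are already running, so pulling the inputs is safe
        await sidecars_client.pull_service_input_ports(dynamic_sidecar_endpoint)
        # only now is the service reported as ready, i.e. the user never sees
        # an iframe whose inputs are still being transferred
        await _notify_service_ready(scheduler_data)  # illustrative stand-in

The trade-off: a slightly longer start-up in exchange for an iframe that never renders with missing inputs.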
From 77e54cef3d68f04f968dc4468dd896509ba304d0 Mon Sep 17 00:00:00 2001 From: Dustin Kaiser <8209087+mrnicegyu11@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:17:07 +0200 Subject: [PATCH 004/104] =?UTF-8?q?=F0=9F=9A=A8=F0=9F=90=9B=20Mitigate=20d?= =?UTF-8?q?y-service=20file=20upload=20bug:=20Increase=20traefik=20timeout?= =?UTF-8?q?s=20(#6395)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Dustin Kaiser --- services/docker-compose.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index eea7ae1726b..60ce2c26b17 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1192,8 +1192,14 @@ services: - "--metrics.prometheus.entryPoint=metrics" - "--entryPoints.http.address=:80" - "--entryPoints.http.forwardedHeaders.insecure" + - "--entryPoints.http.transport.respondingTimeouts.idleTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 + - "--entryPoints.http.transport.respondingTimeouts.writeTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 + - "--entryPoints.http.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 - "--entryPoints.simcore_api.address=:10081" - "--entryPoints.simcore_api.address=:10081" - "--entryPoints.simcore_api.forwardedHeaders.insecure" + - "--entryPoints.simcore_api.transport.respondingTimeouts.idleTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 + - "--entryPoints.simcore_api.transport.respondingTimeouts.writeTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 + - "--entryPoints.simcore_api.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 - "--entryPoints.traefik_monitor.address=:8080" - "--entryPoints.traefik_monitor.forwardedHeaders.insecure" - "--providers.swarm.endpoint=unix:///var/run/docker.sock" @@ -1206,7 +1212,6 @@ services: - "--tracing.addinternals" - "--tracing.otlp=true" - "--tracing.otlp.http=true" - # - "--tracing.otlp.http.endpoint=0.0.0.0:4318/v1/traces" volumes: # So that Traefik can listen to the Docker events - /var/run/docker.sock:/var/run/docker.sock From b44eb753c50096baa22c19935a0c4a2fb4849ce1 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 19 Sep 2024 14:56:15 +0200 Subject: [PATCH 005/104] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Force=20reload?= =?UTF-8?q?=20``no-cache``=20(#6397)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/NewUITracker.js | 8 +++++-- .../class/osparc/ui/message/FlashMessage.js | 19 +++++++++++------ .../client/source/class/osparc/utils/Utils.js | 21 +++++++++++++++++++ 3 files changed, 40 insertions(+), 8 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/NewUITracker.js b/services/static-webserver/client/source/class/osparc/NewUITracker.js index 842e95fdb1d..04a19536128 100644 --- a/services/static-webserver/client/source/class/osparc/NewUITracker.js +++ b/services/static-webserver/client/source/class/osparc/NewUITracker.js @@ -33,9 +33,13 @@ qx.Class.define("osparc.NewUITracker", { let msg = ""; msg += qx.locale.Manager.tr("A new version of the application is now available."); msg += "
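<br>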
"; - msg += qx.locale.Manager.tr("Reload to get the latest features."); + msg += qx.locale.Manager.tr("Click the Reload button to get the latest features."); // permanent message - osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0); + const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({ + maxWidth: 500 + }); + const reloadButton = osparc.utils.Utils.reloadNoCacheButton(); + flashMessage.addWidget(reloadButton); this.stopTracker(); } }; diff --git a/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js index a16ec4deb7f..7169b47b935 100644 --- a/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js +++ b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessage.js @@ -31,7 +31,7 @@ qx.Class.define("osparc.ui.message.FlashMessage", { */ construct: function(message, level, duration) { this.base(arguments); - this._setLayout(new qx.ui.layout.HBox(15)); + this._setLayout(new qx.ui.layout.VBox(15)); this.set({ padding: 18, @@ -112,15 +112,18 @@ qx.Class.define("osparc.ui.message.FlashMessage", { }, members: { - __closeCb: null, _createChildControlImpl: function(id) { let control; switch (id) { + case "message-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(15)); + this._add(control); + break; case "badge": control = new qx.ui.basic.Image().set({ alignY: "middle" }); - this._add(control); + this.getChildControl("message-layout").add(control); break; case "message": control = new qx.ui.basic.Label().set({ @@ -128,7 +131,7 @@ qx.Class.define("osparc.ui.message.FlashMessage", { selectable: true, rich: true }); - this._add(control, { + this.getChildControl("message-layout").add(control, { flex: 1 }); break; @@ -136,7 +139,7 @@ qx.Class.define("osparc.ui.message.FlashMessage", { control = new osparc.ui.basic.IconButton("@MaterialIcons/close/16", () => this.fireEvent("closeMessage")).set({ alignY: "middle" }); - this._add(control); + this.getChildControl("message-layout").add(control); break; } return control || this.base(arguments, id); @@ -147,6 +150,10 @@ qx.Class.define("osparc.ui.message.FlashMessage", { if (label) { label.setValue(value); } - } + }, + + addWidget: function(widget) { + this._add(widget); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index 4474ed9e16c..23947464d6d 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -249,6 +249,27 @@ qx.Class.define("osparc.utils.Utils", { // window.location.href = window.location.href.replace(/#.*$/, ""); }, + reloadNoCacheButton: function() { + const reloadButton = new qx.ui.form.Button().set({ + label: qx.locale.Manager.tr("Reload"), + icon: "@FontAwesome5Solid/redo/16", + font: "text-16", + gap: 10, + appearance: "strong-button", + allowGrowX: false, + center: true, + alignX: "center", + }); + reloadButton.addListener("execute", () => { + // this argument, which is passed and consumed by the boot.js init file, + // adds a `nocache=rand()` query argument to the js resource calls. 
+ // This forces a hard reload + const noCacheUrl = window.location.href + "?qooxdoo:add-no-cache=true"; + window.location.href = noCacheUrl; + }); + return reloadButton; + }, + getUniqueStudyName: function(preferredName, list) { let title = preferredName; const existingTitles = list.map(study => study.name); From ed2db583574a64b315c657bc457cda85898f3834 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 19 Sep 2024 23:11:36 +0200 Subject: [PATCH 006/104] =?UTF-8?q?=F0=9F=90=9B=E2=9A=97=EF=B8=8FPrometheu?= =?UTF-8?q?s=20instrumentation=20incorrectly=20setup=20(#6398)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fastapi/prometheus_instrumentation.py | 26 ++++---- .../src/servicelib/fastapi/tracing.py | 25 +++---- .../src/servicelib/instrumentation.py | 11 ++++ services/agent/tests/unit/test_core_routes.py | 8 +-- .../modules/instrumentation/_core.py | 6 +- .../modules/instrumentation/_models.py | 42 ++++++++---- .../modules/instrumentation/_utils.py | 5 +- services/autoscaling/tests/unit/conftest.py | 8 ++- .../test_modules_instrumentation_utils.py | 5 +- .../modules/instrumentation/_models.py | 66 +++++++++++-------- 10 files changed, 126 insertions(+), 76 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py b/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py index 626d5559df7..847585c52fc 100644 --- a/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py +++ b/packages/service-library/src/servicelib/fastapi/prometheus_instrumentation.py @@ -2,23 +2,27 @@ from fastapi import FastAPI +from prometheus_client import CollectorRegistry from prometheus_fastapi_instrumentator import Instrumentator def setup_prometheus_instrumentation(app: FastAPI) -> Instrumentator: + # NOTE: use that registry to prevent having a global one + app.state.prometheus_registry = registry = CollectorRegistry(auto_describe=True) + instrumentator = Instrumentator( + should_instrument_requests_inprogress=False, # bug in https://github.com/trallnag/prometheus-fastapi-instrumentator/issues/317 + inprogress_labels=False, + registry=registry, + ).instrument(app) - instrumentator = ( - Instrumentator( - should_instrument_requests_inprogress=True, inprogress_labels=False - ) - .instrument(app) - .expose(app, include_in_schema=False) - ) + async def _on_startup() -> None: + instrumentator.expose(app, include_in_schema=False) - def _unregister(): - for collector in list(instrumentator.registry._collector_to_names.keys()): - instrumentator.registry.unregister(collector) + def _unregister() -> None: + # NOTE: avoid registering collectors multiple times when running unittests consecutively (https://stackoverflow.com/a/62489287) + for collector in list(registry._collector_to_names.keys()): # noqa: SLF001 + registry.unregister(collector) - # avoid registering collectors multiple times when running unittests consecutively (https://stackoverflow.com/a/62489287) + app.add_event_handler("startup", _on_startup) app.add_event_handler("shutdown", _unregister) return instrumentator diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py b/packages/service-library/src/servicelib/fastapi/tracing.py index ba1c7d9e565..e0f670686f5 100644 --- a/packages/service-library/src/servicelib/fastapi/tracing.py +++ b/packages/service-library/src/servicelib/fastapi/tracing.py @@ -1,6 +1,7 @@ """ Adds fastapi middleware for tracing using 
opentelemetry instrumentation. """ + import logging from fastapi import FastAPI @@ -8,9 +9,7 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter as OTLPSpanExporterHTTP, ) -from opentelemetry.instrumentation.fastapi import ( - FastAPIInstrumentor, # pylint: disable=no-name-in-module -) +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor @@ -21,24 +20,19 @@ def setup_tracing( app: FastAPI, tracing_settings: TracingSettings, service_name: str -) -> FastAPIInstrumentor | None: +) -> None: if ( not tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT and not tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT ): log.warning("Skipping opentelemetry tracing setup") - return None - if ( - not tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT - or not tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT - ): - raise RuntimeError( - f"Variable opentelemetry_collector_endpoint [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] or opentelemetry_collector_port [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] unset. Tracing options incomplete." - ) + return + # Set up the tracer provider resource = Resource(attributes={"service.name": service_name}) trace.set_tracer_provider(TracerProvider(resource=resource)) - tracer_provider = trace.get_tracer_provider() + global_tracer_provider = trace.get_tracer_provider() + assert isinstance(global_tracer_provider, TracerProvider) # nosec tracing_destination: str = f"{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}:{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}/v1/traces" log.info( "Trying to connect service %s to tracing collector at %s.", @@ -48,7 +42,6 @@ def setup_tracing( # Configure OTLP exporter to send spans to the collector otlp_exporter = OTLPSpanExporterHTTP(endpoint=tracing_destination) span_processor = BatchSpanProcessor(otlp_exporter) - # Mypy bug --> https://github.com/open-telemetry/opentelemetry-python/issues/3713 - tracer_provider.add_span_processor(span_processor) # type: ignore[attr-defined] + global_tracer_provider.add_span_processor(span_processor) # Instrument FastAPI - return FastAPIInstrumentor().instrument_app(app) # type: ignore[no-any-return] + FastAPIInstrumentor().instrument_app(app) diff --git a/packages/service-library/src/servicelib/instrumentation.py b/packages/service-library/src/servicelib/instrumentation.py index d1fa57f66e4..002e1942853 100644 --- a/packages/service-library/src/servicelib/instrumentation.py +++ b/packages/service-library/src/servicelib/instrumentation.py @@ -1,2 +1,13 @@ +from dataclasses import dataclass + +from prometheus_client import CollectorRegistry + + +@dataclass(slots=True, kw_only=True) +class MetricsBase: + subsystem: str + registry: CollectorRegistry + + def get_metrics_namespace(application_name: str) -> str: return application_name.replace("-", "_") diff --git a/services/agent/tests/unit/test_core_routes.py b/services/agent/tests/unit/test_core_routes.py index 1fd0252d1aa..c20b8714757 100644 --- a/services/agent/tests/unit/test_core_routes.py +++ b/services/agent/tests/unit/test_core_routes.py @@ -29,7 +29,7 @@ def test_client(initialized_app: FastAPI) -> TestClient: def test_health_ok(env: None, test_client: TestClient): response = test_client.get("/health") assert response.status_code == status.HTTP_200_OK - assert 
response.json() == None + assert response.json() is None def test_health_fails_not_started( @@ -37,7 +37,7 @@ def test_health_fails_not_started( ): task_monitor: TaskMonitor = initialized_app.state.task_monitor # emulate monitor not being started - task_monitor._was_started = False + task_monitor._was_started = False # noqa: SLF001 response = test_client.get("/health") assert response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE @@ -50,8 +50,8 @@ def test_health_fails_hanging_tasks( task_monitor: TaskMonitor = initialized_app.state.task_monitor # emulate tasks hanging - for task_data in task_monitor._to_start.values(): - task_data._start_time = time() - 1e6 + for task_data in task_monitor._to_start.values(): # noqa: SLF001 + task_data._start_time = time() - 1e6 # noqa: SLF001 response = test_client.get("/health") assert response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_core.py b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_core.py index f7aaadbdc2a..e3bc20ef518 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_core.py @@ -22,8 +22,10 @@ async def on_startup() -> None: metrics_subsystem = ( "dynamic" if app_settings.AUTOSCALING_NODES_MONITORING else "computational" ) - app.state.instrumentation = AutoscalingInstrumentation( - registry=instrumentator.registry, subsystem=metrics_subsystem + app.state.instrumentation = ( + AutoscalingInstrumentation( # pylint: disable=unexpected-keyword-arg + registry=instrumentator.registry, subsystem=metrics_subsystem + ) ) async def on_shutdown() -> None: diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py index 056a77ea2a5..3831b33b826 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py @@ -2,6 +2,7 @@ from typing import Final from prometheus_client import CollectorRegistry, Counter, Histogram +from servicelib.instrumentation import MetricsBase from ...models import BufferPoolManager, Cluster from ._constants import ( @@ -13,11 +14,6 @@ from ._utils import TrackedGauge, create_gauge -@dataclass(slots=True, kw_only=True) -class MetricsBase: - subsystem: str - - @dataclass(slots=True, kw_only=True) class ClusterMetrics(MetricsBase): # pylint: disable=too-many-instance-attributes active_nodes: TrackedGauge = field(init=False) @@ -36,7 +32,12 @@ def __post_init__(self) -> None: cluster_subsystem = f"{self.subsystem}_cluster" # Creating and assigning gauges using the field names and the metric definitions for field_name, definition in CLUSTER_METRICS_DEFINITIONS.items(): - gauge = create_gauge(field_name, definition, cluster_subsystem) + gauge = create_gauge( + field_name=field_name, + definition=definition, + subsystem=cluster_subsystem, + registry=self.registry, + ) setattr(self, field_name, gauge) def update_from_cluster(self, cluster: Cluster) -> None: @@ -65,6 +66,7 @@ def __post_init__(self) -> None: labelnames=EC2_INSTANCE_LABELS, namespace=METRICS_NAMESPACE, subsystem=self.subsystem, + registry=self.registry, ) self.started_instances = Counter( "started_instances_total", @@ -72,6 +74,7 @@ def 
__post_init__(self) -> None: labelnames=EC2_INSTANCE_LABELS, namespace=METRICS_NAMESPACE, subsystem=self.subsystem, + registry=self.registry, ) self.stopped_instances = Counter( "stopped_instances_total", @@ -79,6 +82,7 @@ def __post_init__(self) -> None: labelnames=EC2_INSTANCE_LABELS, namespace=METRICS_NAMESPACE, subsystem=self.subsystem, + registry=self.registry, ) self.terminated_instances = Counter( "terminated_instances_total", @@ -86,6 +90,7 @@ def __post_init__(self) -> None: labelnames=EC2_INSTANCE_LABELS, namespace=METRICS_NAMESPACE, subsystem=self.subsystem, + registry=self.registry, ) def instance_started(self, instance_type: str) -> None: @@ -123,7 +128,12 @@ def __post_init__(self) -> None: setattr( self, field_name, - create_gauge(field_name, definition, buffer_pools_subsystem), + create_gauge( + field_name=field_name, + definition=definition, + subsystem=buffer_pools_subsystem, + registry=self.registry, + ), ) self.instances_ready_to_pull_seconds = Histogram( "instances_ready_to_pull_duration_seconds", @@ -132,6 +142,7 @@ def __post_init__(self) -> None: namespace=METRICS_NAMESPACE, subsystem=buffer_pools_subsystem, buckets=(10, 20, 30, 40, 50, 60, 120), + registry=self.registry, ) self.instances_completed_pulling_seconds = Histogram( "instances_completed_pulling_duration_seconds", @@ -150,6 +161,7 @@ def __post_init__(self) -> None: 30 * _MINUTE, 40 * _MINUTE, ), + registry=self.registry, ) def update_from_buffer_pool_manager( @@ -174,8 +186,16 @@ class AutoscalingInstrumentation(MetricsBase): buffer_machines_pools_metrics: BufferPoolsMetrics = field(init=False) def __post_init__(self) -> None: - self.cluster_metrics = ClusterMetrics(subsystem=self.subsystem) - self.ec2_client_metrics = EC2ClientMetrics(subsystem=self.subsystem) - self.buffer_machines_pools_metrics = BufferPoolsMetrics( - subsystem=self.subsystem + self.cluster_metrics = ClusterMetrics( # pylint: disable=unexpected-keyword-arg + subsystem=self.subsystem, registry=self.registry + ) + self.ec2_client_metrics = ( + EC2ClientMetrics( # pylint: disable=unexpected-keyword-arg + subsystem=self.subsystem, registry=self.registry + ) + ) + self.buffer_machines_pools_metrics = ( + BufferPoolsMetrics( # pylint: disable=unexpected-keyword-arg + subsystem=self.subsystem, registry=self.registry + ) ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_utils.py b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_utils.py index 2d991b71cc7..8f80b1f05e8 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_utils.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_utils.py @@ -3,7 +3,7 @@ from dataclasses import dataclass, field from aws_library.ec2._models import EC2InstanceData -from prometheus_client import Gauge +from prometheus_client import CollectorRegistry, Gauge from ._constants import METRICS_NAMESPACE @@ -27,9 +27,11 @@ def update_from_instances(self, instances: Iterable[EC2InstanceData]) -> None: def create_gauge( + *, field_name: str, definition: tuple[str, tuple[str, ...]], subsystem: str, + registry: CollectorRegistry, ) -> TrackedGauge: description, labelnames = definition return TrackedGauge( @@ -39,5 +41,6 @@ def create_gauge( labelnames=labelnames, namespace=METRICS_NAMESPACE, subsystem=subsystem, + registry=registry, ) ) diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 97e709c2dba..b705ea85b78 100644 --- 
a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -378,11 +378,17 @@ def enabled_rabbitmq( return rabbit_service +_LIFESPAN_TIMEOUT: Final[int] = 10 + + @pytest.fixture async def initialized_app(app_environment: EnvVarsDict) -> AsyncIterator[FastAPI]: settings = ApplicationSettings.create_from_envs() app = create_app(settings) - async with LifespanManager(app): + # NOTE: the timeout is sometime too small for CI machines, and even larger machines + async with LifespanManager( + app, startup_timeout=_LIFESPAN_TIMEOUT, shutdown_timeout=_LIFESPAN_TIMEOUT + ): yield app diff --git a/services/autoscaling/tests/unit/test_modules_instrumentation_utils.py b/services/autoscaling/tests/unit/test_modules_instrumentation_utils.py index f72fa262a97..31a19701f8e 100644 --- a/services/autoscaling/tests/unit/test_modules_instrumentation_utils.py +++ b/services/autoscaling/tests/unit/test_modules_instrumentation_utils.py @@ -2,6 +2,7 @@ from typing import TypedDict from aws_library.ec2._models import EC2InstanceData +from prometheus_client import CollectorRegistry from prometheus_client.metrics import MetricWrapperBase from simcore_service_autoscaling.modules.instrumentation._constants import ( EC2_INSTANCE_LABELS, @@ -40,10 +41,12 @@ def test_update_gauge_sets_old_entries_to_0( fake_ec2_instance_data: Callable[..., EC2InstanceData] ): # Create a Gauge with example labels + registry = CollectorRegistry() tracked_gauge = create_gauge( - "example_gauge", + field_name="example_gauge", definition=("An example gauge", EC2_INSTANCE_LABELS), subsystem="whatever", + registry=registry, ) ec2_instance_type_1 = fake_ec2_instance_data() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py index 5a8f692a124..7407885af31 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py @@ -3,12 +3,11 @@ from prometheus_client import CollectorRegistry, Histogram from pydantic import ByteSize, parse_obj_as -from servicelib.instrumentation import get_metrics_namespace +from servicelib.instrumentation import MetricsBase, get_metrics_namespace from ..._meta import PROJECT_NAME -_NAMESPACE_METRICS: Final[str] = get_metrics_namespace(PROJECT_NAME) -_SUBSYSTEM_NAME: Final[str] = "dynamic_services" +_METRICS_NAMESPACE: Final[str] = get_metrics_namespace(PROJECT_NAME) _INSTRUMENTATION_LABELS: Final[tuple[str, ...]] = ( "user_id", "wallet_id", @@ -31,7 +30,7 @@ ) -_BUCKETS_RATE_BPS: Final[tuple[float, ...]] = tuple( +_RATE_BPS_BUCKETS: Final[tuple[float, ...]] = tuple( parse_obj_as(ByteSize, f"{m}MiB") for m in ( 1, @@ -50,8 +49,7 @@ @dataclass(slots=True, kw_only=True) -class DynamiSidecarMetrics: - +class DynamiSidecarMetrics(MetricsBase): start_time_duration: Histogram = field(init=False) stop_time_duration: Histogram = field(init=False) pull_user_services_images_duration: Histogram = field(init=False) @@ -69,69 +67,79 @@ class DynamiSidecarMetrics: def __post_init__(self) -> None: self.start_time_duration = Histogram( "start_time_duration_seconds", - "time to start dynamic-sidecar", + "time to start dynamic service (from start request in dv-2 till service containers are in running state (healthy))", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, + namespace=_METRICS_NAMESPACE, 
buckets=_BUCKETS_TIME_S, - subsystem=_SUBSYSTEM_NAME, + subsystem=self.subsystem, + registry=self.registry, ) self.stop_time_duration = Histogram( "stop_time_duration_seconds", - "time to stop dynamic-sidecar", + "time to stop dynamic service (from stop request in dv-2 till all allocated resources (services + dynamic-sidecar) are removed)", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, + namespace=_METRICS_NAMESPACE, buckets=_BUCKETS_TIME_S, - subsystem=_SUBSYSTEM_NAME, + subsystem=self.subsystem, + registry=self.registry, ) self.pull_user_services_images_duration = Histogram( "pull_user_services_images_duration_seconds", "time to pull docker images", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, - buckets=_BUCKETS_RATE_BPS, - subsystem=_SUBSYSTEM_NAME, + namespace=_METRICS_NAMESPACE, + buckets=_RATE_BPS_BUCKETS, + subsystem=self.subsystem, + registry=self.registry, ) self.output_ports_pull_rate = Histogram( "output_ports_pull_rate_bps", "rate at which output ports were pulled", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, - buckets=_BUCKETS_RATE_BPS, - subsystem=_SUBSYSTEM_NAME, + namespace=_METRICS_NAMESPACE, + buckets=_RATE_BPS_BUCKETS, + subsystem=self.subsystem, + registry=self.registry, ) self.input_ports_pull_rate = Histogram( "input_ports_pull_rate_bps", "rate at which input ports were pulled", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, - buckets=_BUCKETS_RATE_BPS, - subsystem=_SUBSYSTEM_NAME, + namespace=_METRICS_NAMESPACE, + buckets=_RATE_BPS_BUCKETS, + subsystem=self.subsystem, + registry=self.registry, ) self.pull_service_state_rate = Histogram( "pull_service_state_rate_bps", "rate at which service states were recovered", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, - buckets=_BUCKETS_RATE_BPS, - subsystem=_SUBSYSTEM_NAME, + namespace=_METRICS_NAMESPACE, + buckets=_RATE_BPS_BUCKETS, + subsystem=self.subsystem, + registry=self.registry, ) self.push_service_state_rate = Histogram( "push_service_state_rate_bps", "rate at which service states were saved", labelnames=_INSTRUMENTATION_LABELS, - namespace=_NAMESPACE_METRICS, - buckets=_BUCKETS_RATE_BPS, - subsystem=_SUBSYSTEM_NAME, + namespace=_METRICS_NAMESPACE, + buckets=_RATE_BPS_BUCKETS, + subsystem=self.subsystem, + registry=self.registry, ) @dataclass(slots=True, kw_only=True) class DirectorV2Instrumentation: registry: CollectorRegistry - dynamic_sidecar_metrics: DynamiSidecarMetrics = field(init=False) def __post_init__(self) -> None: - self.dynamic_sidecar_metrics = DynamiSidecarMetrics() + self.dynamic_sidecar_metrics = ( + DynamiSidecarMetrics( # pylint: disable=unexpected-keyword-arg + subsystem="dynamic_services", registry=self.registry + ) + ) From 51edfa83f55d8ea4776bc1e022dbac066fd98422 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 20 Sep 2024 16:05:06 +0200 Subject: [PATCH 007/104] =?UTF-8?q?=E2=9C=A8E2E:=20increase=20timeout=20fo?= =?UTF-8?q?r=20video=20streaming=20and=20improve=20a=20bit=20logs=20about?= =?UTF-8?q?=20websockets=20(#6384)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../pytest_simcore/helpers/logging_tools.py | 6 +- .../src/pytest_simcore/helpers/playwright.py | 55 +++++++++++-------- .../helpers/playwright_sim4life.py | 8 ++- tests/e2e-playwright/tests/conftest.py | 37 +++++++++---- .../tests/sim4life/test_sim4life.py | 14 +++-- .../tests/sim4life/test_template.py | 10 ++-- 6 files 
changed, 81 insertions(+), 49 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py b/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py index 427117749aa..2bb29562d75 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py @@ -133,14 +133,14 @@ def log_context( else: ctx_msg = msg - started_time = datetime.datetime.now(tz=datetime.timezone.utc) + started_time = datetime.datetime.now(tz=datetime.UTC) try: DynamicIndentFormatter.cls_increase_indent() logger.log(level, ctx_msg.starting, *args, **kwargs) with _increased_logger_indent(logger): yield SimpleNamespace(logger=logger, messages=ctx_msg) - elapsed_time = datetime.datetime.now(tz=datetime.timezone.utc) - started_time + elapsed_time = datetime.datetime.now(tz=datetime.UTC) - started_time done_message = ( f"{ctx_msg.done} ({_timedelta_as_minute_second_ms(elapsed_time)})" ) @@ -152,7 +152,7 @@ def log_context( ) except: - elapsed_time = datetime.datetime.now(tz=datetime.timezone.utc) - started_time + elapsed_time = datetime.datetime.now(tz=datetime.UTC) - started_time error_message = ( f"{ctx_msg.raised} ({_timedelta_as_minute_second_ms(elapsed_time)})" ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py index 6060b2d026f..0225642cc4f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py @@ -3,10 +3,10 @@ import logging import re from collections import defaultdict -from contextlib import ExitStack +from collections.abc import Generator, Iterator from dataclasses import dataclass, field from enum import Enum, unique -from typing import Any, Final, Generator +from typing import Any, Final from playwright.sync_api import FrameLocator, Page, Request, WebSocket from pytest_simcore.helpers.logging_tools import log_context @@ -263,28 +263,37 @@ def wait_for_pipeline_state( return current_state -def on_web_socket_default_handler(ws) -> None: - """Usage - - from pytest_simcore.playwright_utils import on_web_socket_default_handler - - page.on("websocket", on_web_socket_default_handler) - - """ - stack = ExitStack() - ctx = stack.enter_context( - log_context( - logging.INFO, - ( - f"WebSocket opened: {ws.url}", - "WebSocket closed", - ), - ) - ) +@contextlib.contextmanager +def web_socket_default_log_handler(web_socket: WebSocket) -> Iterator[None]: - ws.on("framesent", lambda payload: ctx.logger.info("⬇️ %s", payload)) - ws.on("framereceived", lambda payload: ctx.logger.info("⬆️ %s", payload)) - ws.on("close", lambda payload: stack.close()) # noqa: ARG005 + try: + with log_context( + logging.DEBUG, + msg="handle websocket message (set to --log-cli-level=DEBUG level if you wanna see all of them)", + ) as ctx: + + def on_framesent(payload: str | bytes) -> None: + ctx.logger.debug("⬇️ Frame sent: %s", payload) + + def on_framereceived(payload: str | bytes) -> None: + ctx.logger.debug("⬆️ Frame received: %s", payload) + + def on_close(payload: WebSocket) -> None: + ctx.logger.warning("⚠️ Websocket closed: %s", payload) + + def on_socketerror(error_msg: str) -> None: + ctx.logger.error("❌ Websocket error: %s", error_msg) + + web_socket.on("framesent", on_framesent) + web_socket.on("framereceived", on_framereceived) + web_socket.on("close", on_close) + web_socket.on("socketerror", on_socketerror) + yield + 
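    # on exit, the listeners registered above are always detached again, so a
    # closed websocket leaves no dangling callbacks behind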
finally: + web_socket.remove_listener("framesent", on_framesent) + web_socket.remove_listener("framereceived", on_framereceived) + web_socket.remove_listener("close", on_close) + web_socket.remove_listener("socketerror", on_socketerror) def _node_started_predicate(request: Request) -> bool: diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py index 58c00e69597..ddbd444c5f6 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py @@ -6,6 +6,8 @@ import arrow from playwright.sync_api import FrameLocator, Page, WebSocket, expect +from pydantic import TypeAdapter # pylint: disable=no-name-in-module +from pydantic import ByteSize from .logging_tools import log_context from .playwright import ( @@ -17,7 +19,7 @@ wait_for_service_running, ) -_S4L_STREAMING_ESTABLISHMENT_MAX_TIME: Final[int] = 15 * SECOND +_S4L_STREAMING_ESTABLISHMENT_MAX_TIME: Final[int] = 30 * SECOND _S4L_SOCKETIO_REGEX: Final[re.Pattern] = re.compile( r"^(?P[^:]+)://(?P[^\.]+)\.services\.(?P[^\/]+)\/socket\.io\/.+$" ) @@ -63,7 +65,7 @@ def __call__(self, message: str) -> bool: self._initial_bit_rate_time = arrow.utcnow().datetime self.logger.info( "%s", - f"{self._initial_bit_rate=} at {self._initial_bit_rate_time.isoformat()}", + f"{TypeAdapter(ByteSize).validate_python(self._initial_bit_rate).human_readable()}/s at {self._initial_bit_rate_time.isoformat()}", ) return False @@ -78,7 +80,7 @@ def __call__(self, message: str) -> bool: bitrate_test = bool(self._initial_bit_rate != current_bitrate) self.logger.info( "%s", - f"{current_bitrate=} after {elapsed_time=}: {'good!' if bitrate_test else 'failed! bitrate did not change! TIP: talk with MaG about underwater cables!'}", + f"{TypeAdapter(ByteSize).validate_python(current_bitrate).human_readable()}/s after {elapsed_time=}: {'good!' if bitrate_test else 'failed! bitrate did not change! 
TIP: talk with MaG about underwater cables!'}", ) return bitrate_test diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index d7104c6fe70..997ac6b7138 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -25,8 +25,8 @@ from pytest import Item from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.playwright import ( - SECOND, MINUTE, + SECOND, AutoRegisteredUser, RunningState, ServiceType, @@ -34,6 +34,7 @@ SocketIOProjectClosedWaiter, SocketIOProjectStateUpdatedWaiter, decode_socketio_42_message, + web_socket_default_log_handler, ) _PROJECT_CLOSING_TIMEOUT: Final[int] = 10 * MINUTE @@ -171,9 +172,11 @@ def pytest_runtest_makereport(item: Item, call): diagnostics["duration"] = str(end_time - start_time) # Print the diagnostics report - print(f"\nDiagnostics repoort for {test_name} ---") - print(json.dumps(diagnostics, indent=2)) - print("---") + with log_context( + logging.WARNING, + f"ℹ️ Diagnostics report for {test_name} ---", # noqa: RUF001 + ) as ctx: + ctx.logger.warning(json.dumps(diagnostics, indent=2)) @pytest.hookimpl(tryfirst=True) @@ -369,7 +372,8 @@ def log_in_and_out( if quickStartWindowCloseBtnLocator.is_visible(): quickStartWindowCloseBtnLocator.click() - yield ws + with web_socket_default_log_handler(ws): + yield ws with log_context( logging.INFO, @@ -410,12 +414,17 @@ def _( f"Open project in {product_url=} as {product_billable=}", ) as ctx: waiter = SocketIOProjectStateUpdatedWaiter(expected_states=expected_states) - timeout = _OPENING_TUTORIAL_MAX_WAIT_TIME if template_id is not None else _OPENING_NEW_EMPTY_PROJECT_MAX_WAIT_TIME + timeout = ( + _OPENING_TUTORIAL_MAX_WAIT_TIME + if template_id is not None + else _OPENING_NEW_EMPTY_PROJECT_MAX_WAIT_TIME + ) with ( - log_in_and_out.expect_event("framereceived", waiter, timeout=timeout + 10 * SECOND), + log_in_and_out.expect_event( + "framereceived", waiter, timeout=timeout + 10 * SECOND + ), page.expect_response( - re.compile(r"/projects/[^:]+:open"), - timeout=timeout + 5 * SECOND + re.compile(r"/projects/[^:]+:open"), timeout=timeout + 5 * SECOND ) as response_info, ): # Project detail view pop-ups shows @@ -436,8 +445,11 @@ def _( # From the long running tasks response's urls, only their path is relevant def url_to_path(url): return urllib.parse.urlparse(url).path + def wait_for_done(response): - if url_to_path(response.url) == url_to_path(lrt_data["status_href"]): + if url_to_path(response.url) == url_to_path( + lrt_data["status_href"] + ): resp_data = response.json() resp_data = resp_data["data"] assert "task_progress" in resp_data @@ -448,10 +460,13 @@ def wait_for_done(response): task_progress["message"], ) return False - if url_to_path(response.url) == url_to_path(lrt_data["result_href"]): + if url_to_path(response.url) == url_to_path( + lrt_data["result_href"] + ): copying_logger.logger.info("project created") return response.status == 201 return False + with page.expect_response(wait_for_done, timeout=timeout): # if the above calls go to fast, this test could fail # not expected in the sim4life context though diff --git a/tests/e2e-playwright/tests/sim4life/test_sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py index 23778f3f3f5..b993f262181 100644 --- a/tests/e2e-playwright/tests/sim4life/test_sim4life.py +++ b/tests/e2e-playwright/tests/sim4life/test_sim4life.py @@ -11,7 +11,10 @@ from typing import Any from playwright.sync_api import Page, WebSocket -from 
pytest_simcore.helpers.playwright import ServiceType +from pytest_simcore.helpers.playwright import ( + ServiceType, + web_socket_default_log_handler, +) from pytest_simcore.helpers.playwright_sim4life import ( check_video_streaming, interact_with_s4l, @@ -49,8 +52,9 @@ def test_sim4life( page, node_ids[0], log_in_and_out, autoscaled=autoscaled, copy_workspace=False ) s4l_websocket = resp["websocket"] - s4l_iframe = resp["iframe"] - interact_with_s4l(page, s4l_iframe) + with web_socket_default_log_handler(s4l_websocket): + s4l_iframe = resp["iframe"] + interact_with_s4l(page, s4l_iframe) - if check_videostreaming: - check_video_streaming(page, s4l_iframe, s4l_websocket) + if check_videostreaming: + check_video_streaming(page, s4l_iframe, s4l_websocket) diff --git a/tests/e2e-playwright/tests/sim4life/test_template.py b/tests/e2e-playwright/tests/sim4life/test_template.py index 9ac5d4ae065..a4f104a6291 100644 --- a/tests/e2e-playwright/tests/sim4life/test_template.py +++ b/tests/e2e-playwright/tests/sim4life/test_template.py @@ -11,6 +11,7 @@ from typing import Any from playwright.sync_api import Page, WebSocket +from pytest_simcore.helpers.playwright import web_socket_default_log_handler from pytest_simcore.helpers.playwright_sim4life import ( check_video_streaming, interact_with_s4l, @@ -39,8 +40,9 @@ def test_template( page, node_ids[0], log_in_and_out, autoscaled=autoscaled, copy_workspace=True ) s4l_websocket = resp["websocket"] - s4l_iframe = resp["iframe"] - interact_with_s4l(page, s4l_iframe) + with web_socket_default_log_handler(s4l_websocket): + s4l_iframe = resp["iframe"] + interact_with_s4l(page, s4l_iframe) - if check_videostreaming: - check_video_streaming(page, s4l_iframe, s4l_websocket) + if check_videostreaming: + check_video_streaming(page, s4l_iframe, s4l_websocket) From 26aa1650820f5206ba8b46dfaf0d9e2e77b88eaa Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 20 Sep 2024 16:54:54 +0200 Subject: [PATCH 008/104] =?UTF-8?q?=F0=9F=8E=A8Clusters=20keeper/use=20ssm?= =?UTF-8?q?=20(=F0=9F=9A=A8change=20in=20private=20clusters)=20(#6361)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env-devel | 1 + .../src/models_library/clusters.py | 3 + .../api/health.py | 23 ++-- .../constants.py | 15 +++ .../core/application.py | 2 + .../core/settings.py | 34 +++++- .../modules/clusters.py | 10 +- .../modules/clusters_management_core.py | 109 ++++++++++++++++-- .../modules/clusters_management_task.py | 1 + .../modules/ssm.py | 56 +++++++++ .../utils/clusters.py | 35 +++--- .../utils/ec2.py | 27 ++++- .../clusters-keeper/tests/unit/conftest.py | 25 ++++ .../tests/unit/test_api_health.py | 7 ++ .../tests/unit/test_modules_clusters.py | 1 + .../test_modules_clusters_management_core.py | 1 + .../test_modules_clusters_management_task.py | 1 + .../tests/unit/test_modules_ec2.py | 7 +- .../tests/unit/test_modules_rabbitmq.py | 9 +- .../tests/unit/test_modules_redis.py | 1 + .../tests/unit/test_modules_remote_debug.py | 1 + .../tests/unit/test_modules_ssm.py | 22 ++++ .../tests/unit/test_rpc_clusters.py | 1 + .../tests/unit/test_rpc_ec2_instances.py | 1 + .../tests/unit/test_utils_clusters.py | 89 ++++++++++---- .../tests/unit/test_utils_ec2.py | 14 +++ services/docker-compose.yml | 7 ++ 27 files changed, 429 insertions(+), 74 deletions(-) create mode 100644 services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py create mode 100644 
services/clusters-keeper/src/simcore_service_clusters_keeper/modules/ssm.py create mode 100644 services/clusters-keeper/tests/unit/test_modules_ssm.py diff --git a/.env-devel b/.env-devel index 4a28dc33cf1..af4ba2efdcc 100644 --- a/.env-devel +++ b/.env-devel @@ -50,6 +50,7 @@ CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest CLUSTERS_KEEPER_DASK_NTHREADS=0 CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf CLUSTERS_KEEPER_EC2_ACCESS=null +CLUSTERS_KEEPER_SSM_ACCESS=null CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX="" CLUSTERS_KEEPER_LOGLEVEL=WARNING CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5 diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index c51598b06ee..1856dc5c287 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -96,6 +96,9 @@ class Config(BaseAuthentication.Config): class NoAuthentication(BaseAuthentication): type: Literal["none"] = "none" + class Config(BaseAuthentication.Config): + schema_extra: ClassVar[dict[str, Any]] = {"examples": [{"type": "none"}]} + class TLSAuthentication(BaseAuthentication): type: Literal["tls"] = "tls" diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/api/health.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/api/health.py index ad2882da3c8..a971a551e4e 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/api/health.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/api/health.py @@ -21,7 +21,7 @@ @router.get("/", include_in_schema=True, response_class=PlainTextResponse) async def health_check(): # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
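    # (datetime.UTC, used below, is the Python 3.11+ alias of datetime.timezone.utc)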
- return f"{__name__}.health_check@{datetime.datetime.now(datetime.timezone.utc).isoformat()}" + return f"{__name__}.health_check@{datetime.datetime.now(datetime.UTC).isoformat()}" class _ComponentStatus(BaseModel): @@ -33,6 +33,7 @@ class _StatusGet(BaseModel): rabbitmq: _ComponentStatus ec2: _ComponentStatus redis_client_sdk: _ComponentStatus + ssm: _ComponentStatus @router.get("/status", include_in_schema=True, response_model=_StatusGet) @@ -40,18 +41,26 @@ async def get_status(app: Annotated[FastAPI, Depends(get_app)]) -> _StatusGet: return _StatusGet( rabbitmq=_ComponentStatus( is_enabled=is_rabbitmq_enabled(app), - is_responsive=await get_rabbitmq_client(app).ping() - if is_rabbitmq_enabled(app) - else False, + is_responsive=( + await get_rabbitmq_client(app).ping() + if is_rabbitmq_enabled(app) + else False + ), ), ec2=_ComponentStatus( is_enabled=bool(app.state.ec2_client), - is_responsive=await app.state.ec2_client.ping() - if app.state.ec2_client - else False, + is_responsive=( + await app.state.ec2_client.ping() if app.state.ec2_client else False + ), ), redis_client_sdk=_ComponentStatus( is_enabled=bool(app.state.redis_client_sdk), is_responsive=await get_redis_client(app).ping(), ), + ssm=_ComponentStatus( + is_enabled=(app.state.ssm_client is not None), + is_responsive=( + await app.state.ssm_client.ping() if app.state.ssm_client else False + ), + ), ) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py new file mode 100644 index 00000000000..7f970665f25 --- /dev/null +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py @@ -0,0 +1,15 @@ +from typing import Final + +from aws_library.ec2._models import AWSTagKey, AWSTagValue +from pydantic import parse_obj_as + +DOCKER_STACK_DEPLOY_COMMAND_NAME: Final[str] = "private cluster docker deploy" +DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( + AWSTagKey, "io.simcore.clusters-keeper.private_cluster_docker_deploy" +) + +USER_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "user_id") +WALLET_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "wallet_id") +ROLE_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "role") +WORKER_ROLE_TAG_VALUE: Final[AWSTagValue] = parse_obj_as(AWSTagValue, "worker") +MANAGER_ROLE_TAG_VALUE: Final[AWSTagValue] = parse_obj_as(AWSTagValue, "manager") diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py index 14b3d344b70..5948715b081 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py @@ -19,6 +19,7 @@ from ..modules.ec2 import setup as setup_ec2 from ..modules.rabbitmq import setup as setup_rabbitmq from ..modules.redis import setup as setup_redis +from ..modules.ssm import setup as setup_ssm from ..rpc.rpc_routes import setup_rpc_routes from .settings import ApplicationSettings @@ -55,6 +56,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: setup_rabbitmq(app) setup_rpc_routes(app) setup_ec2(app) + setup_ssm(app) setup_redis(app) setup_clusters_management(app) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 17a8ffcaae8..07fd7deb8bf 100644 --- 
a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -25,6 +25,7 @@ from settings_library.ec2 import EC2Settings from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings +from settings_library.ssm import SSMSettings from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings from types_aiobotocore_ec2.literals import InstanceTypeType @@ -50,6 +51,21 @@ class Config(EC2Settings.Config): } +class ClustersKeeperSSMSettings(SSMSettings): + class Config(SSMSettings.Config): + env_prefix = CLUSTERS_KEEPER_ENV_PREFIX + + schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + "examples": [ + { + f"{CLUSTERS_KEEPER_ENV_PREFIX}{key}": var + for key, var in example.items() + } + for example in SSMSettings.Config.schema_extra["examples"] + ], + } + + class WorkersEC2InstancesSettings(BaseCustomSettings): WORKERS_EC2_INSTANCES_ALLOWED_TYPES: dict[str, EC2InstanceBootSpecific] = Field( ..., @@ -183,6 +199,12 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings): "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", ) + PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL: str = Field( + default="172.20.0.0/14", + description="defines the docker swarm default address pool in CIDR format " + "(see https://docs.docker.com/reference/cli/docker/swarm/init/)", + ) + @validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_valid_instance_names( @@ -250,6 +272,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): auto_default_from_env=True ) + CLUSTERS_KEEPER_SSM_ACCESS: ClustersKeeperSSMSettings | None = Field( + auto_default_from_env=True + ) + CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: PrimaryEC2InstancesSettings | None = Field( auto_default_from_env=True ) @@ -285,9 +311,11 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", ) - CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: NonNegativeInt = Field( - default=5, - description="Max number of missed heartbeats before a cluster is terminated", + CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: NonNegativeInt = ( + Field( + default=5, + description="Max number of missed heartbeats before a cluster is terminated", + ) ) CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: str = Field( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py index 38246f3008a..89860549fd3 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py @@ -49,7 +49,7 @@ async def _get_primary_ec2_params( ec2_instance_types: list[ EC2InstanceType ] = await ec2_client.get_ec2_instance_capabilities( - instance_type_names=[ec2_type_name] + instance_type_names={ec2_type_name} ) assert ec2_instance_types # nosec assert len(ec2_instance_types) == 1 # nosec @@ -72,15 +72,7 @@ async def create_cluster( tags=creation_ec2_tags(app_settings, user_id=user_id, wallet_id=wallet_id), startup_script=create_startup_script( app_settings, - cluster_machines_name_prefix=get_cluster_name( - 
app_settings, user_id=user_id, wallet_id=wallet_id, is_manager=False - ), ec2_boot_specific=ec2_instance_boot_specs, - additional_custom_tags={ - AWSTagKey("user_id"): AWSTagValue(f"{user_id}"), - AWSTagKey("wallet_id"): AWSTagValue(f"{wallet_id}"), - AWSTagKey("role"): AWSTagValue("worker"), - }, ), ami_id=ec2_instance_boot_specs.ami_id, key_name=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_KEY_NAME, diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py index a7c23143a0b..871ad8bd242 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py @@ -5,12 +5,22 @@ import arrow from aws_library.ec2 import AWSTagKey, EC2InstanceData +from aws_library.ec2._models import AWSTagValue from fastapi import FastAPI from models_library.users import UserID from models_library.wallets import WalletID from pydantic import parse_obj_as from servicelib.logging_utils import log_catch - +from servicelib.utils import limited_gather + +from ..constants import ( + DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY, + DOCKER_STACK_DEPLOY_COMMAND_NAME, + ROLE_TAG_KEY, + USER_ID_TAG_KEY, + WALLET_ID_TAG_KEY, + WORKER_ROLE_TAG_VALUE, +) from ..core.settings import get_application_settings from ..modules.clusters import ( delete_clusters, @@ -18,9 +28,17 @@ get_cluster_workers, set_instance_heartbeat, ) +from ..utils.clusters import create_deploy_cluster_stack_script from ..utils.dask import get_scheduler_auth, get_scheduler_url -from ..utils.ec2 import HEARTBEAT_TAG_KEY +from ..utils.ec2 import ( + HEARTBEAT_TAG_KEY, + get_cluster_name, + user_id_from_instance_tags, + wallet_id_from_instance_tags, +) from .dask import is_scheduler_busy, ping_scheduler +from .ec2 import get_ec2_client +from .ssm import get_ssm_client _logger = logging.getLogger(__name__) @@ -42,8 +60,8 @@ def _get_instance_last_heartbeat(instance: EC2InstanceData) -> datetime.datetime async def _get_all_associated_worker_instances( app: FastAPI, primary_instances: Iterable[EC2InstanceData], -) -> list[EC2InstanceData]: - worker_instances = [] +) -> set[EC2InstanceData]: + worker_instances: set[EC2InstanceData] = set() for instance in primary_instances: assert "user_id" in instance.tags # nosec user_id = UserID(instance.tags[_USER_ID_TAG_KEY]) @@ -55,7 +73,7 @@ async def _get_all_associated_worker_instances( else None ) - worker_instances.extend( + worker_instances.update( await get_cluster_workers(app, user_id=user_id, wallet_id=wallet_id) ) return worker_instances @@ -63,12 +81,12 @@ async def _get_all_associated_worker_instances( async def _find_terminateable_instances( app: FastAPI, instances: Iterable[EC2InstanceData] -) -> list[EC2InstanceData]: +) -> set[EC2InstanceData]: app_settings = get_application_settings(app) assert app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES # nosec # get the corresponding ec2 instance data - terminateable_instances: list[EC2InstanceData] = [] + terminateable_instances: set[EC2InstanceData] = set() time_to_wait_before_termination = ( app_settings.CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION @@ -82,7 +100,7 @@ async def _find_terminateable_instances( elapsed_time_since_heartbeat = arrow.utcnow().datetime - last_heartbeat allowed_time_to_wait = time_to_wait_before_termination 
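            # i.e. a silent cluster gets a grace period of
            # CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION
            # heartbeat intervals before it becomes terminateable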
if elapsed_time_since_heartbeat >= allowed_time_to_wait: - terminateable_instances.append(instance) + terminateable_instances.add(instance) else: _logger.info( "%s has still %ss before being terminateable", @@ -93,14 +111,14 @@ async def _find_terminateable_instances( elapsed_time_since_startup = arrow.utcnow().datetime - instance.launch_time allowed_time_to_wait = startup_delay if elapsed_time_since_startup >= allowed_time_to_wait: - terminateable_instances.append(instance) + terminateable_instances.add(instance) # get all terminateable instances associated worker instances worker_instances = await _get_all_associated_worker_instances( app, terminateable_instances ) - return terminateable_instances + worker_instances + return terminateable_instances.union(worker_instances) async def check_clusters(app: FastAPI) -> None: @@ -112,6 +130,7 @@ async def check_clusters(app: FastAPI) -> None: if await ping_scheduler(get_scheduler_url(instance), get_scheduler_auth(app)) } + # set intance heartbeat if scheduler is busy for instance in connected_intances: with log_catch(_logger, reraise=False): # NOTE: some connected instance could in theory break between these 2 calls, therefore this is silenced and will @@ -124,6 +143,7 @@ async def check_clusters(app: FastAPI) -> None: f"{instance.id=} for {instance.tags=}", ) await set_instance_heartbeat(app, instance=instance) + # clean any cluster that is not doing anything if terminateable_instances := await _find_terminateable_instances( app, connected_intances ): @@ -138,7 +158,7 @@ async def check_clusters(app: FastAPI) -> None: for instance in disconnected_instances if _get_instance_last_heartbeat(instance) is None } - + # remove instances that were starting for too long if terminateable_instances := await _find_terminateable_instances( app, starting_instances ): @@ -149,7 +169,72 @@ async def check_clusters(app: FastAPI) -> None: ) await delete_clusters(app, instances=terminateable_instances) - # the other instances are broken (they were at some point connected but now not anymore) + # NOTE: transmit command to start docker swarm/stack if needed + # once the instance is connected to the SSM server, + # use ssm client to send the command to these instances, + # we send a command that contain: + # the docker-compose file in binary, + # the call to init the docker swarm and the call to deploy the stack + instances_in_need_of_deployment = { + i + for i in starting_instances - terminateable_instances + if DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY not in i.tags + } + + if instances_in_need_of_deployment: + app_settings = get_application_settings(app) + ssm_client = get_ssm_client(app) + ec2_client = get_ec2_client(app) + instances_in_need_of_deployment_ssm_connection_state = await limited_gather( + *[ + ssm_client.is_instance_connected_to_ssm_server(i.id) + for i in instances_in_need_of_deployment + ], + reraise=False, + log=_logger, + limit=20, + ) + ec2_connected_to_ssm_server = [ + i + for i, c in zip( + instances_in_need_of_deployment, + instances_in_need_of_deployment_ssm_connection_state, + strict=True, + ) + if c is True + ] + started_instances_ready_for_command = ec2_connected_to_ssm_server + if started_instances_ready_for_command: + # we need to send 1 command per machine here, as the user_id/wallet_id changes + for i in started_instances_ready_for_command: + ssm_command = await ssm_client.send_command( + [i.id], + command=create_deploy_cluster_stack_script( + app_settings, + cluster_machines_name_prefix=get_cluster_name( + app_settings, + 
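                            # NOTE (editor's sketch, not part of the patch): one SSM
                            # command is sent per instance because the deploy script
                            # embeds the user_id/wallet_id recovered from that
                            # instance's tags (keyword arguments below); afterwards
                            # the instance is tagged with the returned command id,
                            # which excludes it from instances_in_need_of_deployment
                            # on the next check_clusters pass and makes the
                            # deployment effectively idempotent.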
user_id=user_id_from_instance_tags(i.tags), + wallet_id=wallet_id_from_instance_tags(i.tags), + is_manager=False, + ), + additional_custom_tags={ + USER_ID_TAG_KEY: i.tags[USER_ID_TAG_KEY], + WALLET_ID_TAG_KEY: i.tags[WALLET_ID_TAG_KEY], + ROLE_TAG_KEY: WORKER_ROLE_TAG_VALUE, + }, + ), + command_name=DOCKER_STACK_DEPLOY_COMMAND_NAME, + ) + await ec2_client.set_instances_tags( + started_instances_ready_for_command, + tags={ + DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY: AWSTagValue( + ssm_command.command_id + ), + }, + ) + + # the remaining instances are broken (they were at some point connected but now not anymore) broken_instances = disconnected_instances - starting_instances if terminateable_instances := await _find_terminateable_instances( app, broken_instances diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py index 806cb6d472c..410edba1efb 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py @@ -47,6 +47,7 @@ def setup(app: FastAPI): for s in [ app_settings.CLUSTERS_KEEPER_EC2_ACCESS, app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES, + app_settings.CLUSTERS_KEEPER_SSM_ACCESS, ] ): logger.warning( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/ssm.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/ssm.py new file mode 100644 index 00000000000..218812d5523 --- /dev/null +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/ssm.py @@ -0,0 +1,56 @@ +import logging +from typing import cast + +from aws_library.ssm import SimcoreSSMAPI +from aws_library.ssm._errors import SSMNotConnectedError +from fastapi import FastAPI +from settings_library.ssm import SSMSettings +from tenacity.asyncio import AsyncRetrying +from tenacity.before_sleep import before_sleep_log +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_random_exponential + +from ..core.errors import ConfigurationError +from ..core.settings import get_application_settings + +_logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.ssm_client = None + settings: SSMSettings | None = get_application_settings( + app + ).CLUSTERS_KEEPER_SSM_ACCESS + + if not settings: + _logger.warning("SSM client is de-activated in the settings") + return + + app.state.ssm_client = client = await SimcoreSSMAPI.create(settings) + + async for attempt in AsyncRetrying( + reraise=True, + stop=stop_after_delay(120), + wait=wait_random_exponential(max=30), + before_sleep=before_sleep_log(_logger, logging.WARNING), + ): + with attempt: + connected = await client.ping() + if not connected: + raise SSMNotConnectedError # pragma: no cover + + async def on_shutdown() -> None: + if app.state.ssm_client: + await cast(SimcoreSSMAPI, app.state.ssm_client).close() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_ssm_client(app: FastAPI) -> SimcoreSSMAPI: + if not app.state.ssm_client: + raise ConfigurationError( + msg="SSM client is not available. Please check the configuration." 
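            # NOTE (editor's annotation): app.state.ssm_client stays None when
            # CLUSTERS_KEEPER_SSM_ACCESS is unset (see on_startup above), so this
            # guard surfaces a disabled or misconfigured SSM module as an explicit
            # ConfigurationError rather than a late AttributeError on a None client.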
+ ) + return cast(SimcoreSSMAPI, app.state.ssm_client) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py index 48eb4dee380..c9b4a32f4af 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py @@ -8,6 +8,7 @@ import arrow import yaml from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, EC2Tags +from aws_library.ec2._models import CommandStr from fastapi.encoders import jsonable_encoder from models_library.api_schemas_clusters_keeper.clusters import ( ClusterState, @@ -107,35 +108,43 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str: def create_startup_script( app_settings: ApplicationSettings, *, - cluster_machines_name_prefix: str, ec2_boot_specific: EC2InstanceBootSpecific, - additional_custom_tags: EC2Tags, ) -> str: assert app_settings.CLUSTERS_KEEPER_EC2_ACCESS # nosec assert app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES # nosec - environment_variables = _prepare_environment_variables( - app_settings, - cluster_machines_name_prefix=cluster_machines_name_prefix, - additional_custom_tags=additional_custom_tags, - ) - startup_commands = ec2_boot_specific.custom_boot_scripts.copy() + return "\n".join(startup_commands) + + +def create_deploy_cluster_stack_script( + app_settings: ApplicationSettings, + *, + cluster_machines_name_prefix: str, + additional_custom_tags: EC2Tags, +) -> str: + deploy_script: list[CommandStr] = [] assert app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES # nosec if isinstance( app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH, TLSAuthentication, ): - + # get the dask certificates download_certificates_commands = [ f"mkdir --parents {_HOST_CERTIFICATES_BASE_PATH}", f'aws ssm get-parameter --name "{app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CA}" --region us-east-1 --with-decryption --query "Parameter.Value" --output text > {_HOST_TLS_CA_FILE_PATH}', f'aws ssm get-parameter --name "{app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CERT}" --region us-east-1 --with-decryption --query "Parameter.Value" --output text > {_HOST_TLS_CERT_FILE_PATH}', f'aws ssm get-parameter --name "{app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_KEY}" --region us-east-1 --with-decryption --query "Parameter.Value" --output text > {_HOST_TLS_KEY_FILE_PATH}', ] - startup_commands.extend(download_certificates_commands) + deploy_script.extend(download_certificates_commands) + + environment_variables = _prepare_environment_variables( + app_settings, + cluster_machines_name_prefix=cluster_machines_name_prefix, + additional_custom_tags=additional_custom_tags, + ) - startup_commands.extend( + deploy_script.extend( [ # NOTE: https://stackoverflow.com/questions/41203492/solving-redis-warnings-on-overcommit-memory-and-transparent-huge-pages-for-ubunt "sysctl vm.overcommit_memory=1", @@ -143,11 +152,11 @@ def create_startup_script( f"echo '{_prometheus_yml_base64_encoded()}' | base64 -d > {_HOST_PROMETHEUS_PATH}", f"echo '{_prometheus_basic_auth_yml_base64_encoded(app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME, app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD.get_secret_value())}' | base64 -d > 
{_HOST_PROMETHEUS_WEB_PATH}", # NOTE: --default-addr-pool is necessary in order to prevent conflicts with AWS node IPs - "docker swarm init --default-addr-pool 172.20.0.0/14", + f"docker swarm init --default-addr-pool {app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL}", f"{' '.join(environment_variables)} docker stack deploy --with-registry-auth --compose-file={_HOST_DOCKER_COMPOSE_PATH} dask_stack", ] ) - return "\n".join(startup_commands) + return "\n".join(deploy_script) def _convert_ec2_state_to_cluster_state( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py index c74bbc554d9..b48e1076e59 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py @@ -7,6 +7,12 @@ from pydantic import parse_obj_as from .._meta import VERSION +from ..constants import ( + MANAGER_ROLE_TAG_VALUE, + ROLE_TAG_KEY, + USER_ID_TAG_KEY, + WALLET_ID_TAG_KEY, +) from ..core.settings import ApplicationSettings _APPLICATION_TAG_KEY: Final[str] = "io.simcore.clusters-keeper" @@ -50,9 +56,9 @@ def creation_ec2_tags( app_settings, user_id=user_id, wallet_id=wallet_id, is_manager=True ) ), - AWSTagKey("user_id"): AWSTagValue(f"{user_id}"), - AWSTagKey("wallet_id"): AWSTagValue(f"{wallet_id}"), - AWSTagKey("role"): AWSTagValue("manager"), + USER_ID_TAG_KEY: AWSTagValue(f"{user_id}"), + WALLET_ID_TAG_KEY: AWSTagValue(f"{wallet_id}"), + ROLE_TAG_KEY: MANAGER_ROLE_TAG_VALUE, } | app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_CUSTOM_TAGS ) @@ -67,8 +73,8 @@ def ec2_instances_for_user_wallet_filter( ) -> EC2Tags: return ( _minimal_identification_tag(app_settings) - | {AWSTagKey("user_id"): AWSTagValue(f"{user_id}")} - | {AWSTagKey("wallet_id"): AWSTagValue(f"{wallet_id}")} + | {USER_ID_TAG_KEY: AWSTagValue(f"{user_id}")} + | {WALLET_ID_TAG_KEY: AWSTagValue(f"{wallet_id}")} ) @@ -81,3 +87,14 @@ def compose_user_data(bash_command: str) -> str: echo "completed user data bash script" """ ) + + +def wallet_id_from_instance_tags(tags: EC2Tags) -> WalletID | None: + wallet_id_str = tags[WALLET_ID_TAG_KEY] + if wallet_id_str == "None": + return None + return WalletID(wallet_id_str) + + +def user_id_from_instance_tags(tags: EC2Tags) -> UserID: + return UserID(tags[USER_ID_TAG_KEY]) diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index a8f4913d4bb..43805123c30 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -22,11 +22,13 @@ from fastapi import FastAPI from models_library.users import UserID from models_library.wallets import WalletID +from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.ec2 import EC2Settings from settings_library.rabbit import RabbitSettings +from settings_library.ssm import SSMSettings from simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ( CLUSTERS_KEEPER_ENV_PREFIX, @@ -86,6 +88,21 @@ def mocked_ec2_server_envs( return setenvs_from_dict(monkeypatch, changed_envs) +@pytest.fixture +def mocked_ssm_server_envs( + mocked_ssm_server_settings: 
SSMSettings, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + # NOTE: overrides the SSMSettings with what clusters-keeper expects + changed_envs: EnvVarsDict = { + f"{CLUSTERS_KEEPER_ENV_PREFIX}{k}": ( + v.get_secret_value() if isinstance(v, SecretStr) else v + ) + for k, v in mocked_ssm_server_settings.dict().items() + } + return setenvs_from_dict(monkeypatch, changed_envs) + + @pytest.fixture def ec2_settings(mocked_ec2_server_settings: EC2Settings) -> EC2Settings: return mocked_ec2_server_settings @@ -105,6 +122,9 @@ def app_environment( "CLUSTERS_KEEPER_EC2_ACCESS": "{}", "CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID": faker.pystr(), "CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY": faker.pystr(), + "CLUSTERS_KEEPER_SSM_ACCESS": "{}", + "CLUSTERS_KEEPER_SSM_ACCESS_KEY_ID": faker.pystr(), + "CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY": faker.pystr(), "CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES": "{}", "CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX": faker.pystr(), "CLUSTERS_KEEPER_DASK_NTHREADS": f"{faker.pyint(min_value=0)}", @@ -206,6 +226,11 @@ def disabled_ec2(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv("CLUSTERS_KEEPER_EC2_ACCESS", "null") +@pytest.fixture +def disabled_ssm(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv("CLUSTERS_KEEPER_SSM_ACCESS", "null") + + @pytest.fixture def enabled_rabbitmq( app_environment: EnvVarsDict, rabbit_service: RabbitSettings diff --git a/services/clusters-keeper/tests/unit/test_api_health.py b/services/clusters-keeper/tests/unit/test_api_health.py index 734620afa1b..5bf72ccae8e 100644 --- a/services/clusters-keeper/tests/unit/test_api_health.py +++ b/services/clusters-keeper/tests/unit/test_api_health.py @@ -21,6 +21,7 @@ def app_environment( app_environment: EnvVarsDict, enabled_rabbitmq: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, ) -> EnvVarsDict: return app_environment @@ -69,6 +70,9 @@ async def test_status( assert status_response.ec2.is_enabled is True assert status_response.ec2.is_responsive is False + assert status_response.ssm.is_enabled is True + assert status_response.ssm.is_responsive is False + # restart the server mocked_aws_server.start() @@ -83,3 +87,6 @@ async def test_status( assert status_response.ec2.is_enabled is True assert status_response.ec2.is_responsive is True + + assert status_response.ssm.is_enabled is True + assert status_response.ssm.is_responsive is True diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters.py b/services/clusters-keeper/tests/unit/test_modules_clusters.py index 16cfbde04b2..497b9e447e7 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters.py @@ -49,6 +49,7 @@ def _base_configuration( mocked_redis_server: None, mocked_ec2_server_envs: EnvVarsDict, mocked_primary_ec2_instances_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, ) -> None: ... 
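NOTE (editor's sketch): the mocked_ssm_server_envs fixture above re-exports every field of SSMSettings as an environment variable carrying the application prefix, unwrapping SecretStr values on the way. Below is a minimal, self-contained sketch of that mapping, assuming pydantic v1 as used throughout this patch series; DemoSettings, its fields, and to_prefixed_envs are illustrative stand-ins, not project code:

```python
# Hedged sketch of the env-prefixing pattern; not the project fixture itself.
from pydantic import BaseModel, SecretStr


class DemoSettings(BaseModel):
    # illustrative fields only; the real SSMSettings lives in settings_library.ssm
    SSM_ACCESS_KEY_ID: str = "key-id"
    SSM_SECRET_ACCESS_KEY: SecretStr = SecretStr("very-secret")
    SSM_REGION_NAME: str = "us-east-1"


def to_prefixed_envs(settings: BaseModel, prefix: str) -> dict[str, str]:
    # mirrors the fixture's dict comprehension: prefix each field name and
    # expose secrets only through get_secret_value()
    return {
        f"{prefix}{key}": (
            value.get_secret_value() if isinstance(value, SecretStr) else str(value)
        )
        for key, value in settings.dict().items()
    }


assert to_prefixed_envs(DemoSettings(), "CLUSTERS_KEEPER_") == {
    "CLUSTERS_KEEPER_SSM_ACCESS_KEY_ID": "key-id",
    "CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY": "very-secret",
    "CLUSTERS_KEEPER_SSM_REGION_NAME": "us-east-1",
}
```

In the real fixture the resulting dict is then applied through setenvs_from_dict(monkeypatch, ...), so the application under test picks the values up as CLUSTERS_KEEPER_SSM_* variables.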
diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py b/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py index 09720632fd4..438e69ee72e 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters_management_core.py @@ -60,6 +60,7 @@ def _base_configuration( mocked_redis_server: None, mocked_ec2_server_envs: EnvVarsDict, mocked_primary_ec2_instances_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, ) -> None: ... diff --git a/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py b/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py index 0c9c52eab4c..d22bdce1f76 100644 --- a/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py +++ b/services/clusters-keeper/tests/unit/test_modules_clusters_management_task.py @@ -37,6 +37,7 @@ def mock_background_task(mocker: MockerFixture) -> mock.Mock: async def test_clusters_management_task_created_and_deleted( disabled_rabbitmq: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, mock_background_task: mock.Mock, initialized_app: FastAPI, diff --git a/services/clusters-keeper/tests/unit/test_modules_ec2.py b/services/clusters-keeper/tests/unit/test_modules_ec2.py index 0820ada5818..439e54aaa2d 100644 --- a/services/clusters-keeper/tests/unit/test_modules_ec2.py +++ b/services/clusters-keeper/tests/unit/test_modules_ec2.py @@ -5,13 +5,16 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_clusters_keeper.core.errors import ConfigurationError from simcore_service_clusters_keeper.modules.ec2 import get_ec2_client +from simcore_service_clusters_keeper.modules.ssm import get_ssm_client -async def test_ec2_does_not_initialize_if_deactivated( +async def test_ec2_does_not_initialize_if_ec2_deactivated( disabled_rabbitmq: None, disabled_ec2: None, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, initialized_app: FastAPI, ): @@ -19,3 +22,5 @@ async def test_ec2_does_not_initialize_if_deactivated( assert initialized_app.state.ec2_client is None with pytest.raises(ConfigurationError): get_ec2_client(initialized_app) + + assert get_ssm_client(initialized_app) diff --git a/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py b/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py index a2c23ac0602..1bbd5683c76 100644 --- a/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py +++ b/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py @@ -43,8 +43,8 @@ def rabbit_log_message(faker: Faker) -> LoggerRabbitMessage: return LoggerRabbitMessage( user_id=faker.pyint(min_value=1), - project_id=faker.uuid4(), - node_id=faker.uuid4(), + project_id=faker.uuid4(cast_to=None), + node_id=faker.uuid4(cast_to=None), messages=faker.pylist(allowed_types=(str,)), ) @@ -62,6 +62,7 @@ def rabbit_message( def test_rabbitmq_does_not_initialize_if_deactivated( disabled_rabbitmq: None, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, ): @@ -78,6 +79,7 @@ def test_rabbitmq_does_not_initialize_if_deactivated( def test_rabbitmq_initializes( enabled_rabbitmq: RabbitSettings, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, ): @@ -95,6 +97,7 @@ def test_rabbitmq_initializes( async def 
test_post_message( enabled_rabbitmq: RabbitSettings, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, rabbit_message: RabbitMessageBase, @@ -124,6 +127,7 @@ async def test_post_message( async def test_post_message_with_disabled_rabbit_does_not_raise( disabled_rabbitmq: None, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, rabbit_message: RabbitMessageBase, @@ -135,6 +139,7 @@ async def test_post_message_when_rabbit_disconnected_does_not_raise( paused_container: Callable[[str], AbstractAsyncContextManager[None]], enabled_rabbitmq: RabbitSettings, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, rabbit_log_message: LoggerRabbitMessage, diff --git a/services/clusters-keeper/tests/unit/test_modules_redis.py b/services/clusters-keeper/tests/unit/test_modules_redis.py index f6b760f27fb..44fb9a9f6ac 100644 --- a/services/clusters-keeper/tests/unit/test_modules_redis.py +++ b/services/clusters-keeper/tests/unit/test_modules_redis.py @@ -10,6 +10,7 @@ async def test_redis_raises_if_missing( disabled_rabbitmq: None, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, ): diff --git a/services/clusters-keeper/tests/unit/test_modules_remote_debug.py b/services/clusters-keeper/tests/unit/test_modules_remote_debug.py index dbb5a91922e..3fe8b823d13 100644 --- a/services/clusters-keeper/tests/unit/test_modules_remote_debug.py +++ b/services/clusters-keeper/tests/unit/test_modules_remote_debug.py @@ -23,6 +23,7 @@ def app_environment( def test_application_with_debug_enabled( disabled_rabbitmq: None, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, initialized_app: FastAPI, ): diff --git a/services/clusters-keeper/tests/unit/test_modules_ssm.py b/services/clusters-keeper/tests/unit/test_modules_ssm.py new file mode 100644 index 00000000000..3bcffb72661 --- /dev/null +++ b/services/clusters-keeper/tests/unit/test_modules_ssm.py @@ -0,0 +1,22 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import pytest +from fastapi import FastAPI +from simcore_service_clusters_keeper.core.errors import ConfigurationError +from simcore_service_clusters_keeper.modules.ssm import get_ssm_client + + +async def test_ssm_does_not_initialize_if_ssm_deactivated( + disabled_rabbitmq: None, + disabled_ec2: None, + disabled_ssm: None, + mocked_redis_server: None, + initialized_app: FastAPI, +): + assert hasattr(initialized_app.state, "ssm_client") + assert initialized_app.state.ssm_client is None + with pytest.raises(ConfigurationError): + get_ssm_client(initialized_app) diff --git a/services/clusters-keeper/tests/unit/test_rpc_clusters.py b/services/clusters-keeper/tests/unit/test_rpc_clusters.py index 41146c827bd..a280cbb5338 100644 --- a/services/clusters-keeper/tests/unit/test_rpc_clusters.py +++ b/services/clusters-keeper/tests/unit/test_rpc_clusters.py @@ -43,6 +43,7 @@ def _base_configuration( mocked_redis_server: None, mocked_ec2_server_envs: EnvVarsDict, mocked_primary_ec2_instances_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, initialized_app: FastAPI, ensure_run_in_sequence_context_is_empty: None, ) -> None: diff --git a/services/clusters-keeper/tests/unit/test_rpc_ec2_instances.py b/services/clusters-keeper/tests/unit/test_rpc_ec2_instances.py index d03b6b74502..f4eea132cdf 100644 --- 
a/services/clusters-keeper/tests/unit/test_rpc_ec2_instances.py +++ b/services/clusters-keeper/tests/unit/test_rpc_ec2_instances.py @@ -24,6 +24,7 @@ def _base_configuration( enabled_rabbitmq: None, mocked_redis_server: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, initialized_app: FastAPI, ) -> None: ... diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index a6592ed1fa4..1c4a7760d5f 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -29,6 +29,7 @@ from simcore_service_clusters_keeper.utils.clusters import ( _prepare_environment_variables, create_cluster_from_ec2_instance, + create_deploy_cluster_stack_script, create_startup_script, ) from types_aiobotocore_ec2.literals import InstanceStateNameType @@ -51,16 +52,26 @@ def ec2_boot_specs(app_settings: ApplicationSettings) -> EC2InstanceBootSpecific return ec2_boot_specs +@pytest.fixture(params=[TLSAuthentication, NoAuthentication]) +def backend_cluster_auth( + request: pytest.FixtureRequest, +) -> InternalClusterAuthentication: + return request.param + + @pytest.fixture def app_environment( app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, + backend_cluster_auth: InternalClusterAuthentication, ) -> EnvVarsDict: return app_environment | setenvs_from_dict( monkeypatch, { "CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": json_dumps( TLSAuthentication.Config.schema_extra["examples"][0] + if isinstance(backend_cluster_auth, TLSAuthentication) + else NoAuthentication.Config.schema_extra["examples"][0] ) }, ) @@ -69,38 +80,52 @@ def app_environment( def test_create_startup_script( disabled_rabbitmq: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, app_settings: ApplicationSettings, - cluster_machines_name_prefix: str, - clusters_keeper_docker_compose: dict[str, Any], ec2_boot_specs: EC2InstanceBootSpecific, ): - additional_custom_tags = { - AWSTagKey("pytest-tag-key"): AWSTagValue("pytest-tag-value") - } startup_script = create_startup_script( app_settings, - cluster_machines_name_prefix=cluster_machines_name_prefix, ec2_boot_specific=ec2_boot_specs, - additional_custom_tags=additional_custom_tags, ) assert isinstance(startup_script, str) assert len(ec2_boot_specs.custom_boot_scripts) > 0 for boot_script in ec2_boot_specs.custom_boot_scripts: assert boot_script in startup_script + + +def test_create_deploy_cluster_stack_script( + disabled_rabbitmq: None, + mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, + mocked_redis_server: None, + app_settings: ApplicationSettings, + cluster_machines_name_prefix: str, + clusters_keeper_docker_compose: dict[str, Any], +): + additional_custom_tags = { + AWSTagKey("pytest-tag-key"): AWSTagValue("pytest-tag-value") + } + deploy_script = create_deploy_cluster_stack_script( + app_settings, + cluster_machines_name_prefix=cluster_machines_name_prefix, + additional_custom_tags=additional_custom_tags, + ) + assert isinstance(deploy_script, str) # we have commands to pipe into a docker-compose file - assert " | base64 -d > /docker-compose.yml" in startup_script + assert " | base64 -d > /docker-compose.yml" in deploy_script # we have commands to init a docker-swarm - assert "docker swarm init" in startup_script + assert "docker swarm init --default-addr-pool" in deploy_script # we have commands to deploy a 
stack assert ( "docker stack deploy --with-registry-auth --compose-file=/docker-compose.yml dask_stack" - in startup_script + in deploy_script ) # before that we have commands that setup ENV variables, let's check we have all of them as defined in the docker-compose # let's get what was set in the startup script and compare with the expected one of the docker-compose startup_script_envs_definition = ( - startup_script.splitlines()[-1].split("docker stack deploy")[0].strip() + deploy_script.splitlines()[-1].split("docker stack deploy")[0].strip() ) assert startup_script_envs_definition # Use regular expression to split the string into key-value pairs (courtesy of chatGPT) @@ -137,7 +162,7 @@ def test_create_startup_script( "WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS", ] assert all( - re.search(rf"{i}=\[(\\\".+\\\")*\]", startup_script) for i in list_settings + re.search(rf"{i}=\[(\\\".+\\\")*\]", deploy_script) for i in list_settings ) # check dicts have \' in front @@ -146,35 +171,55 @@ def test_create_startup_script( "WORKERS_EC2_INSTANCES_CUSTOM_TAGS", ] assert all( - re.search(rf"{i}=\'{{(\".+\":\s\".*\")+}}\'", startup_script) + re.search(rf"{i}=\'{{(\".+\":\s\".*\")+}}\'", deploy_script) for i in dict_settings ) # check the additional tags are in assert all( - f'"{key}": "{value}"' in startup_script + f'"{key}": "{value}"' in deploy_script for key, value in additional_custom_tags.items() ) -def test_create_startup_script_script_size_below_16kb( +def test_create_deploy_cluster_stack_script_below_64kb( disabled_rabbitmq: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, app_settings: ApplicationSettings, cluster_machines_name_prefix: str, clusters_keeper_docker_compose: dict[str, Any], - ec2_boot_specs: EC2InstanceBootSpecific, ): additional_custom_tags = { AWSTagKey("pytest-tag-key"): AWSTagValue("pytest-tag-value") } - startup_script = create_startup_script( + deploy_script = create_deploy_cluster_stack_script( app_settings, cluster_machines_name_prefix=cluster_machines_name_prefix, - ec2_boot_specific=ec2_boot_specs, additional_custom_tags=additional_custom_tags, ) + deploy_script_size_in_bytes = len(deploy_script.encode("utf-8")) + assert deploy_script_size_in_bytes < 64000, ( + f"script size is {deploy_script_size_in_bytes} bytes that exceeds the SSM command of 64KB. " + "TIP: split commands or reduce size." 
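        # NOTE (editor's assumption, not re-verified against AWS docs): the
        # 64000-byte bound mirrors the roughly 64KB content limit for SSM
        # run-command payloads referenced in the assertion message above.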
+ ) + + +def test_create_startup_script_script_size_below_16kb( + disabled_rabbitmq: None, + mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, + mocked_redis_server: None, + app_settings: ApplicationSettings, + cluster_machines_name_prefix: str, + clusters_keeper_docker_compose: dict[str, Any], + ec2_boot_specs: EC2InstanceBootSpecific, +): + startup_script = create_startup_script( + app_settings, + ec2_boot_specific=ec2_boot_specs, + ) script_size_in_bytes = len(startup_script.encode("utf-8")) print( @@ -184,13 +229,13 @@ def test_create_startup_script_script_size_below_16kb( assert script_size_in_bytes < 15 * 1024 -def test_startup_script_defines_all_envs_for_docker_compose( +def test__prepare_environment_variables_defines_all_envs_for_docker_compose( disabled_rabbitmq: None, mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, mocked_redis_server: None, app_settings: ApplicationSettings, cluster_machines_name_prefix: str, - ec2_boot_specs: EC2InstanceBootSpecific, clusters_keeper_docker_compose_file: Path, ): additional_custom_tags = { @@ -202,8 +247,8 @@ def test_startup_script_defines_all_envs_for_docker_compose( additional_custom_tags=additional_custom_tags, ) assert environment_variables - process = subprocess.run( - [ # noqa: S603, S607 + process = subprocess.run( # noqa: S603 + [ # noqa: S607 "docker", "compose", "--dry-run", diff --git a/services/clusters-keeper/tests/unit/test_utils_ec2.py b/services/clusters-keeper/tests/unit/test_utils_ec2.py index cc466d113ac..125670475db 100644 --- a/services/clusters-keeper/tests/unit/test_utils_ec2.py +++ b/services/clusters-keeper/tests/unit/test_utils_ec2.py @@ -25,6 +25,7 @@ def wallet_id(faker: Faker) -> WalletID: def test_get_cluster_name( disabled_rabbitmq: None, disabled_ec2: None, + disabled_ssm: None, mocked_redis_server: None, app_settings: ApplicationSettings, user_id: UserID, @@ -46,9 +47,21 @@ def test_get_cluster_name( == f"{app_settings.CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX}osparc-computational-cluster-worker-{app_settings.SWARM_STACK_NAME}-user_id:{user_id}-wallet_id:{wallet_id}" ) + assert ( + get_cluster_name(app_settings, user_id=user_id, wallet_id=None, is_manager=True) + == f"{app_settings.CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX}osparc-computational-cluster-manager-{app_settings.SWARM_STACK_NAME}-user_id:{user_id}-wallet_id:None" + ) + assert ( + get_cluster_name( + app_settings, user_id=user_id, wallet_id=None, is_manager=False + ) + == f"{app_settings.CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX}osparc-computational-cluster-worker-{app_settings.SWARM_STACK_NAME}-user_id:{user_id}-wallet_id:None" + ) + def test_creation_ec2_tags( mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, disabled_rabbitmq: None, mocked_redis_server: None, app_settings: ApplicationSettings, @@ -78,6 +91,7 @@ def test_creation_ec2_tags( def test_all_created_ec2_instances_filter( mocked_ec2_server_envs: EnvVarsDict, + mocked_ssm_server_envs: EnvVarsDict, disabled_rabbitmq: None, mocked_redis_server: None, app_settings: ApplicationSettings, diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 60ce2c26b17..af73de611b4 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -189,6 +189,11 @@ services: CLUSTERS_KEEPER_EC2_ENDPOINT: ${CLUSTERS_KEEPER_EC2_ENDPOINT} CLUSTERS_KEEPER_EC2_REGION_NAME: ${CLUSTERS_KEEPER_EC2_REGION_NAME} CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY: ${CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY} + CLUSTERS_KEEPER_SSM_ACCESS: 
${CLUSTERS_KEEPER_SSM_ACCESS} + CLUSTERS_KEEPER_SSM_ACCESS_KEY_ID: ${CLUSTERS_KEEPER_SSM_ACCESS_KEY_ID} + CLUSTERS_KEEPER_SSM_ENDPOINT: ${CLUSTERS_KEEPER_SSM_ENDPOINT} + CLUSTERS_KEEPER_SSM_REGION_NAME: ${CLUSTERS_KEEPER_SSM_REGION_NAME} + CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY: ${CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY} CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: ${CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: ${CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES} @@ -204,6 +209,8 @@ services: PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_KEY: ${PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_KEY} PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME: ${PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME} PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD: ${PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD} + PRIMARY_EC2_INSTANCES_MAX_START_TIME: ${PRIMARY_EC2_INSTANCES_MAX_START_TIME} + PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL: ${PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL} RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} From 9ebe05b091a0fd8904c196a1e6f9a65574460dbc Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Fri, 20 Sep 2024 18:27:32 +0200 Subject: [PATCH 009/104] =?UTF-8?q?=E2=99=BB=EF=B8=8FCI:=20autoscaling=20m?= =?UTF-8?q?ypy=20takes=20longer=20and=20longer.=20(#6417)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/ci-testing-deploy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 76c478addd9..89b0620085d 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -590,7 +590,7 @@ jobs: unit-test-autoscaling: needs: changes if: ${{ needs.changes.outputs.autoscaling == 'true' || github.event_name == 'push' }} - timeout-minutes: 19 # if this timeout gets too small, then split the tests + timeout-minutes: 22 # temporary: mypy takes a huge amount of time to run here, maybe we should cache it name: "[unit] autoscaling" runs-on: ${{ matrix.os }} strategy: From 060c9cb2b6d229c6fab2c77ec7963e4f31384169 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Sat, 21 Sep 2024 10:32:44 +0200 Subject: [PATCH 010/104] =?UTF-8?q?=20=F0=9F=90=9B=20Improved=20Error=20Ha?= =?UTF-8?q?ndling=20for=20Missing=20=20Billing=20Details=20(#6418)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/models_library/users.py | 2 +- .../payments/_onetime_api.py | 2 ++ .../simcore_service_webserver/users/_db.py | 8 ++++-- .../users/exceptions.py | 5 ++++ .../wallets/_constants.py | 5 ++++ .../wallets/_handlers.py | 26 +++++++++++++++++-- .../03/wallets/payments/test_payments.py | 26 +++++++++++++++++++ 7 files changed, 69 insertions(+), 5 deletions(-) diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index 31ca948a1b8..a28add967a6 100644 --- a/packages/models-library/src/models_library/users.py +++ b/packages/models-library/src/models_library/users.py @@ -22,7 +22,7 @@ class UserBillingDetails(BaseModel): address: str | None city: str | None state: str | None = Field(description="State, province, canton, ...") - country: str + country: str # Required for taxes postal_code: str | None phone: str | None diff --git 
a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py index f1c9f9df733..f54f48403bb 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py @@ -279,6 +279,7 @@ async def init_creation_of_wallet_payment( Raises: UserNotFoundError WalletAccessForbiddenError + BillingDetailsNotFoundError """ # wallet: check permissions @@ -293,6 +294,7 @@ async def init_creation_of_wallet_payment( # user info user = await get_user_display_and_id_names(app, user_id=user_id) user_invoice_address = await get_user_invoice_address(app, user_id=user_id) + # stripe info product_stripe_info = await get_product_stripe_info(app, product_name=product_name) diff --git a/services/web/server/src/simcore_service_webserver/users/_db.py b/services/web/server/src/simcore_service_webserver/users/_db.py index 100575cd522..f7d8769f963 100644 --- a/services/web/server/src/simcore_service_webserver/users/_db.py +++ b/services/web/server/src/simcore_service_webserver/users/_db.py @@ -21,6 +21,7 @@ from ..db.models import user_to_groups from ..db.plugin import get_database_engine +from .exceptions import BillingDetailsNotFoundError from .schemas import Permission _ALL = None @@ -203,9 +204,12 @@ async def new_user_details( async def get_user_billing_details( engine: Engine, user_id: UserID ) -> UserBillingDetails: + """ + Raises: + BillingDetailsNotFoundError + """ async with engine.acquire() as conn: user_billing_details = await UsersRepo.get_billing_details(conn, user_id) if not user_billing_details: - msg = f"Missing biling details for user {user_id}" - raise ValueError(msg) + raise BillingDetailsNotFoundError(user_id=user_id) return UserBillingDetails.from_orm(user_billing_details) diff --git a/services/web/server/src/simcore_service_webserver/users/exceptions.py b/services/web/server/src/simcore_service_webserver/users/exceptions.py index 08e1432ece0..13b14ee0240 100644 --- a/services/web/server/src/simcore_service_webserver/users/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/users/exceptions.py @@ -45,3 +45,8 @@ class AlreadyPreRegisteredError(UsersBaseError): msg_template = ( "Found {num_found} matches for '{email}'. Cannot pre-register existing user" ) + + +class BillingDetailsNotFoundError(UsersBaseError): + # NOTE: this is for internal log and should not be transmitted to the final user + msg_template = "Billing details are missing for user_id={user_id}. TIP: Check whether this user is pre-registered" diff --git a/services/web/server/src/simcore_service_webserver/wallets/_constants.py b/services/web/server/src/simcore_service_webserver/wallets/_constants.py index a8354070f93..eab6335e3df 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_constants.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_constants.py @@ -3,3 +3,8 @@ MSG_PRICE_NOT_DEFINED_ERROR: Final[ str ] = "No payments are accepted until this product has a price" + +MSG_BILLING_DETAILS_NOT_DEFINED_ERROR: Final[str] = ( + "Payments cannot be processed: Required billing details (e.g. country for tax) are missing from your account." + "Please contact support to resolve this configuration issue." 
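    # NOTE (editor's annotation): the two adjacent literals above are joined by
    # implicit string concatenation with no separating space, so the user-facing
    # text renders as "...your account.Please contact support..."; a trailing
    # space on the first literal would fix the spacing.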
+) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py index 0b43bbea59a..e7c67919f10 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py @@ -18,6 +18,8 @@ parse_request_path_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler +from servicelib.error_codes import create_error_code +from servicelib.logging_utils import LogExtra, get_log_record_extra from servicelib.request_keys import RQT_USERID_KEY from .._constants import RQ_PRODUCT_KEY @@ -36,10 +38,16 @@ ) from ..products.errors import BelowMinimumPaymentError, ProductPriceNotDefinedError from ..security.decorators import permission_required -from ..users.exceptions import UserDefaultWalletNotFoundError +from ..users.exceptions import ( + BillingDetailsNotFoundError, + UserDefaultWalletNotFoundError, +) from ..utils_aiohttp import envelope_json_response from . import _api -from ._constants import MSG_PRICE_NOT_DEFINED_ERROR +from ._constants import ( + MSG_BILLING_DETAILS_NOT_DEFINED_ERROR, + MSG_PRICE_NOT_DEFINED_ERROR, +) from .errors import WalletAccessForbiddenError, WalletNotFoundError _logger = logging.getLogger(__name__) @@ -80,6 +88,20 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except ProductPriceNotDefinedError as exc: raise web.HTTPConflict(reason=MSG_PRICE_NOT_DEFINED_ERROR) from exc + except BillingDetailsNotFoundError as exc: + error_code = create_error_code(exc) + log_extra: LogExtra = {} + if user_id := getattr(exc, "user_id", None): + log_extra = get_log_record_extra(user_id=user_id) or {} + + log_msg = f"{exc} [{error_code}]" + _logger.exception( + log_msg, + extra={"error_code": error_code, **log_extra}, + ) + user_msg = f"{MSG_BILLING_DETAILS_NOT_DEFINED_ERROR} ({error_code})" + raise web.HTTPServiceUnavailable(reason=user_msg) from exc + return wrapper diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py index ed8b2868481..f6519735ed1 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py @@ -33,6 +33,9 @@ PaymentsSettings, get_plugin_settings, ) +from simcore_service_webserver.wallets._constants import ( + MSG_BILLING_DETAILS_NOT_DEFINED_ERROR, +) OpenApiDict: TypeAlias = dict[str, Any] @@ -312,6 +315,29 @@ async def test_complete_payment_errors( send_message.assert_called_once() +async def test_billing_info_missing_error( + latest_osparc_price: Decimal, + client: TestClient, + logged_user_wallet: WalletGet, +): + # NOTE: setup_user_pre_registration_details_db is not setup to emulate missing pre-registration + + assert client.app + wallet = logged_user_wallet + + # Pay + response = await client.post( + f"/v0/wallets/{wallet.wallet_id}/payments", json={"priceDollars": 25} + ) + data, error = await assert_status(response, status.HTTP_503_SERVICE_UNAVAILABLE) + + assert not data + assert MSG_BILLING_DETAILS_NOT_DEFINED_ERROR in error["message"] + + assert response.reason + assert MSG_BILLING_DETAILS_NOT_DEFINED_ERROR in response.reason + + async def test_payment_not_found( latest_osparc_price: Decimal, client: TestClient, From ffba5aed27544fd49856c5d6033e29d781836b33 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero 
<32402063+pcrespov@users.noreply.github.com> Date: Mon, 23 Sep 2024 08:46:01 +0200 Subject: [PATCH 011/104] =?UTF-8?q?=F0=9F=8E=A8=20Enhances=20Product=20par?= =?UTF-8?q?sing=20to=20strip=20whitespaces=20in=20host=5Fregex=20(#6419)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../products/_model.py | 16 +++++++++++++--- .../unit/isolated/test_products_model.py | 19 +++++++++++++++++++ 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py index de3955652a3..cccd7707008 100644 --- a/services/web/server/src/simcore_service_webserver/products/_model.py +++ b/services/web/server/src/simcore_service_webserver/products/_model.py @@ -111,7 +111,7 @@ class Product(BaseModel): @validator("*", pre=True) @classmethod - def parse_empty_string_as_null(cls, v): + def _parse_empty_string_as_null(cls, v): """Safe measure: database entries are sometimes left blank instead of null""" if isinstance(v, str) and len(v.strip()) == 0: return None @@ -119,12 +119,21 @@ def parse_empty_string_as_null(cls, v): @validator("name", pre=True, always=True) @classmethod - def validate_name(cls, v): + def _validate_name(cls, v): if v not in FRONTEND_APPS_AVAILABLE: msg = f"{v} is not in available front-end apps {FRONTEND_APPS_AVAILABLE}" raise ValueError(msg) return v + @validator("host_regex", pre=True) + @classmethod + def _strip_whitespaces(cls, v): + if v and isinstance(v, str): + # Prevents unintended leading & trailing spaces when added + # manually in the database + return v.strip() + return v + @property def twilio_alpha_numeric_sender_id(self) -> str: return self.short_name or self.display_name.replace(string.punctuation, "")[:11] @@ -132,9 +141,10 @@ def twilio_alpha_numeric_sender_id(self) -> str: class Config: alias_generator = snake_to_camel # to export allow_population_by_field_name = True + anystr_strip_whitespace = True + extra = Extra.ignore frozen = True # read-only orm_mode = True - extra = Extra.ignore schema_extra: ClassVar[dict[str, Any]] = { "examples": [ { diff --git a/services/web/server/tests/unit/isolated/test_products_model.py b/services/web/server/tests/unit/isolated/test_products_model.py index b3ee823a37e..84fa67d94eb 100644 --- a/services/web/server/tests/unit/isolated/test_products_model.py +++ b/services/web/server/tests/unit/isolated/test_products_model.py @@ -76,3 +76,22 @@ def test_product_to_static(): ], "isPaymentEnabled": False, } + + +def test_product_host_regex_with_spaces(): + data = Product.Config.schema_extra["examples"][2] + + # with leading and trailing spaces and uppercase (tests anystr_strip_whitespace ) + data["support_email"] = " fOO@BaR.COM " + + # with leading trailing spaces (tests validator("host_regex", pre=True)) + expected = r"([\.-]{0,1}osparc[\.-])".strip() + data["host_regex"] = expected + " " + + # parsing should strip all whitespaces and normalize email + product = Product.parse_obj(data) + + assert product.host_regex.pattern == expected + assert product.host_regex.search("osparc.bar.com") + + assert product.support_email == "foo@bar.com" From 2e287ab9c023673604a63a7dd86a3f74eb1e43ce Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 23 Sep 2024 09:51:40 +0200 Subject: [PATCH 012/104] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Fix:=20Shar?= =?UTF-8?q?e=20and=20Leave=20Study=20(#6401)?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/data/Roles.js | 1 + .../class/osparc/share/Collaborators.js | 35 +++++++++++++++---- .../class/osparc/share/CollaboratorsStudy.js | 2 +- 3 files changed, 30 insertions(+), 8 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/data/Roles.js b/services/static-webserver/client/source/class/osparc/data/Roles.js index ccb66ffb7d7..6320c5a7003 100644 --- a/services/static-webserver/client/source/class/osparc/data/Roles.js +++ b/services/static-webserver/client/source/class/osparc/data/Roles.js @@ -176,6 +176,7 @@ qx.Class.define("osparc.data.Roles", { if (showWording) { const rolesText = new qx.ui.basic.Label(qx.locale.Manager.tr("Roles")).set({ + alignY: "middle", font: "text-13" }); rolesLayout.add(rolesText); diff --git a/services/static-webserver/client/source/class/osparc/share/Collaborators.js b/services/static-webserver/client/source/class/osparc/share/Collaborators.js index 9c282f858ed..6bde9a32e86 100644 --- a/services/static-webserver/client/source/class/osparc/share/Collaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/Collaborators.js @@ -189,6 +189,25 @@ qx.Class.define("osparc.share.Collaborators", { return control || this.base(arguments, id); }, + __canIShare: function() { + if (this._resourceType === "study" && this._serializedDataCopy["workspaceId"]) { + // Access Rights are set at workspace level + return false; + } + let canIShare = false; + switch (this._resourceType) { + case "study": + case "template": + case "service": + canIShare = osparc.service.Utils.canIWrite(this._serializedDataCopy["accessRights"]); + break; + case "workspace": + canIShare = osparc.share.CollaboratorsWorkspace.canIDelete(this._serializedDataCopy["myAccessRights"]); + break; + } + return canIShare; + }, + __canIChangePermissions: function() { if (this._resourceType === "study" && this._serializedDataCopy["workspaceId"]) { // Access Rights are set at workspace level @@ -227,7 +246,7 @@ qx.Class.define("osparc.share.Collaborators", { }, __buildLayout: function() { - if (this.__canIChangePermissions()) { + if (this.__canIShare()) { this.__addCollaborators = this._createChildControlImpl("add-collaborator"); } this._createChildControlImpl("collaborators-list"); @@ -330,14 +349,14 @@ qx.Class.define("osparc.share.Collaborators", { }, __getLeaveStudyButton: function() { + const myGid = osparc.auth.Data.getInstance().getGroupId(); if ( (this._resourceType === "study") && - // check the study is shared - (Object.keys(this._serializedDataCopy["accessRights"]).length > 1) && + // check if I'm part of the access rights (not through an organization) + Object.keys(this._serializedDataCopy["accessRights"]).includes(myGid.toString()) && // check also user is not "prjOwner". Backend will silently not let the frontend remove that user. (this._serializedDataCopy["prjOwner"] !== osparc.auth.Data.getInstance().getEmail()) ) { - const myGid = osparc.auth.Data.getInstance().getGroupId(); const leaveButton = new qx.ui.form.Button(this.tr("Leave") + " " + osparc.product.Utils.getStudyAlias({ firstUpperCase: true })).set({ @@ -345,7 +364,7 @@ qx.Class.define("osparc.share.Collaborators", { visibility: Object.keys(this._serializedDataCopy["accessRights"]).includes(myGid.toString()) ? 
"visible" : "excluded" }); leaveButton.addListener("execute", () => { - let msg = this._serializedDataCopy["name"] + " " + this.tr("will no longer be listed."); + let msg = `"${this._serializedDataCopy["name"]}" ` + this.tr("will no longer be listed."); if (!osparc.share.CollaboratorsStudy.checkRemoveCollaborator(this._serializedDataCopy, myGid)) { msg += "
"; msg += this.tr("If you remove yourself, there won't be any other Owners."); @@ -357,8 +376,10 @@ qx.Class.define("osparc.share.Collaborators", { win.open(); win.addListener("close", () => { if (win.getConfirmed()) { - this._deleteMember({gid: myGid}); - qx.event.message.Bus.dispatchByName("reloadStudies"); + this._deleteMember({gid: myGid}) + .then(() => { + qx.event.message.Bus.dispatchByName("reloadStudies"); + }); } }, this); }, this); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js index 0326dafb5f8..80ecbe75006 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js @@ -152,7 +152,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { item.setEnabled(false); } - osparc.info.StudyUtils.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) + return osparc.info.StudyUtils.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); osparc.FlashMessenger.getInstance().logAs(this.tr("Member successfully removed")); From a876819262a8529fa28c74bb8878be7abc006388 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 23 Sep 2024 11:47:52 +0200 Subject: [PATCH 013/104] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Workspaces=20&?= =?UTF-8?q?=20Folders:=20Merge=20Workspace=20header=20and=20Folder=20bread?= =?UTF-8?q?crumbs=20(#6414)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...ntainerHeader.js => ContextBreadcrumbs.js} | 66 +++++++------------ .../osparc/dashboard/ResourceBrowserBase.js | 39 +++-------- .../dashboard/ResourceContainerManager.js | 2 +- .../class/osparc/dashboard/ServiceBrowser.js | 1 + .../class/osparc/dashboard/StudyBrowser.js | 31 +++++---- .../class/osparc/dashboard/TemplateBrowser.js | 1 + .../class/osparc/dashboard/WorkspaceHeader.js | 25 ++++++- .../class/osparc/node/TierSelectionView.js | 6 +- 8 files changed, 79 insertions(+), 92 deletions(-) rename services/static-webserver/client/source/class/osparc/dashboard/{ContainerHeader.js => ContextBreadcrumbs.js} (67%) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ContainerHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js similarity index 67% rename from services/static-webserver/client/source/class/osparc/dashboard/ContainerHeader.js rename to services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js index f79e5839586..9dc67a568b1 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ContainerHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js @@ -15,59 +15,38 @@ ************************************************************************ */ -/** - * Widget used for displaying a New Folder in the Study Browser - * - */ - -qx.Class.define("osparc.dashboard.ContainerHeader", { +qx.Class.define("osparc.dashboard.ContextBreadcrumbs", { extend: qx.ui.core.Widget, construct: function() { this.base(arguments); - this._setLayout(new qx.ui.layout.HBox(20).set({ + this._setLayout(new qx.ui.layout.HBox(5).set({ alignY: "middle" })); }, - events: { - "changeContext": "qx.event.type.Data", - }, - properties: { currentWorkspaceId: { 
check: "Number", nullable: true, init: null, - apply: "__buildBreadcrumbs" + event: "changeCurrentWorkspaceId", + apply: "__rebuild" }, currentFolderId: { check: "Number", nullable: true, init: null, - apply: "__buildBreadcrumbs" + event: "changeCurrentFolderId", + apply: "__rebuild" } }, members: { - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "breadcrumbs-layout": - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ - alignY: "middle" - })); - this._addAt(control, 0, {flex: 1}); - break; - } - return control || this.base(arguments, id); - }, - - __buildBreadcrumbs: function() { - const breadcrumbsLayout = this.getChildControl("breadcrumbs-layout"); - breadcrumbsLayout.removeAll(); + __rebuild: function() { + this._removeAll(); if (this.getCurrentFolderId()) { const currentFolder = osparc.store.Folders.getInstance().getFolder(this.getCurrentFolderId()); @@ -76,23 +55,22 @@ qx.Class.define("osparc.dashboard.ContainerHeader", { const currentFolderButton = this.__createCurrentFolderButton(); if (currentFolderButton) { - breadcrumbsLayout.add(currentFolderButton); + this._add(currentFolderButton); } }, __createUpstreamButtons: function(childFolder) { if (childFolder) { - const breadcrumbsLayout = this.getChildControl("breadcrumbs-layout"); const parentFolder = osparc.store.Folders.getInstance().getFolder(childFolder.getParentFolderId()); if (parentFolder) { - breadcrumbsLayout.addAt(this.__createArrow(), 0); + this._addAt(this.__createArrow(), 0); const upstreamButton = this.__createFolderButton(parentFolder); - breadcrumbsLayout.addAt(upstreamButton, 0); + this._addAt(upstreamButton, 0); this.__createUpstreamButtons(parentFolder); } else { - breadcrumbsLayout.addAt(this.__createArrow(), 0); + this._addAt(this.__createArrow(), 0); const homeButton = this.__createFolderButton(); - breadcrumbsLayout.addAt(homeButton, 0); + this._addAt(homeButton, 0); } } }, @@ -102,15 +80,12 @@ qx.Class.define("osparc.dashboard.ContainerHeader", { return this.__createFolderButton(currentFolder); }, - __changeContext: function(workspaceId, folderId) { + __changeFolder: function(folderId) { + const workspaceId = this.getCurrentWorkspaceId(); this.set({ currentWorkspaceId: workspaceId, currentFolderId: folderId, }); - this.fireDataEvent("changeContext", { - workspaceId, - folderId, - }); }, __createRootButton: function() { @@ -131,7 +106,7 @@ qx.Class.define("osparc.dashboard.ContainerHeader", { } rootButton.addListener("execute", () => { const folderId = null; - this.__changeContext(workspaceId, folderId); + this.__changeFolder(folderId); }); return rootButton; }, @@ -139,17 +114,20 @@ qx.Class.define("osparc.dashboard.ContainerHeader", { __createFolderButton: function(folder) { let folderButton = null; if (folder) { - folderButton = new qx.ui.form.Button(folder.getName(), "@FontAwesome5Solid/folder/14").set({ + folderButton = new qx.ui.form.Button(folder.getName()).set({ maxWidth: 200 }); folder.bind("name", folderButton, "label"); folderButton.addListener("execute", () => { - const workspaceId = this.getCurrentWorkspaceId(); const folderId = folder ? 
folder.getFolderId() : null; - this.__changeContext(workspaceId, folderId); + this.__changeFolder(folderId); }, this); } else { folderButton = this.__createRootButton(); + // Do not show root folder + folderButton.set({ + visibility: "excluded" + }); } folderButton.set({ backgroundColor: "transparent", diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index a52dc18a38a..f88008d773f 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -230,10 +230,16 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { throw new Error("Abstract method called!"); }, - _createResourcesLayout: function() { - const topBar = this.__createTopBar(); - this._addToLayout(topBar); + _createSearchBar: function() { + const searchBarFilter = this._searchBarFilter = new osparc.dashboard.SearchBarFilter(this._resourceType).set({ + marginRight: 22 + }); + const textField = searchBarFilter.getChildControl("text-field"); + osparc.utils.Utils.setIdToWidget(textField, "searchBarFilter-textField-"+this._resourceType); + this._addToLayout(searchBarFilter); + }, + _createResourcesLayout: function() { const toolbar = this._toolbar = new qx.ui.toolbar.ToolBar().set({ backgroundColor: "transparent", spacing: 10, @@ -268,34 +274,9 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { resourcesContainer.addListener("workspaceUpdated", e => this._workspaceUpdated(e.getData())); resourcesContainer.addListener("deleteWorkspaceRequested", e => this._deleteWorkspaceRequested(e.getData())); - const containerHeader = this._resourcesContainer.getContainerHeader(); - containerHeader.addListener("changeContext", e => { - const { - workspaceId, - folderId, - } = e.getData(); - this._resourceFilter.contextChanged(workspaceId, folderId); - }, this); - this._addToLayout(resourcesContainer); }, - __createTopBar: function() { - const topBar = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)).set({ - paddingRight: 22, - alignY: "middle" - }); - - const searchBarFilter = this._searchBarFilter = new osparc.dashboard.SearchBarFilter(this._resourceType); - const textField = searchBarFilter.getChildControl("text-field"); - osparc.utils.Utils.setIdToWidget(textField, "searchBarFilter-textField-"+this._resourceType); - topBar.add(searchBarFilter, { - flex: 1 - }); - - return topBar; - }, - _groupByChanged: function(groupBy) { // if cards are grouped they need to be in grid mode this._resourcesContainer.setMode("grid"); @@ -373,7 +354,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { _addResourceFilter: function() { const resourceFilter = this._resourceFilter = new osparc.dashboard.ResourceFilter(this._resourceType).set({ - marginTop: osparc.dashboard.SearchBarFilter.HEIGHT, + marginTop: osparc.dashboard.SearchBarFilter.HEIGHT + 10, maxWidth: this.self().SIDE_SPACER_WIDTH, width: this.self().SIDE_SPACER_WIDTH }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 592d771f262..994bb1fae43 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -32,7 +32,7 @@ 
qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.__resourcesList = []; this.__groupedContainersList = []; - const containerHeader = this.__containerHeader = new osparc.dashboard.ContainerHeader(); + const containerHeader = this.__containerHeader = new osparc.dashboard.ContextBreadcrumbs(); this._add(containerHeader); containerHeader.setVisibility(osparc.utils.DisabledPlugins.isFoldersEnabled() ? "visible" : "excluded"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js index 81583c9b24f..22860e3704b 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -145,6 +145,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { // LAYOUT // _createLayout: function() { + this._createSearchBar(); this._createResourcesLayout(); const list = this._resourcesContainer.getFlatList(); if (list) { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 97c5638abd2..a5a3ddad248 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -872,30 +872,29 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // LAYOUT // _createLayout: function() { + this._createSearchBar(); + if (osparc.utils.DisabledPlugins.isFoldersEnabled()) { const workspaceHeader = new osparc.dashboard.WorkspaceHeader(); this.bind("currentWorkspaceId", workspaceHeader, "currentWorkspaceId"); + this.bind("currentFolderId", workspaceHeader, "currentFolderId"); + [ + "changeCurrentWorkspaceId", + "changeCurrentFolderId", + ].forEach(ev => { + workspaceHeader.addListener(ev, () => { + const workspaceId = workspaceHeader.getCurrentWorkspaceId(); + const folderId = workspaceHeader.getCurrentFolderId(); + this._changeContext(workspaceId, folderId); + this._resourceFilter.contextChanged(workspaceId, folderId); + }, this); + }); + this._addToLayout(workspaceHeader); } this._createResourcesLayout(); - const containerHeader = this._resourcesContainer.getContainerHeader(); - if (containerHeader) { - this.bind("currentWorkspaceId", containerHeader, "currentWorkspaceId"); - this.bind("currentFolderId", containerHeader, "currentFolderId"); - containerHeader.addListener("changeContext", e => { - const { - workspaceId, - folderId, - } = e.getData(); - this.set({ - currentWorkspaceId: workspaceId, - currentFolderId: folderId, - }) - this._changeContext(workspaceId, folderId); - }); - } const list = this._resourcesContainer.getFlatList(); if (list) { osparc.utils.Utils.setIdToWidget(list, "studiesList"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js index 6a5e516ce9e..2f824715ea7 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js @@ -162,6 +162,7 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { // LAYOUT // _createLayout: function() { + this._createSearchBar(); this._createResourcesLayout(); const list = this._resourcesContainer.getFlatList(); if (list) { diff --git 
a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js index b4a567f5c48..88d557312e8 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js @@ -52,6 +52,13 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { apply: "__buildLayout" }, + currentFolderId: { + check: "Number", + nullable: true, + init: null, + event: "changeCurrentFolderId", + }, + accessRights: { check: "Object", nullable: false, @@ -98,6 +105,14 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { }); this._add(control); break; + case "breadcrumbs": + control = new osparc.dashboard.ContextBreadcrumbs(); + this.bind("currentWorkspaceId", control, "currentWorkspaceId"); + this.bind("currentFolderId", control, "currentFolderId"); + control.bind("currentWorkspaceId", this, "currentWorkspaceId"); + control.bind("currentFolderId", this, "currentFolderId"); + this._add(control); + break; case "edit-button": control = new qx.ui.form.MenuButton().set({ appearance: "form-button-outlined", @@ -158,7 +173,15 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { __buildLayout: function(workspaceId) { this.getChildControl("icon"); - const title = this.getChildControl("title"); + const title = this.getChildControl("title").set({ + cursor: "pointer" + }); + title.addListener("tap", () => { + const folderId = null; + this.setCurrentFolderId(folderId); + }); + + this.getChildControl("breadcrumbs"); this.getChildControl("edit-button").exclude(); this.resetAccessRights(); diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js index d2e7e013f13..34dfc397b37 100644 --- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js +++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js @@ -72,7 +72,11 @@ qx.Class.define("osparc.node.TierSelectionView", { .then(preselectedPricingUnit => { if (preselectedPricingUnit && preselectedPricingUnit["pricingUnitId"]) { const tierFound = tierBox.getSelectables().find(t => t.getModel() === preselectedPricingUnit["pricingUnitId"]); - tierBox.setSelection([tierFound]); + if (tierFound) { + tierBox.setSelection([tierFound]); + } else { + console.error("Tier not found"); + } } }) .finally(() => { From fcc7b78df3b2cbe9233b0bfa15c97909c117a6eb Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 23 Sep 2024 12:19:16 +0200 Subject: [PATCH 014/104] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Access=20Fu?= =?UTF-8?q?ll=20TIP=20(#6423)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/StudyBrowser.js | 3 +-- .../class/osparc/navigation/NavigationBar.js | 19 ++++++--------- .../class/osparc/navigation/UserMenu.js | 24 ++++++++++++++----- .../source/class/osparc/product/TIPTeaser.js | 2 +- 4 files changed, 27 insertions(+), 21 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index a5a3ddad248..22dc8ffa842 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ 
b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -794,8 +794,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (product in newStudiesData) { const mode = this._resourcesContainer.getMode(); const title = this.tr("New Plan"); - const desc = this.tr("Choose Plan in pop-up"); - const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title, desc) : new osparc.dashboard.ListButtonNew(title, desc); + const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title); newStudyBtn.setCardKey("new-study"); newStudyBtn.subscribeToFilterGroup("searchBarFilter"); osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); diff --git a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js index 4353cd497f0..f926f95b6e0 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js +++ b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js @@ -133,9 +133,6 @@ qx.Class.define("osparc.navigation.NavigationBar", { this.getChildControl("read-only-info"); // right-items - if (osparc.product.Utils.isProduct("tiplite")) { - this.getChildControl("tip-lite-button"); - } this.getChildControl("tasks-button"); this.getChildControl("notifications-button"); this.getChildControl("expiration-icon"); @@ -186,6 +183,13 @@ qx.Class.define("osparc.navigation.NavigationBar", { width: osparc.product.Utils.isS4LProduct() ? 150 : 100, height: osparc.navigation.NavigationBar.HEIGHT }); + if (osparc.product.Utils.isProduct("tiplite")) { + control.set({ + cursor: "pointer", + toolTipText: this.tr("This is TIP.lite, a light version of TIP.
Request access to TIP.") + }); + control.addListener("tap", () => osparc.product.TIPTeaser.getInstance().open()); + } this.getChildControl("left-items").add(control); break; case "logo-powered": @@ -231,15 +235,6 @@ qx.Class.define("osparc.navigation.NavigationBar", { this.getChildControl("center-items").add(control); break; } - case "tip-lite-button": - control = new qx.ui.form.Button(this.tr("Access TIP")).set({ - marginRight: 30, - ...this.self().BUTTON_OPTIONS, - }); - osparc.utils.Utils.setIdToWidget(control, "accessTIPBtn"); - control.addListener("execute", () => osparc.product.TIPTeaser.getInstance().open()); - this.getChildControl("right-items").add(control); - break; case "credits-button": control = new osparc.desktop.credits.CreditsIndicatorButton(); this.getChildControl("right-items").add(control); diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js index b226a949ae3..3c92dae0659 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js @@ -95,12 +95,6 @@ qx.Class.define("osparc.navigation.UserMenu", { control.addListener("execute", () => osparc.cluster.Utils.popUpClustersDetails(), this); this.add(control); break; - case "license": - control = new qx.ui.menu.Button(this.tr("License")); - osparc.store.Support.getLicenseURL() - .then(licenseURL => control.addListener("execute", () => window.open(licenseURL))); - this.add(control); - break; case "about": control = new qx.ui.menu.Button(this.tr("About oSPARC")); control.addListener("execute", () => osparc.About.getInstance().open()); @@ -116,6 +110,18 @@ qx.Class.define("osparc.navigation.UserMenu", { this.add(control); break; } + case "license": + control = new qx.ui.menu.Button(this.tr("License")); + osparc.store.Support.getLicenseURL() + .then(licenseURL => control.addListener("execute", () => window.open(licenseURL))); + this.add(control); + break; + case "tip-lite-button": + control = new qx.ui.menu.Button(this.tr("Access Full TIP")); + osparc.utils.Utils.setIdToWidget(control, "userMenuAccessTIPBtn"); + control.addListener("execute", () => osparc.product.TIPTeaser.getInstance().open()); + this.add(control); + break; case "log-out": { const authData = osparc.auth.Data.getInstance(); control = new qx.ui.menu.Button(authData.isGuest() ? 
this.tr("Exit") : this.tr("Log out")); @@ -167,6 +173,9 @@ qx.Class.define("osparc.navigation.UserMenu", { this.getChildControl("about-product"); } this.getChildControl("license"); + if (osparc.product.Utils.isProduct("tiplite")) { + this.getChildControl("tip-lite-button"); + } this.addSeparator(); this.getChildControl("log-out"); @@ -220,6 +229,9 @@ qx.Class.define("osparc.navigation.UserMenu", { this.getChildControl("about-product"); } this.getChildControl("license"); + if (osparc.product.Utils.isProduct("tiplite")) { + this.getChildControl("tip-lite-button"); + } this.addSeparator(); this.getChildControl("log-out"); diff --git a/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js b/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js index aac97d48eeb..d71ac819f3b 100644 --- a/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js +++ b/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js @@ -20,7 +20,7 @@ qx.Class.define("osparc.product.TIPTeaser", { type: "singleton", construct: function() { - this.base(arguments, this.tr("Access TIP")); + this.base(arguments, this.tr("Access Full TIP")); this.set({ layout: new qx.ui.layout.VBox(10), From 0a11c8e4852191e058f77f0f1c60f79488a6502f Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 23 Sep 2024 16:48:48 +0200 Subject: [PATCH 015/104] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Do=20not=20?= =?UTF-8?q?list/request=20``/frontend/``=20service's=20pricing=20plans=20(?= =?UTF-8?q?#6424)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/study/StudyPricingUnits.js | 34 +++---------------- 1 file changed, 5 insertions(+), 29 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js index 90d0767b249..793fee5cb34 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.study.StudyPricingUnits", { this.__studyData = studyData; - this.showPricingUnits(); + this.__showPricingUnits(); }, events: { @@ -38,7 +38,7 @@ qx.Class.define("osparc.study.StudyPricingUnits", { members: { __studyData: null, - showPricingUnits: function() { + __showPricingUnits: function() { const unitsLoading = () => this.fireEvent("loadingUnits"); const unitsAdded = () => this.fireEvent("unitsReady"); unitsLoading(); @@ -48,6 +48,9 @@ qx.Class.define("osparc.study.StudyPricingUnits", { const workbench = this.__studyData["workbench"]; Object.keys(workbench).forEach(nodeId => { const node = workbench[nodeId]; + if (osparc.data.model.Node.isFrontend(node)) { + return; + } const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node); this._add(nodePricingUnits); promises.push(nodePricingUnits.showPricingUnits()); @@ -55,33 +58,6 @@ qx.Class.define("osparc.study.StudyPricingUnits", { } Promise.all(promises) .then(() => unitsAdded()); - }, - - __createPricingUnitsGroup: function(nodeLabel, pricingPlans, preselectedPricingUnit) { - if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) { - const pricingUnitsLayout = osparc.study.StudyOptions.createGroupBox(nodeLabel); - - const unitButtons = new 
osparc.study.PricingUnits(pricingPlans["pricingUnits"], preselectedPricingUnit); - pricingUnitsLayout.add(unitButtons); - - return { - layout: pricingUnitsLayout, - unitButtons - }; - } - return null; - }, - - __pricingUnitSelected: function(nodeId, pricingPlanId, selectedPricingUnitId) { - const params = { - url: { - studyId: this.__studyData["uuid"], - nodeId, - pricingPlanId, - pricingUnitId: selectedPricingUnitId - } - }; - return osparc.data.Resources.fetch("studies", "putPricingUnit", params); } } }); From e5b96647e06fba8d616c391390415876937dcfc2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 20:05:50 +0200 Subject: [PATCH 016/104] Bump actions/setup-node from 4.0.3 to 4.0.4 (#6425) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> --- .github/workflows/ci-testing-deploy.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 89b0620085d..5c5ec32d940 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -1288,7 +1288,7 @@ jobs: uses: docker/setup-buildx-action@v3 with: driver: docker-container - - uses: actions/setup-node@v4.0.3 + - uses: actions/setup-node@v4.0.4 with: node-version: ${{ matrix.node }} cache: "npm" @@ -2359,7 +2359,7 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - - uses: actions/setup-node@v4.0.3 + - uses: actions/setup-node@v4.0.4 with: node-version: ${{ matrix.node }} cache: "npm" From 2129de45f310e82f9ab3209234a975aca926ecb5 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Tue, 24 Sep 2024 08:26:01 +0200 Subject: [PATCH 017/104] =?UTF-8?q?=F0=9F=8E=A8=20Persistent=20logs=20when?= =?UTF-8?q?=20GC=20removes=20services=20(#6403)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../src/servicelib/logging_utils.py | 5 ++ .../tests/test_logging_utils.py | 55 +++++++++++++++++++ .../garbage_collector/_core_disconnected.py | 3 + .../garbage_collector/plugin.py | 10 +++- 4 files changed, 71 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index bc8ba72b4c0..2e6b9960eff 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -392,3 +392,8 @@ def guess_message_log_level(message: str) -> LogLevelInt: ): return logging.WARNING return logging.INFO + + +def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None: + parent_module = ".".join(current_module.split(".")[:-1]) + logging.getLogger(parent_module).setLevel(desired_log_level) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index 024ce9966aa..abdfcd5411e 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -14,6 +14,7 @@ log_context, log_decorator, log_exceptions, + set_parent_module_log_level, ) _logger = logging.getLogger(__name__) @@ -322,3 +323,57 @@ def test_log_exceptions_and_reraise(caplog: pytest.LogCaptureFixture, level: int assert 
len(caplog.records) == (1 if level != logging.NOTSET else 0) assert all(r.levelno == level for r in caplog.records) + + +def test_set_parent_module_log_level_(caplog: pytest.LogCaptureFixture): + caplog.clear() + # emulates service logger + logging.root.setLevel(logging.WARNING) + + parent = logging.getLogger("parent") + child = logging.getLogger("parent.child") + + assert parent.level == logging.NOTSET + assert child.level == logging.NOTSET + + parent.debug("parent debug") + child.debug("child debug") + + parent.info("parent info") + child.info("child info") + + parent.warning("parent warning") + child.warning("child warning") + + assert "parent debug" not in caplog.text + assert "child debug" not in caplog.text + + assert "parent info" not in caplog.text + assert "child info" not in caplog.text + + assert "parent warning" in caplog.text + assert "child warning" in caplog.text + + caplog.clear() + set_parent_module_log_level("parent.child", logging.INFO) + + assert parent.level == logging.INFO + assert child.level == logging.NOTSET + + parent.debug("parent debug") + child.debug("child debug") + + parent.info("parent info") + child.info("child info") + + parent.warning("parent warning") + child.warning("child warning") + + assert "parent debug" not in caplog.text + assert "child debug" not in caplog.text + + assert "parent info" in caplog.text + assert "child info" in caplog.text + + assert "parent warning" in caplog.text + assert "child warning" in caplog.text diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py index 72e70898b71..2acdbed9447 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py @@ -106,6 +106,9 @@ async def remove_disconnected_user_resources( # inform that the project can be closed on the backend side # try: + _logger.info( + "Closing services for project '%s'", resource_value + ) await remove_project_dynamic_services( user_id=user_id, project_uuid=f"{resource_value}", diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py index 2b90c4d92b4..c4b62d7424d 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py @@ -2,7 +2,9 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from servicelib.logging_utils import set_parent_module_log_level +from ..application_settings import get_application_settings from ..login.plugin import setup_login_storage from ..projects.db import setup_projects_db from ..socketio.plugin import setup_socketio @@ -11,14 +13,14 @@ from ._tasks_users import create_background_task_for_trial_accounts from .settings import get_plugin_settings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @app_module_setup( "simcore_service_webserver.garbage_collector", ModuleCategory.ADDON, settings_name="WEBSERVER_GARBAGE_COLLECTOR", - logger=logger, + logger=_logger, ) def setup_garbage_collector(app: web.Application) -> None: # - project-api needs access to db @@ -32,6 +34,10 @@ def setup_garbage_collector(app: web.Application) -> None: 
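The hunk below wires set_parent_module_log_level into the GC plugin setup. A minimal, self-contained sketch of the mechanism it relies on (the logger names are taken from the file paths in this patch; the basicConfig default is an assumption standing in for the service's real logging setup): in Python's logging module, a logger whose level is NOTSET defers to its nearest ancestor with an explicit level, so raising the level on the shared parent package makes every module logger underneath it emit at that level.

    import logging

    logging.basicConfig(level=logging.WARNING)  # stand-in for the service-wide default

    plugin_logger = logging.getLogger("simcore_service_webserver.garbage_collector.plugin")
    sibling_logger = logging.getLogger("simcore_service_webserver.garbage_collector._core_disconnected")

    # what the added call boils down to: configure the *parent* package logger
    parent = ".".join(plugin_logger.name.split(".")[:-1])
    logging.getLogger(parent).setLevel(logging.INFO)

    # both module loggers are NOTSET, so they now inherit INFO from the parent
    sibling_logger.info("now emitted")    # was filtered out before the call
    sibling_logger.debug("still hidden")  # DEBUG stays below the inherited level

This is why the "Closing services for project ..." log added in _core_disconnected.py becomes persistent: the GC package as a whole is lifted to at least INFO, regardless of the root level.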
app.cleanup_ctx.append(run_background_task) + set_parent_module_log_level( + _logger.name, min(logging.INFO, get_application_settings(app).log_level) + ) + # NOTE: scaling web-servers will lead to having multiple tasks upgrading the db # not a huge deal. Instead this task runs in the GC. # If more tasks of this nature are needed, we should setup some sort of registration mechanism From 84119ba57be4199f31a4886791be5c70f7356cba Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Tue, 24 Sep 2024 09:24:25 +0200 Subject: [PATCH 018/104] =?UTF-8?q?=F0=9F=8E=A8=20The=20user=20can=20remov?= =?UTF-8?q?e=20themselves=20from=20the=20project/workspace.=20(#6415)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../projects/_groups_api.py | 17 +++++++++-------- .../workspaces/_groups_api.py | 4 ++-- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_api.py b/services/web/server/src/simcore_service_webserver/projects/_groups_api.py index db84993fdb7..2477c36ecfc 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_api.py @@ -139,19 +139,20 @@ async def delete_project_group( group_id: GroupID, product_name: ProductName, ) -> None: - await check_user_project_permission( - app, - project_id=project_id, - user_id=user_id, - product_name=product_name, - permission="delete", - ) + user: dict = await users_api.get_user(app, user_id=user_id) + if user["primary_gid"] != group_id: + await check_user_project_permission( + app, + project_id=project_id, + user_id=user_id, + product_name=product_name, + permission="delete", + ) project_db: ProjectDBAPI = app[APP_PROJECT_DBAPI] project = await project_db.get_project_db(project_id) project_owner_user: dict = await users_api.get_user(app, project.prj_owner) if project_owner_user["primary_gid"] == group_id: - user: dict = await users_api.get_user(app, user_id) if user["primary_gid"] != project_owner_user["primary_gid"]: # Only the owner of the project can delete the owner group raise ProjectInvalidRightsError( diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_api.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_api.py index d58fc8e2ab7..0ec1e44618e 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_api.py @@ -159,15 +159,15 @@ async def delete_workspace_group( group_id: GroupID, product_name: ProductName, ) -> None: + user: dict = await users_api.get_user(app, user_id=user_id) workspace: UserWorkspaceAccessRightsDB = await workspaces_db.get_workspace_for_user( app=app, user_id=user_id, workspace_id=workspace_id, product_name=product_name ) - if workspace.my_access_rights.delete is False: + if user["primary_gid"] != group_id and workspace.my_access_rights.delete is False: raise WorkspaceAccessForbiddenError( reason=f"User does not have delete access to workspace {workspace_id}" ) if workspace.owner_primary_gid == group_id: - user: dict = await users_api.get_user(app, user_id) if user["primary_gid"] != workspace.owner_primary_gid: # Only the owner of the workspace can delete the owner group raise WorkspaceAccessForbiddenError( From aa0b57c52dc8b8893811812f17edcec99619e980 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 
<60785969+matusdrobuliak66@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:46:15 +0200 Subject: [PATCH 019/104] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Maintenance:=20rem?= =?UTF-8?q?oving=20old=20folders=20(#6383)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../47ca7335e146_remove_old_folders.py | 169 ++ .../models/folders.py | 183 -- .../utils_folders.py | 1155 -------- .../tests/test_utils_folders.py | 2312 ----------------- ...handlers__clone_in_workspace_and_folder.py | 4 +- 5 files changed, 171 insertions(+), 3652 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py delete mode 100644 packages/postgres-database/src/simcore_postgres_database/models/folders.py delete mode 100644 packages/postgres-database/src/simcore_postgres_database/utils_folders.py delete mode 100644 packages/postgres-database/tests/test_utils_folders.py diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py new file mode 100644 index 00000000000..63fb1a29923 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py @@ -0,0 +1,169 @@ +"""remove old folders + +Revision ID: 47ca7335e146 +Revises: 9f381dcb9b95 +Create Date: 2024-09-17 11:54:39.600025+00:00 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "47ca7335e146" +down_revision = "9f381dcb9b95" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("folders_to_projects") + op.drop_table("folders_access_rights") + op.drop_table("folders") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "folders", + sa.Column( + "id", + sa.BIGINT(), + server_default=sa.text("nextval('folders_id_seq'::regclass)"), + autoincrement=True, + nullable=False, + ), + sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column( + "description", + sa.VARCHAR(), + server_default=sa.text("''::character varying"), + autoincrement=False, + nullable=False, + ), + sa.Column("created_by", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column( + "created", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column("product_name", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["created_by"], + ["groups.gid"], + name="fk_folders_to_groups_gid", + ondelete="SET NULL", + ), + sa.ForeignKeyConstraint( + ["product_name"], + ["products.name"], + name="fk_folders_to_products_name", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="folders_pkey"), + postgresql_ignore_search_path=False, + ) + op.create_table( + "folders_access_rights", + sa.Column("folder_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("gid", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column( + "traversal_parent_id", sa.BIGINT(), autoincrement=False, nullable=True + ), + sa.Column( + "original_parent_id", sa.BIGINT(), autoincrement=False, nullable=True + ), + sa.Column("read", sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column("write", sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column("delete", sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column( + "created", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["folder_id"], + ["folders.id"], + name="fk_folders_access_rights_to_folders_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["gid"], + ["groups.gid"], + name="fk_folders_access_rights_to_groups_gid", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["original_parent_id"], + ["folders.id"], + name="fk_folders_to_folders_id_via_original_parent_id", + ondelete="SET NULL", + ), + sa.ForeignKeyConstraint( + ["traversal_parent_id"], + ["folders.id"], + name="fk_folders_to_folders_id_via_traversal_parent_id", + ondelete="SET NULL", + ), + sa.PrimaryKeyConstraint("folder_id", "gid", name="folders_access_rights_pk"), + ) + op.create_table( + "folders_to_projects", + sa.Column("folder_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("project_uuid", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column( + "created", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.Column( + "modified", + postgresql.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + autoincrement=False, + nullable=False, + ), + sa.ForeignKeyConstraint( + ["folder_id"], + ["folders.id"], + name="fk_folders_to_projects_to_folders_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["project_uuid"], + ["projects.uuid"], + 
name="fk_folders_to_projects_to_projects_uuid", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "folder_id", "project_uuid", name="projects_to_folder_pk" + ), + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/folders.py b/packages/postgres-database/src/simcore_postgres_database/models/folders.py deleted file mode 100644 index 82f3ef1a6c4..00000000000 --- a/packages/postgres-database/src/simcore_postgres_database/models/folders.py +++ /dev/null @@ -1,183 +0,0 @@ -import sqlalchemy as sa - -from ._common import ( - column_created_datetime, - column_modified_datetime, - register_modified_datetime_auto_update_trigger, -) -from .base import metadata - -folders = sa.Table( - "folders", - metadata, - sa.Column( - "id", - sa.BigInteger, - nullable=False, - autoincrement=True, - primary_key=True, - doc="Primary key", - ), - sa.Column( - "name", - sa.String, - nullable=False, - doc="name of the folder", - ), - sa.Column( - "description", - sa.String, - nullable=False, - server_default="", - doc="user provided description for the folder", - ), - sa.Column( - "product_name", - sa.String, - sa.ForeignKey( - "products.name", - onupdate="CASCADE", - ondelete="CASCADE", - name="fk_folders_to_products_name", - ), - nullable=False, - doc="product identifier", - ), - sa.Column( - "created_by", - sa.BigInteger, - sa.ForeignKey( - "groups.gid", - name="fk_folders_to_groups_gid", - ondelete="SET NULL", - ), - nullable=True, - doc="traces who created the folder", - ), - column_created_datetime(timezone=True), - column_modified_datetime(timezone=True), -) - - -register_modified_datetime_auto_update_trigger(folders) - -folders_access_rights = sa.Table( - "folders_access_rights", - metadata, - sa.Column( - "folder_id", - sa.BigInteger, - sa.ForeignKey( - "folders.id", - name="fk_folders_access_rights_to_folders_id", - onupdate="CASCADE", - ondelete="CASCADE", - ), - ), - sa.Column( - "gid", - sa.BigInteger, - sa.ForeignKey( - "groups.gid", - name="fk_folders_access_rights_to_groups_gid", - onupdate="CASCADE", - ondelete="CASCADE", - ), - ), - sa.Column( - "traversal_parent_id", - sa.BigInteger, - sa.ForeignKey( - "folders.id", - name="fk_folders_to_folders_id_via_traversal_parent_id", - ondelete="SET NULL", - ), - doc=( - "used for listing the contes of the folders, " - "can be changed by the user by moving the folder" - ), - ), - sa.Column( - "original_parent_id", - sa.BigInteger, - sa.ForeignKey( - "folders.id", - name="fk_folders_to_folders_id_via_original_parent_id", - ondelete="SET NULL", - ), - doc=( - "initially equal the same as `traversal_parent_id`, " - "keeps track of the original parent, " - "can never be changed once insteted" - ), - ), - sa.Column( - "read", - sa.Boolean(), - nullable=False, - doc=( - "if True can: " - "view folders inside current folder " - "view projects inside current folder" - ), - ), - sa.Column( - "write", - sa.Boolean(), - nullable=False, - doc=( - "if True can: " - "create folder inside current folder, " - "add project to folder" - ), - ), - sa.Column( - "delete", - sa.Boolean(), - nullable=False, - doc=( - "if True can: " - "share folder, " - "rename folder, " - "edit folder description, " - "delete folder, " - "delete project form folder" - ), - ), - column_created_datetime(timezone=True), - column_modified_datetime(timezone=True), - sa.PrimaryKeyConstraint("folder_id", "gid", name="folders_access_rights_pk"), -) - 
-register_modified_datetime_auto_update_trigger(folders_access_rights) - - -folders_to_projects = sa.Table( - "folders_to_projects", - metadata, - sa.Column( - "folder_id", - sa.BigInteger, - sa.ForeignKey( - "folders.id", - name="fk_folders_to_projects_to_folders_id", - onupdate="CASCADE", - ondelete="CASCADE", - ), - ), - sa.Column( - "project_uuid", - sa.String, - sa.ForeignKey( - "projects.uuid", - name="fk_folders_to_projects_to_projects_uuid", - onupdate="CASCADE", - ondelete="CASCADE", - ), - ), - column_created_datetime(timezone=True), - column_modified_datetime(timezone=True), - sa.PrimaryKeyConstraint("folder_id", "project_uuid", name="projects_to_folder_pk"), -) - -register_modified_datetime_auto_update_trigger(folders_to_projects) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py deleted file mode 100644 index e0f59cdcfd2..00000000000 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ /dev/null @@ -1,1155 +0,0 @@ -import re -import uuid -from collections.abc import Iterable -from dataclasses import dataclass -from datetime import datetime -from enum import Enum -from functools import reduce -from typing import Any, ClassVar, Final, TypeAlias, cast - -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import RowProxy -from pydantic import ( - BaseModel, - ConstrainedStr, - Field, - NonNegativeInt, - PositiveInt, - ValidationError, - parse_obj_as, -) -from pydantic.errors import PydanticErrorMixin -from simcore_postgres_database.utils_ordering import OrderByDict -from sqlalchemy import Column, func -from sqlalchemy.dialects import postgresql -from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER -from sqlalchemy.sql.elements import ColumnElement, Label -from sqlalchemy.sql.selectable import CTE - -from .models.folders import folders, folders_access_rights, folders_to_projects -from .models.groups import GroupType, groups -from .utils_ordering import OrderDirection - -_ProductName: TypeAlias = str -_ProjectID: TypeAlias = uuid.UUID -_GroupID: TypeAlias = PositiveInt -_FolderID: TypeAlias = PositiveInt - -### -### ERRORS -### - - -"""Errors hierarchy - -FoldersError - * InvalidFolderNameError - * FolderAccessError - * FolderNotFoundError - * FolderNotSharedWithGidError - * InsufficientPermissionsError - * NoAccessForGroupsFoundError - * BaseCreateFolderError - * FolderAlreadyExistsError - * ParentFolderIsNotWritableError - * CouldNotCreateFolderError - * GroupIdDoesNotExistError - * RootFolderRequiresAtLeastOnePrimaryGroupError - * BaseMoveFolderError - * CannotMoveFolderSharedViaNonPrimaryGroupError - * BaseAddProjectError - * ProjectAlreadyExistsInFolderError -""" - - -class FoldersError(PydanticErrorMixin, RuntimeError): - pass - - -class InvalidFolderNameError(FoldersError): - msg_template = "Provided folder name='{name}' is invalid: {reason}" - - -class FolderAccessError(FoldersError): - pass - - -class FolderNotFoundError(FolderAccessError): - msg_template = "no entry found for folder_id={folder_id}, gids={gids} and product_name={product_name}" - - -class FolderNotSharedWithGidError(FolderAccessError): - msg_template = "folder_id={folder_id} was not shared with gids={gids}" - - -class InsufficientPermissionsError(FolderAccessError): - msg_template = "could not find a parent for folder_id={folder_id} and gids={gids}, with permissions={permissions}" - - -class 
NoAccessForGroupsFoundError(FolderAccessError): - msg_template = "No parent found for folder_id={folder_id} and gids={gids}, with permissions={permissions}" - - -class BaseCreateFolderError(FoldersError): - pass - - -class FolderAlreadyExistsError(BaseCreateFolderError): - msg_template = "A folder='{folder}' with parent='{parent}' in product_name={product_name} already exists" - - -class ParentFolderIsNotWritableError(BaseCreateFolderError): - msg_template = "Cannot create any sub-folders inside folder_id={parent_folder_id} since it is not writable for gid={gid}." - - -class CouldNotCreateFolderError(BaseCreateFolderError): - msg_template = "Could not create folder='{folder}' and parent='{parent}'" - - -class NoGroupIDFoundError(BaseCreateFolderError): - msg_template = "None of the provided gids='{gids}' was found" - - -class RootFolderRequiresAtLeastOnePrimaryGroupError(BaseCreateFolderError): - msg_template = ( - "No parent={parent} defined and groupIDs={gids} did not contain a PRIMARY group. " - "Cannot create a folder isnide the 'root' wihtout using the user's group." - ) - - -class BaseMoveFolderError(FoldersError): - pass - - -class CannotMoveFolderSharedViaNonPrimaryGroupError(BaseMoveFolderError): - msg_template = ( - "deltected group_type={group_type} for gid={gid} which is not allowed" - ) - - -class BaseAddProjectError(FoldersError): - pass - - -class ProjectAlreadyExistsInFolderError(BaseAddProjectError): - msg_template = ( - "project_id={project_uuid} in folder_id={folder_id} is already present" - ) - - -### -### UTILS ACCESS LAYER -### - - -class FolderAccessRole(Enum): - """Used by the frontend to indicate a role in a simple manner""" - - NO_ACCESS = 0 - VIEWER = 1 - EDITOR = 2 - OWNER = 3 - - -@dataclass(frozen=True) -class _FolderPermissions: - read: bool - write: bool - delete: bool - - def to_dict(self, *, include_only_true: bool = False) -> dict[str, bool]: - data: dict[str, bool] = { - "read": self.read, - "write": self.write, - "delete": self.delete, - } - if include_only_true: - for key_to_remove in [k for k, v in data.items() if not v]: - data.pop(key_to_remove) - - return data - - -def _make_permissions( - *, r: bool = False, w: bool = False, d: bool = False, description: str = "" -) -> "_FolderPermissions": - _ = description - return _FolderPermissions(read=r, write=w, delete=d) - - -def _only_true_permissions(permissions: _FolderPermissions) -> dict: - return permissions.to_dict(include_only_true=True) - - -def _or_reduce(x: _FolderPermissions, y: _FolderPermissions) -> _FolderPermissions: - return _FolderPermissions( - read=x.read or y.read, write=x.write or y.write, delete=x.delete or y.delete - ) - - -def _or_dicts_list(dicts: Iterable[_FolderPermissions]) -> _FolderPermissions: - if not dicts: - return _make_permissions() - return reduce(_or_reduce, dicts) - - -class _BasePermissions: - GET_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(r=True) - LIST_FOLDERS: ClassVar[_FolderPermissions] = _make_permissions(r=True) - - CREATE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(w=True) - ADD_PROJECT_TO_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(w=True) - - SHARE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True) - UPDATE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True) - DELETE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True) - REMOVE_PROJECT_FROM_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True) - - _MOVE_PROJECT_FROM_FOLDER_SOURCE: ClassVar[_FolderPermissions] = 
_make_permissions( - d=True, - description="apply to folder where the project is", - ) - _MOVE_PROJECT_FROM_FOLDER_DESTINATION: ClassVar[ - _FolderPermissions - ] = _make_permissions( - w=True, description="apply on the folder receiving the project" - ) - MOVE_PROJECT_FROM_FOLDER: ClassVar[_FolderPermissions] = _or_dicts_list( - [_MOVE_PROJECT_FROM_FOLDER_SOURCE, _MOVE_PROJECT_FROM_FOLDER_DESTINATION] - ) - - _MOVE_FOLDER_SOURCE: ClassVar[_FolderPermissions] = _make_permissions( - d=True, - description="apply to folder providing the data", - ) - _MOVE_FOLDER_DESTINATION: ClassVar[_FolderPermissions] = _make_permissions( - w=True, description="apply on the folder receiving the data" - ) - MOVE_FOLDER: ClassVar[_FolderPermissions] = _or_dicts_list( - [_MOVE_FOLDER_SOURCE, _MOVE_FOLDER_DESTINATION] - ) - - -NO_ACCESS_PERMISSIONS: _FolderPermissions = _make_permissions() - -VIEWER_PERMISSIONS: _FolderPermissions = _or_dicts_list( - [ - _BasePermissions.LIST_FOLDERS, - ] -) -EDITOR_PERMISSIONS: _FolderPermissions = _or_dicts_list( - [ - VIEWER_PERMISSIONS, - _BasePermissions.CREATE_FOLDER, - _BasePermissions.ADD_PROJECT_TO_FOLDER, - ] -) -OWNER_PERMISSIONS: _FolderPermissions = _or_dicts_list( - [ - EDITOR_PERMISSIONS, - _BasePermissions.SHARE_FOLDER, - _BasePermissions.UPDATE_FOLDER, - _BasePermissions.DELETE_FOLDER, - _BasePermissions.REMOVE_PROJECT_FROM_FOLDER, - _BasePermissions.MOVE_FOLDER, - ] -) - -_ROLE_TO_PERMISSIONS: dict[FolderAccessRole, _FolderPermissions] = { - FolderAccessRole.NO_ACCESS: NO_ACCESS_PERMISSIONS, - FolderAccessRole.VIEWER: VIEWER_PERMISSIONS, - FolderAccessRole.EDITOR: EDITOR_PERMISSIONS, - FolderAccessRole.OWNER: OWNER_PERMISSIONS, -} - - -def _get_permissions_from_role(role: FolderAccessRole) -> _FolderPermissions: - return _ROLE_TO_PERMISSIONS[role] - - -def _requires(*permissions: _FolderPermissions) -> _FolderPermissions: - if len(permissions) == 0: - return _make_permissions() - return _or_dicts_list(permissions) - - -def _get_filter_for_enabled_permissions( - permissions: _FolderPermissions, table: sa.Table | CTE -) -> ColumnElement | bool: - clauses: list[ColumnElement] = [] - - if permissions.read: - clauses.append(table.c.read.is_(True)) - if permissions.write: - clauses.append(table.c.write.is_(True)) - if permissions.delete: - clauses.append(table.c.delete.is_(True)) - - return sa.and_(*clauses) if clauses else True - - -### -### UTILS -### - - -class FolderName(ConstrainedStr): - regex = re.compile( - r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$', - re.IGNORECASE, - ) - min_length = 1 - max_length = 255 - - -class FolderEntry(BaseModel): - id: _FolderID - parent_folder: _FolderID | None = Field(alias="traversal_parent_id") - name: str - description: str - owner: _GroupID = Field(alias="created_by") - created: datetime = Field(alias="access_created") - modified: datetime = Field(alias="access_modified") - my_access_rights: _FolderPermissions - access_rights: dict[_GroupID, _FolderPermissions] - - class Config: - orm_mode = True - - -class _ResolvedAccessRights(BaseModel): - folder_id: _FolderID - gid: _GroupID - traversal_parent_id: _FolderID | None - original_parent_id: _FolderID | None - read: bool - write: bool - delete: bool - level: int - - class Config: - orm_mode = True - - -async def _get_resolved_access_rights( - connection: SAConnection, - folder_id: _FolderID, - gid: _GroupID, - *, - permissions: _FolderPermissions | None, -) -> 
_ResolvedAccessRights | None: - - # Define the anchor CTE - access_rights_cte = ( - sa.select( - folders_access_rights.c.folder_id, - folders_access_rights.c.gid, - folders_access_rights.c.traversal_parent_id, - folders_access_rights.c.original_parent_id, - folders_access_rights.c.read, - folders_access_rights.c.write, - folders_access_rights.c.delete, - sa.literal_column("0").label("level"), - ) - .where(folders_access_rights.c.folder_id == sa.bindparam("start_folder_id")) - .cte(name="access_rights_cte", recursive=True) - ) - - # Define the recursive part of the CTE - recursive = sa.select( - folders_access_rights.c.folder_id, - folders_access_rights.c.gid, - folders_access_rights.c.traversal_parent_id, - folders_access_rights.c.original_parent_id, - folders_access_rights.c.read, - folders_access_rights.c.write, - folders_access_rights.c.delete, - sa.literal_column("access_rights_cte.level + 1").label("level"), - ).select_from( - folders_access_rights.join( - access_rights_cte, - folders_access_rights.c.folder_id == access_rights_cte.c.original_parent_id, - ) - ) - - # Combine anchor and recursive CTE - folder_hierarchy: CTE = access_rights_cte.union_all(recursive) - - # Final query to filter and order results - query = ( - sa.select( - folder_hierarchy.c.folder_id, - folder_hierarchy.c.gid, - folder_hierarchy.c.traversal_parent_id, - folder_hierarchy.c.original_parent_id, - folder_hierarchy.c.read, - folder_hierarchy.c.write, - folder_hierarchy.c.delete, - folder_hierarchy.c.level, - ) - .where( - True - if not permissions - else _get_filter_for_enabled_permissions(permissions, folder_hierarchy) - ) - .where(folder_hierarchy.c.original_parent_id.is_(None)) - .where(folder_hierarchy.c.gid == gid) - .order_by(folder_hierarchy.c.level.asc()) - ) - - result = await connection.execute(query.params(start_folder_id=folder_id)) - resolved_access_rights: RowProxy | None = await result.fetchone() - return ( - _ResolvedAccessRights.from_orm(resolved_access_rights) - if resolved_access_rights - else None - ) - - -async def _check_and_get_folder_access_by_group( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - gid: _GroupID, - *, - error_reporting_gids: set[_GroupID], - permissions: _FolderPermissions, -) -> _ResolvedAccessRights: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - """ - folder_entry: int | None = await connection.scalar( - sa.select(folders.c.id) - .where(folders.c.id == folder_id) - .where(folders.c.product_name == product_name) - ) - if not folder_entry: - raise FolderNotFoundError( - folder_id=folder_id, gids=error_reporting_gids, product_name=product_name - ) - - # check if folder was shared - resolved_access_rights_without_permissions = await _get_resolved_access_rights( - connection, - folder_id, - gid, - permissions=None, - ) - if not resolved_access_rights_without_permissions: - raise FolderNotSharedWithGidError( - folder_id=folder_id, gids=error_reporting_gids - ) - - # check if there are permissions - resolved_access_rights = await _get_resolved_access_rights( - connection, - folder_id, - gid, - permissions=permissions, - ) - if resolved_access_rights is None: - raise InsufficientPermissionsError( - folder_id=folder_id, - gids=error_reporting_gids, - permissions=_only_true_permissions(permissions), - ) - - return resolved_access_rights - - -async def _check_and_get_folder_access( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - gids: set[_GroupID], - *, - 
permissions: _FolderPermissions, -) -> _ResolvedAccessRights: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - """ - folder_access_error = None - - for gid in gids: - try: - return await _check_and_get_folder_access_by_group( - connection, - product_name, - folder_id, - gid, - error_reporting_gids=gids, - permissions=permissions, - ) - except FolderAccessError as e: # noqa: PERF203 - folder_access_error = e - - if folder_access_error: - raise folder_access_error - - raise NoAccessForGroupsFoundError( - folder_id=folder_id, - gids=gids, - permissions=_only_true_permissions(permissions), - ) - - -### -### API DB LAYER -### - - -async def folder_create( - connection: SAConnection, - product_name: _ProductName, - name: str, - gids: set[_GroupID], - description: str = "", - parent: _FolderID | None = None, - _required_permissions: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions.CREATE_FOLDER - ), -) -> _FolderID: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - FolderAlreadyExistsError - CouldNotCreateFolderError - GroupIdDoesNotExistError - RootFolderRequiresAtLeastOnePrimaryGroupError - """ - try: - parse_obj_as(FolderName, name) - except ValidationError as exc: - raise InvalidFolderNameError(name=name, reason=f"{exc}") from exc - - async with connection.begin(): - entry_exists: int | None = await connection.scalar( - sa.select(folders.c.id) - .select_from( - folders.join( - folders_access_rights, - folders.c.id == folders_access_rights.c.folder_id, - ) - ) - .where(folders.c.name == name) - .where(folders.c.product_name == product_name) - .where(folders_access_rights.c.original_parent_id == parent) - ) - if entry_exists: - raise FolderAlreadyExistsError( - product_name=product_name, folder=name, parent=parent - ) - - # `permissions_gid` is computed as follows: - # - `folder has a parent?` taken from the resolved access rights of the parent folder - # - `is root folder, a.k.a. 
no parent?` taken from the user's primary group - permissions_gid = None - if parent: - resolved_access_rights = await _check_and_get_folder_access( - connection, - product_name, - folder_id=parent, - gids=gids, - permissions=_required_permissions, - ) - permissions_gid = resolved_access_rights.gid - - if permissions_gid is None: - groups_results: list[RowProxy] | None = await ( - await connection.execute( - sa.select(groups.c.gid, groups.c.type).where(groups.c.gid.in_(gids)) - ) - ).fetchall() - - if not groups_results: - raise NoGroupIDFoundError(gids=gids) - - primary_gid = None - for group in groups_results: - if group["type"] == GroupType.PRIMARY: - primary_gid = group["gid"] - if primary_gid is None: - raise RootFolderRequiresAtLeastOnePrimaryGroupError( - parent=parent, gids=gids - ) - - permissions_gid = primary_gid - - # folder entry can now be inserted - folder_id = await connection.scalar( - sa.insert(folders) - .values( - name=name, - description=description, - created_by=permissions_gid, - product_name=product_name, - ) - .returning(folders.c.id) - ) - - if not folder_id: - raise CouldNotCreateFolderError(folder=name, parent=parent) - - await connection.execute( - sa.insert(folders_access_rights).values( - folder_id=folder_id, - gid=permissions_gid, - traversal_parent_id=parent, - original_parent_id=parent, - **OWNER_PERMISSIONS.to_dict(), - ) - ) - - return _FolderID(folder_id) - - -async def folder_share_or_update_permissions( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - sharing_gids: set[_GroupID], - *, - recipient_gid: _GroupID, - recipient_role: FolderAccessRole, - required_permissions: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions.SHARE_FOLDER - ), -) -> None: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - """ - # NOTE: if the `sharing_gid`` has permissions to share it can share it with any `FolderAccessRole` - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=sharing_gids, - permissions=required_permissions, - ) - - # update or create permissions entry - sharing_permissions: _FolderPermissions = _get_permissions_from_role( - recipient_role - ) - data: dict[str, Any] = { - "folder_id": folder_id, - "gid": recipient_gid, - "original_parent_id": None, - "traversal_parent_id": None, - **sharing_permissions.to_dict(), - } - insert_stmt = postgresql.insert(folders_access_rights).values(**data) - upsert_stmt = insert_stmt.on_conflict_do_update( - index_elements=[ - folders_access_rights.c.folder_id, - folders_access_rights.c.gid, - ], - set_=data, - ) - await connection.execute(upsert_stmt) - - -async def folder_update( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - gids: set[_GroupID], - *, - name: str | None = None, - description: str | None = None, - _required_permissions: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions.UPDATE_FOLDER - ), -) -> None: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - """ - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=gids, - permissions=_required_permissions, - ) - - # do not update if nothing changed - if name is None and description is None: - return - - values: dict[str, str] = {} - if name: - values["name"] 
= name
-        if description is not None:  # Can be empty string
-            values["description"] = description
-
-        # update entry
-        await connection.execute(
-            folders.update().where(folders.c.id == folder_id).values(**values)
-        )
-
-
-async def folder_delete(
-    connection: SAConnection,
-    product_name: _ProductName,
-    folder_id: _FolderID,
-    gids: set[_GroupID],
-    *,
-    _required_permissions: _FolderPermissions = _requires(  # noqa: B008
-        _BasePermissions.DELETE_FOLDER
-    ),
-) -> None:
-    """
-    Raises:
-        FolderNotFoundError
-        FolderNotSharedWithGidError
-        InsufficientPermissionsError
-        NoAccessForGroupsFoundError
-    """
-    children_folder_ids: list[_FolderID] = []
-
-    async with connection.begin():
-        await _check_and_get_folder_access(
-            connection,
-            product_name,
-            folder_id=folder_id,
-            gids=gids,
-            permissions=_required_permissions,
-        )
-
-        # list all children then delete
-        results = await connection.execute(
-            folders_access_rights.select().where(
-                folders_access_rights.c.traversal_parent_id == folder_id
-            )
-        )
-        rows = await results.fetchall()
-        if rows:
-            for entry in rows:
-                children_folder_ids.append(entry.folder_id)  # noqa: PERF401
-
-    # first remove all children
-    for child_folder_id in children_folder_ids:
-        await folder_delete(connection, product_name, child_folder_id, gids)
-
-    # as a last step remove the folder itself
-    async with connection.begin():
-        await connection.execute(folders.delete().where(folders.c.id == folder_id))
-
-
-async def folder_move(
-    connection: SAConnection,
-    product_name: _ProductName,
-    source_folder_id: _FolderID,
-    gids: set[_GroupID],
-    *,
-    destination_folder_id: _FolderID | None,
-    required_permissions_source: _FolderPermissions = _requires(  # noqa: B008
-        _BasePermissions._MOVE_FOLDER_SOURCE  # pylint:disable=protected-access # noqa: SLF001
-    ),
-    required_permissions_destination: _FolderPermissions = _requires(  # noqa: B008
-        _BasePermissions._MOVE_FOLDER_DESTINATION  # pylint:disable=protected-access # noqa: SLF001
-    ),
-) -> None:
-    """
-    Raises:
-        FolderNotFoundError
-        FolderNotSharedWithGidError
-        InsufficientPermissionsError
-        NoAccessForGroupsFoundError
-        CannotMoveFolderSharedViaNonPrimaryGroupError
-    """
-    async with connection.begin():
-        source_access_entry = await _check_and_get_folder_access(
-            connection,
-            product_name,
-            folder_id=source_folder_id,
-            gids=gids,
-            permissions=required_permissions_source,
-        )
-
-        source_access_gid = source_access_entry.gid
-        group_type: GroupType | None = await connection.scalar(
-            sa.select(groups.c.type).where(groups.c.gid == source_access_gid)
-        )
-        # NOTE: the primary-group check below might be dropped in the future
-        if group_type is None or group_type != GroupType.PRIMARY:
-            raise CannotMoveFolderSharedViaNonPrimaryGroupError(
-                group_type=group_type, gid=source_access_gid
-            )
-        if destination_folder_id:
-            await _check_and_get_folder_access(
-                connection,
-                product_name,
-                folder_id=destination_folder_id,
-                gids=gids,
-                permissions=required_permissions_destination,
-            )
-
-        # set the new traversal_parent_id on the source_folder_id, i.e. the destination_folder_id
-        await connection.execute(
-            folders_access_rights.update()
-            .where(
-                sa.and_(
-                    folders_access_rights.c.folder_id == source_folder_id,
-                    folders_access_rights.c.gid.in_(gids),
-                )
-            )
-            .values(traversal_parent_id=destination_folder_id)
-        )
-
-
-async def folder_add_project(
-    connection: SAConnection,
-    product_name: _ProductName,
-    folder_id: _FolderID,
-    gids: set[_GroupID],
-    *,
-    project_uuid: _ProjectID,
-    required_permissions=_requires(  # noqa: B008
-        
_BasePermissions.ADD_PROJECT_TO_FOLDER - ), -) -> None: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - ProjectAlreadyExistsInFolderError - """ - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=gids, - permissions=required_permissions, - ) - - # check if already added in folder - project_in_folder_entry = await ( - await connection.execute( - folders_to_projects.select() - .where(folders_to_projects.c.folder_id == folder_id) - .where(folders_to_projects.c.project_uuid == f"{project_uuid}") - ) - ).fetchone() - if project_in_folder_entry: - raise ProjectAlreadyExistsInFolderError( - project_uuid=project_uuid, folder_id=folder_id - ) - - # finally add project to folder - await connection.execute( - folders_to_projects.insert().values( - folder_id=folder_id, project_uuid=f"{project_uuid}" - ) - ) - - -async def folder_move_project( - connection: SAConnection, - product_name: _ProductName, - source_folder_id: _FolderID, - gids: set[_GroupID], - *, - project_uuid: _ProjectID, - destination_folder_id: _FolderID | None, - _required_permissions_source: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions._MOVE_PROJECT_FROM_FOLDER_SOURCE # pylint:disable=protected-access # noqa: SLF001 - ), - _required_permissions_destination: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions._MOVE_PROJECT_FROM_FOLDER_DESTINATION # pylint:disable=protected-access # noqa: SLF001 - ), -) -> None: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - CannotMoveFolderSharedViaNonPrimaryGroupError: - """ - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=source_folder_id, - gids=gids, - permissions=_required_permissions_source, - ) - - if destination_folder_id is None: - # NOTE: As the project is moved to the root directory we will just remove it from the folders_to_projects table - await folder_remove_project( - connection, - product_name, - folder_id=source_folder_id, - gids=gids, - project_uuid=project_uuid, - ) - return - - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=destination_folder_id, - gids=gids, - permissions=_required_permissions_destination, - ) - - await connection.execute( - folders_to_projects.delete() - .where(folders_to_projects.c.folder_id == source_folder_id) - .where(folders_to_projects.c.project_uuid == f"{project_uuid}") - ) - await connection.execute( - folders_to_projects.insert().values( - folder_id=destination_folder_id, project_uuid=f"{project_uuid}" - ) - ) - - -async def get_project_folder_without_check( - connection: SAConnection, - *, - project_uuid: _ProjectID, -) -> _FolderID | None: - """ - This is temporary, until we discuss how to proceed. In first version we assume there is only one unique project uuid - in the folders_to_projects table. 
- - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - CannotMoveFolderSharedViaNonPrimaryGroupError: - """ - async with connection.begin(): - folder_id = await connection.scalar( - sa.select(folders_to_projects.c.folder_id).where( - folders_to_projects.c.project_uuid == f"{project_uuid}" - ) - ) - if folder_id: - return _FolderID(folder_id) - return None - - -async def folder_remove_project( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - gids: set[_GroupID], - *, - project_uuid: _ProjectID, - required_permissions=_requires( # noqa: B008 - _BasePermissions.REMOVE_PROJECT_FROM_FOLDER - ), -) -> None: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - """ - async with connection.begin(): - await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=gids, - permissions=required_permissions, - ) - - await connection.execute( - folders_to_projects.delete() - .where(folders_to_projects.c.folder_id == folder_id) - .where(folders_to_projects.c.project_uuid == f"{project_uuid}") - ) - - -_LIST_GROUP_BY_FIELDS: Final[tuple[Column, ...]] = ( - folders.c.id, - folders.c.name, - folders.c.description, - folders.c.created_by, - folders_access_rights.c.traversal_parent_id, -) -_LIST_SELECT_FIELDS: Final[tuple[Label | Column, ...]] = ( - *_LIST_GROUP_BY_FIELDS, - # access_rights - ( - sa.select( - sa.func.jsonb_object_agg( - folders_access_rights.c.gid, - sa.func.jsonb_build_object( - "read", - folders_access_rights.c.read, - "write", - folders_access_rights.c.write, - "delete", - folders_access_rights.c.delete, - ), - ).label("access_rights"), - ) - .where(folders_access_rights.c.folder_id == folders.c.id) - .correlate(folders) - .scalar_subquery() - ).label("access_rights"), - # my_access_rights - func.json_build_object( - "read", - func.max(folders_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN), - "write", - func.max(folders_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN), - "delete", - func.max(folders_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN), - ).label("my_access_rights"), - # access_created - func.max(folders_access_rights.c.created).label("access_created"), - # access_modified - func.max(folders_access_rights.c.modified).label("access_modified"), -) - - -async def folder_list( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID | None, - gids: set[_GroupID], - *, - offset: NonNegativeInt, - limit: NonNegativeInt, - order_by: OrderByDict = OrderByDict( # noqa: B008 - field="modified", direction=OrderDirection.DESC - ), - required_permissions: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions.LIST_FOLDERS - ), -) -> tuple[int, list[FolderEntry]]: - """ - Raises: - FolderNotFoundError - FolderNotSharedWithGidError - InsufficientPermissionsError - NoAccessForGroupsFoundError - """ - # NOTE: when `folder_id is None` list the root folder of the `gids` - - if folder_id is not None: - await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=gids, - permissions=required_permissions, - ) - - results: list[FolderEntry] = [] - - base_query = ( - sa.select(*_LIST_SELECT_FIELDS) - .join(folders_access_rights, folders.c.id == folders_access_rights.c.folder_id) - .where(folders.c.product_name == product_name) - .where( - folders_access_rights.c.traversal_parent_id.is_(None) - if folder_id is None - else 
folders_access_rights.c.traversal_parent_id == folder_id - ) - .where(folders_access_rights.c.gid.in_(gids)) - .where( - _get_filter_for_enabled_permissions( - required_permissions, folders_access_rights - ) - ) - .group_by(*_LIST_GROUP_BY_FIELDS) - ) - - # Select total count from base_query - subquery = base_query.subquery() - count_query = sa.select(sa.func.count()).select_from(subquery) - count_result = await connection.execute(count_query) - total_count = await count_result.scalar() - - # Ordering and pagination - if order_by["direction"] == OrderDirection.ASC: - list_query = base_query.order_by(sa.asc(getattr(folders.c, order_by["field"]))) - else: - list_query = base_query.order_by(sa.desc(getattr(folders.c, order_by["field"]))) - list_query = list_query.offset(offset).limit(limit) - - async for entry in connection.execute(list_query): - results.append(FolderEntry.from_orm(entry)) # noqa: PERF401s - - return cast(int, total_count), results - - -async def folder_get( - connection: SAConnection, - product_name: _ProductName, - folder_id: _FolderID, - gids: set[_GroupID], - *, - required_permissions: _FolderPermissions = _requires( # noqa: B008 - _BasePermissions.GET_FOLDER - ), -) -> FolderEntry: - resolved_access_rights: _ResolvedAccessRights = await _check_and_get_folder_access( - connection, - product_name, - folder_id=folder_id, - gids=gids, - permissions=required_permissions, - ) - permissions_gid: _GroupID = resolved_access_rights.gid - - query = ( - sa.select(*_LIST_SELECT_FIELDS) - .join(folders_access_rights, folders.c.id == folders_access_rights.c.folder_id) - .where(folders_access_rights.c.folder_id == folder_id) - .where(folders_access_rights.c.gid == permissions_gid) - .where( - _get_filter_for_enabled_permissions( - required_permissions, folders_access_rights - ) - if folder_id is None - else True - ) - .where(folders.c.product_name == product_name) - .group_by(*_LIST_GROUP_BY_FIELDS) - ) - - query_result: RowProxy | None = await (await connection.execute(query)).fetchone() - - if query_result is None: - raise FolderNotFoundError( - folder_id=folder_id, gids=gids, product_name=product_name - ) - - return FolderEntry.from_orm(query_result) - - -__all__ = ["OrderByDict"] diff --git a/packages/postgres-database/tests/test_utils_folders.py b/packages/postgres-database/tests/test_utils_folders.py deleted file mode 100644 index 8c49fd9914f..00000000000 --- a/packages/postgres-database/tests/test_utils_folders.py +++ /dev/null @@ -1,2312 +0,0 @@ -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-statements -# pylint:disable=unused-variable - -import itertools -from collections.abc import AsyncIterable, Awaitable, Callable -from copy import deepcopy -from typing import NamedTuple -from unittest.mock import Mock - -import pytest -import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import RowProxy -from pydantic import BaseModel, Field, NonNegativeInt -from pytest_simcore.helpers.faker_factories import random_product -from simcore_postgres_database.models.folders import ( - folders, - folders_access_rights, - folders_to_projects, -) -from simcore_postgres_database.models.groups import GroupType, groups -from simcore_postgres_database.utils_folders import ( - _ROLE_TO_PERMISSIONS, - EDITOR_PERMISSIONS, - NO_ACCESS_PERMISSIONS, - OWNER_PERMISSIONS, - VIEWER_PERMISSIONS, - CannotMoveFolderSharedViaNonPrimaryGroupError, - FolderAccessRole, - FolderAlreadyExistsError, - FolderEntry, - FolderNotFoundError, - 
FolderNotSharedWithGidError, - InsufficientPermissionsError, - InvalidFolderNameError, - NoGroupIDFoundError, - RootFolderRequiresAtLeastOnePrimaryGroupError, - _FolderID, - _FolderPermissions, - _get_filter_for_enabled_permissions, - _get_permissions_from_role, - _get_resolved_access_rights, - _GroupID, - _ProductName, - _ProjectID, - _requires, - folder_add_project, - folder_create, - folder_delete, - folder_get, - folder_list, - folder_move, - folder_remove_project, - folder_share_or_update_permissions, - folder_update, -) -from simcore_postgres_database.utils_products import products -from sqlalchemy.sql.elements import ColumnElement - - -def test_permissions_integrity(): - assert set(FolderAccessRole) == set(_ROLE_TO_PERMISSIONS.keys()) - - -@pytest.mark.parametrize( - "read, write, delete", list(itertools.product([True, False], repeat=3)) -) -def test__folder_permissions_to_dict(read: bool, write: bool, delete: bool): - folder_permissions = _FolderPermissions(read=read, write=write, delete=delete) - assert folder_permissions.to_dict() == { - "read": read, - "write": write, - "delete": delete, - } - only_true: dict[str, bool] = {} - if read: - only_true["read"] = True - if write: - only_true["write"] = True - if delete: - only_true["delete"] = True - assert folder_permissions.to_dict(include_only_true=True) == only_true - - -@pytest.mark.parametrize( - "role, expected_permissions", - [ - ( - FolderAccessRole.VIEWER, - _FolderPermissions(read=True, write=False, delete=False), - ), - ( - FolderAccessRole.EDITOR, - _FolderPermissions(read=True, write=True, delete=False), - ), - ( - FolderAccessRole.OWNER, - _FolderPermissions(read=True, write=True, delete=True), - ), - ], -) -def test_role_permissions( - role: FolderAccessRole, expected_permissions: dict[str, bool] -): - assert _get_permissions_from_role(role) == expected_permissions - - -@pytest.mark.parametrize( - "permissions, expected", - [ - ([], _FolderPermissions(read=False, write=False, delete=False)), - ( - [VIEWER_PERMISSIONS], - _FolderPermissions(read=True, write=False, delete=False), - ), - ([EDITOR_PERMISSIONS], _FolderPermissions(read=True, write=True, delete=False)), - ( - [EDITOR_PERMISSIONS, VIEWER_PERMISSIONS], - _FolderPermissions(read=True, write=True, delete=False), - ), - ([OWNER_PERMISSIONS], _FolderPermissions(read=True, write=True, delete=True)), - ( - [OWNER_PERMISSIONS, EDITOR_PERMISSIONS], - _FolderPermissions(read=True, write=True, delete=True), - ), - ( - [OWNER_PERMISSIONS, EDITOR_PERMISSIONS, VIEWER_PERMISSIONS], - _FolderPermissions(read=True, write=True, delete=True), - ), - ], -) -def test__requires_permissions( - permissions: list[_FolderPermissions], expected: dict[str, bool] -): - assert _requires(*permissions) == expected - - -@pytest.fixture -async def create_product( - connection: SAConnection, -) -> AsyncIterable[Callable[[str], Awaitable[_ProductName]]]: - created_products: list[_ProductName] = [] - - async def _(name: str) -> _ProductName: - assert name != "osparc", f"{name} is reserved! 
please choose a different one"
-        result: _ProductName | None = await connection.scalar(
-            products.insert()
-            .values(random_product(name=name, group_id=None))
-            .returning(products.c.name)
-        )
-        assert result is not None
-        created_products.append(result)  # register for cleanup during teardown
-        return result
-
-    yield _
-
-    for product in created_products:
-        await connection.execute(products.delete().where(products.c.name == product))
-
-
-@pytest.fixture
-async def default_product_name(
-    create_product: Callable[[str], Awaitable[_ProductName]]
-) -> _ProductName:
-    return await create_product("test_product")
-
-
-@pytest.mark.parametrize(
-    "invalid_name",
-    [
-        None,
-        "",
-        "/",
-        ":",
-        '"',
-        "<",
-        ">",
-        "\\",
-        "|",
-        "?",
-        "My/Folder",
-        "MyFolder<",
-        "My*Folder",
-        "A" * (256),
-        "CON",
-        "PRN",
-        "AUX",
-        "NUL",
-        *[f"COM{i}" for i in range(1, 10)],
-        *[f"LPT{i}" for i in range(1, 10)],
-    ],
-)
-async def test_folder_create_wrong_folder_name(invalid_name: str):
-    with pytest.raises(InvalidFolderNameError):
-        await folder_create(Mock(), "mock_product", invalid_name, Mock())
-
-
-def test__get_where_clause():
-    assert isinstance(
-        _get_filter_for_enabled_permissions(VIEWER_PERMISSIONS, folders_access_rights),
-        ColumnElement,
-    )
-    assert isinstance(
-        _get_filter_for_enabled_permissions(EDITOR_PERMISSIONS, folders_access_rights),
-        ColumnElement,
-    )
-    assert isinstance(
-        _get_filter_for_enabled_permissions(OWNER_PERMISSIONS, folders_access_rights),
-        ColumnElement,
-    )
-    assert isinstance(
-        _get_filter_for_enabled_permissions(
-            _FolderPermissions(read=False, write=False, delete=False),
-            folders_access_rights,
-        ),
-        bool,
-    )
-
-
-async def _assert_folder_entires(
-    connection: SAConnection,
-    *,
-    folder_count: NonNegativeInt,
-    access_rights_count: NonNegativeInt | None = None,
-) -> None:
-    async def _query_table(table_name: sa.Table, count: NonNegativeInt) -> None:
-        result = await connection.execute(table_name.select())
-        rows = await result.fetchall()
-        assert rows is not None
-        assert len(rows) == count
-
-    await _query_table(folders, folder_count)
-    await _query_table(folders_access_rights, access_rights_count or folder_count)
-
-
-async def _assert_folderpermissions_exists(
-    connection: SAConnection, folder_id: _FolderID, gids: set[_GroupID]
-) -> None:
-    result = await connection.execute(
-        folders_access_rights.select()
-        .where(folders_access_rights.c.folder_id == folder_id)
-        .where(folders_access_rights.c.gid.in_(gids))
-    )
-    rows = await result.fetchall()
-    assert rows is not None
-    assert len(rows) == 1
-
-
-async def _assert_folder_permissions(
-    connection: SAConnection,
-    *,
-    folder_id: _FolderID,
-    gid: _GroupID,
-    role: FolderAccessRole,
-) -> None:
-    result = await connection.execute(
-        sa.select(folders_access_rights.c.folder_id)
-        .where(folders_access_rights.c.folder_id == folder_id)
-        .where(folders_access_rights.c.gid == gid)
-        .where(
-            _get_filter_for_enabled_permissions(
-                _get_permissions_from_role(role), folders_access_rights
-            )
-        )
-    )
-    rows = await result.fetchall()
-    assert rows is not None
-    assert len(rows) == 1
-
-
-async def _assert_name_and_description(
-    connection: SAConnection,
-    folder_id: _FolderID,
-    *,
-    name: str,
-    description: str,
-):
-    async with connection.execute(
-        sa.select(folders.c.name, folders.c.description).where(
-            folders.c.id == folder_id
-        )
-    ) as result_proxy:
-        results = await result_proxy.fetchall()
-        assert results
-        assert len(results) == 1
-        result = results[0]
-        assert result["name"] == name
-        assert result["description"] == description
-
-
-@pytest.fixture
-async def setup_users( - connection: SAConnection, create_fake_user: Callable[..., Awaitable[RowProxy]] -) -> list[RowProxy]: - users: list[RowProxy] = [] - for _ in range(10): - users.append(await create_fake_user(connection)) # noqa: PERF401 - return users - - -@pytest.fixture -async def setup_users_and_groups(setup_users: list[RowProxy]) -> set[_GroupID]: - return {u.primary_gid for u in setup_users} - - -@pytest.fixture -def get_unique_gids( - setup_users_and_groups: set[_GroupID], -) -> Callable[[int], tuple[_GroupID, ...]]: - def _(tuple_size: int) -> tuple[_GroupID, ...]: - copied_groups = deepcopy(setup_users_and_groups) - return tuple(copied_groups.pop() for _ in range(tuple_size)) - - return _ - - -@pytest.fixture -async def setup_projects_for_users( - connection: SAConnection, - setup_users: list[RowProxy], - create_fake_project: Callable[..., Awaitable[RowProxy]], -) -> set[_ProjectID]: - projects: set[_ProjectID] = set() - for user in setup_users: - project = await create_fake_project(connection, user) - projects.add(project.uuid) - return projects - - -@pytest.fixture -def get_unique_project_uuids( - setup_projects_for_users: set[_ProjectID], -) -> Callable[[int], tuple[_ProjectID, ...]]: - def _(tuple_size: int) -> tuple[_ProjectID, ...]: - copied_projects = deepcopy(setup_projects_for_users) - return tuple(copied_projects.pop() for _ in range(tuple_size)) - - return _ - - -class MkFolder(BaseModel): - name: str - gid: _GroupID - description: str = "" - parent: _FolderID | None = None - - shared_with: dict[_GroupID, FolderAccessRole] = Field(default_factory=dict) - children: set["MkFolder"] = Field(default_factory=set) - - def __hash__(self): - return hash( - ( - self.name, - self.description, - self.gid, - tuple(sorted(self.shared_with.items())), - frozenset(self.children), - ) - ) - - def __eq__(self, other): - if not isinstance(other, MkFolder): - return False - return ( - self.name == other.name - and self.description == other.description - and self.gid == other.gid - and self.shared_with == other.shared_with - and self.children == other.children - ) - - -@pytest.fixture -def make_folders( - connection: SAConnection, default_product_name: _ProductName -) -> Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]]: - async def _( - root_folders: set[MkFolder], *, parent: _FolderID | None = None - ) -> dict[str, _FolderID]: - folder_names_map: dict[str, _FolderID] = {} - - for root in root_folders: - # create folder - folder_names_map[root.name] = root_folder_id = await folder_create( - connection, - default_product_name, - root.name, - {root.gid}, - description=root.description, - parent=parent, - ) - # share with others - for gid, role in root.shared_with.items(): - await folder_share_or_update_permissions( - connection, - default_product_name, - root_folder_id, - sharing_gids={root.gid}, - recipient_gid=gid, - recipient_role=role, - ) - # create subfolders - subfolders_names_map = await _(root.children, parent=root_folder_id) - root_name = set(folder_names_map.keys()) - subfolder_names = set(subfolders_names_map.keys()) - if subfolder_names & root_name != set(): - msg = f"{root_name=} and {subfolder_names=} are not allowed to have common folder names" - raise ValueError(msg) - folder_names_map.update(subfolders_names_map) - - return folder_names_map - - return _ - - -async def test_folder_create( - connection: SAConnection, - create_product: Callable[[str], Awaitable[_ProductName]], - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], -): - - (owner_gid,) = 
get_unique_gids(1)
-
-    product_a = await create_product("product_a")
-    product_b = await create_product("product_b")
-
-    expected_folder_count: int = 0
-    for product_name in (
-        product_a,
-        product_b,  # NOTE: a different product can also define the same folder structure
-    ):
-
-        # 1. when GID is missing no entries should be present
-        missing_gid = 10202023302
-        await _assert_folder_entires(connection, folder_count=expected_folder_count)
-        with pytest.raises(NoGroupIDFoundError):
-            await folder_create(connection, product_name, "f1", {missing_gid})
-        await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
-        # 2. create a folder and a subfolder of the same name
-        f1_folder_id = await folder_create(connection, product_name, "f1", {owner_gid})
-        expected_folder_count += 1
-        await _assert_folder_entires(connection, folder_count=expected_folder_count)
-        await folder_create(
-            connection, product_name, "f1", {owner_gid}, parent=f1_folder_id
-        )
-        expected_folder_count += 1
-        await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
-        # 3. inserting an already existing folder fails
-        with pytest.raises(FolderAlreadyExistsError):
-            await folder_create(connection, product_name, "f1", {owner_gid})
-        await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
-
-async def test_folder_create_shared_via_groups(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-    create_fake_group: Callable[..., Awaitable[RowProxy]],
-):
-    #######
-    # SETUP
-    #######
-    gid_original_owner: _GroupID
-    (gid_original_owner,) = get_unique_gids(1)
-
-    gid_user: _GroupID = (
-        await create_fake_group(connection, type=GroupType.PRIMARY)
-    ).gid
-    gid_everyone: _GroupID | None = await connection.scalar(
-        sa.select(groups.c.gid).where(groups.c.type == GroupType.EVERYONE)
-    )
-    assert gid_everyone
-    gid_z43: _GroupID = (
-        await create_fake_group(connection, type=GroupType.STANDARD)
-    ).gid
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="root",
-                gid=gid_original_owner,
-                shared_with={
-                    gid_z43: FolderAccessRole.OWNER,
-                    gid_everyone: FolderAccessRole.OWNER,
-                },
-            ),
-        }
-    )
-
-    folder_id_root = folder_ids["root"]
-
-    #######
-    # TESTS
-    #######
-
-    # 1. can create when using one gid with permissions
-    folder_id_f1 = await folder_create(
-        connection,
-        default_product_name,
-        "f1",
-        {gid_z43, gid_user},
-        parent=folder_id_root,
-    )
-    await _assert_folderpermissions_exists(connection, folder_id_f1, {gid_z43})
-
-    folder_id_f2 = await folder_create(
-        connection,
-        default_product_name,
-        "f2",
-        {gid_everyone, gid_user},
-        parent=folder_id_root,
-    )
-    await _assert_folderpermissions_exists(connection, folder_id_f2, {gid_everyone})
-
-    # 2. can create a new folder when using both gids with permissions
-    folder_id_f3 = await folder_create(
-        connection,
-        default_product_name,
-        "f3",
-        {gid_z43, gid_everyone, gid_user},
-        parent=folder_id_root,
-    )
-    await _assert_folderpermissions_exists(
-        connection, folder_id_f3, {gid_everyone, gid_z43}
-    )
-
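-    # A short illustration of why the assertions above only expect the group
-    # gids (gid_z43 / gid_everyone) and never gid_user. This is a reading of
-    # the folder_create implementation earlier in this diff, not extra
-    # behaviour: when a parent is given, the recorded gid comes from
-    # resolving access rights on that parent, roughly
-    #
-    #   resolved_access_rights = await _check_and_get_folder_access(
-    #       connection, product_name, folder_id=parent,
-    #       gids={gid_z43, gid_user}, permissions=_required_permissions,
-    #   )
-    #   permissions_gid = resolved_access_rights.gid  # -> gid_z43 here
-    #
-    # gid_user was never granted access on "root", so it cannot be the gid
-    # stored in folders_access_rights (or in folders.created_by).
-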
-    # 3. cannot create a root folder without a primary group
-    with pytest.raises(RootFolderRequiresAtLeastOnePrimaryGroupError):
-        await folder_create(
-            connection,
-            default_product_name,
-            "folder_in_root",
-            {gid_z43, gid_everyone},
-        )
-
-
-async def test__get_resolved_access_rights(
-    connection: SAConnection,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
-    #######
-    # SETUP
-    #######
-    (
-        gid_owner_a,
-        gid_owner_b,
-        gid_owner_c,
-        gid_owner_d,
-        gid_editor_a,
-        gid_editor_b,
-    ) = get_unique_gids(6)
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="root",
-                gid=gid_owner_a,
-                shared_with={
-                    gid_owner_b: FolderAccessRole.OWNER,
-                    gid_owner_c: FolderAccessRole.OWNER,
-                    gid_owner_d: FolderAccessRole.OWNER,
-                    gid_editor_a: FolderAccessRole.EDITOR,
-                },
-                children={
-                    MkFolder(name="b", gid=gid_owner_b),
-                    MkFolder(
-                        name="c",
-                        gid=gid_owner_c,
-                        children={
-                            MkFolder(
-                                name="d",
-                                gid=gid_owner_d,
-                                shared_with={gid_editor_b: FolderAccessRole.EDITOR},
-                                children={MkFolder(name="editor_a", gid=gid_editor_a)},
-                            )
-                        },
-                    ),
-                },
-            ),
-        }
-    )
-
-    folder_id_root = folder_ids["root"]
-    folder_id_b = folder_ids["b"]
-    folder_id_c = folder_ids["c"]
-    folder_id_d = folder_ids["d"]
-    folder_id_editor_a = folder_ids["editor_a"]
-
-    # helper: check the resolution of access rights
-    async def _assert_resolves_to(
-        *,
-        target_folder_id: _FolderID,
-        gid: _GroupID,
-        permissions: _FolderPermissions,
-        expected_folder_id: _FolderID,
-        expected_gids: set[_GroupID],
-    ) -> None:
-        resolved_parent = await _get_resolved_access_rights(
-            connection,
-            target_folder_id,
-            gid,
-            permissions=permissions,
-        )
-        assert resolved_parent
-        assert resolved_parent.folder_id == expected_folder_id
-        assert resolved_parent.gid in expected_gids
-
-    #######
-    # TESTS
-    #######
-
-    await _assert_resolves_to(
-        target_folder_id=folder_id_root,
-        gid=gid_owner_a,
-        permissions=OWNER_PERMISSIONS,
-        expected_folder_id=folder_id_root,
-        expected_gids={gid_owner_a},
-    )
-    await _assert_resolves_to(
-        target_folder_id=folder_id_b,
-        gid=gid_owner_b,
-        permissions=OWNER_PERMISSIONS,
-        expected_folder_id=folder_id_root,
-        expected_gids={gid_owner_b},
-    )
-    await _assert_resolves_to(
-        target_folder_id=folder_id_c,
-        gid=gid_owner_c,
-        permissions=OWNER_PERMISSIONS,
-        expected_folder_id=folder_id_root,
-        expected_gids={gid_owner_c},
-    )
-    await _assert_resolves_to(
-        target_folder_id=folder_id_d,
-        gid=gid_owner_d,
-        permissions=OWNER_PERMISSIONS,
-        expected_folder_id=folder_id_root,
-        expected_gids={gid_owner_d},
-    )
-    await _assert_resolves_to(
-        target_folder_id=folder_id_editor_a,
-        gid=gid_editor_a,
-        permissions=EDITOR_PERMISSIONS,
-        expected_folder_id=folder_id_root,
-        expected_gids={gid_editor_a},
-    )
-    await _assert_resolves_to(
-        target_folder_id=folder_id_editor_a,
-        gid=gid_editor_b,
-        permissions=EDITOR_PERMISSIONS,
-        expected_folder_id=folder_id_d,
-        expected_gids={gid_editor_b},
-    )
-
-
-async def test_folder_share_or_update_permissions(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-):
-    (
-        gid_owner,
-        gid_other_owner,
-        gid_editor,
-        gid_viewer,
-        gid_no_access,
-        gid_share_with_error,
-    ) = get_unique_gids(6)
-
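-    # Context for the steps below (a sketch of the upsert inside
-    # folder_share_or_update_permissions, as it appears earlier in this
-    # diff): sharing writes folders_access_rights via PostgreSQL's
-    # INSERT ... ON CONFLICT DO UPDATE, keyed on (folder_id, gid):
-    #
-    #   insert_stmt = postgresql.insert(folders_access_rights).values(**data)
-    #   upsert_stmt = insert_stmt.on_conflict_do_update(
-    #       index_elements=[
-    #           folders_access_rights.c.folder_id,
-    #           folders_access_rights.c.gid,
-    #       ],
-    #       set_=data,
-    #   )
-    #
-    # Hence re-sharing with a different role updates the existing row, and
-    # access_rights_count grows only on the first share per gid.
-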
-    # 1. folder does not exist
-    folder_id_missing = 12313123232
-    with pytest.raises(FolderNotFoundError):
-        await folder_share_or_update_permissions(
-            connection,
-            default_product_name,
-            folder_id_missing,
-            sharing_gids={gid_owner},
-            recipient_gid=gid_share_with_error,
-            recipient_role=FolderAccessRole.OWNER,
-        )
-    await _assert_folder_entires(connection, folder_count=0)
-
-    # 2. share existing folder with all possible roles
-    folder_id = await folder_create(connection, default_product_name, "f1", {gid_owner})
-    await _assert_folder_entires(connection, folder_count=1)
-    await _assert_folder_permissions(
-        connection, folder_id=folder_id, gid=gid_owner, role=FolderAccessRole.OWNER
-    )
-
-    await folder_share_or_update_permissions(
-        connection,
-        default_product_name,
-        folder_id,
-        sharing_gids={gid_owner},
-        recipient_gid=gid_other_owner,
-        recipient_role=FolderAccessRole.OWNER,
-    )
-    await _assert_folder_entires(connection, folder_count=1, access_rights_count=2)
-    await _assert_folder_permissions(
-        connection,
-        folder_id=folder_id,
-        gid=gid_other_owner,
-        role=FolderAccessRole.OWNER,
-    )
-
-    await folder_share_or_update_permissions(
-        connection,
-        default_product_name,
-        folder_id,
-        sharing_gids={gid_owner},
-        recipient_gid=gid_editor,
-        recipient_role=FolderAccessRole.EDITOR,
-    )
-    await _assert_folder_entires(connection, folder_count=1, access_rights_count=3)
-    await _assert_folder_permissions(
-        connection, folder_id=folder_id, gid=gid_editor, role=FolderAccessRole.EDITOR
-    )
-
-    await folder_share_or_update_permissions(
-        connection,
-        default_product_name,
-        folder_id,
-        sharing_gids={gid_owner},
-        recipient_gid=gid_viewer,
-        recipient_role=FolderAccessRole.VIEWER,
-    )
-    await _assert_folder_entires(connection, folder_count=1, access_rights_count=4)
-    await _assert_folder_permissions(
-        connection, folder_id=folder_id, gid=gid_viewer, role=FolderAccessRole.VIEWER
-    )
-
-    await folder_share_or_update_permissions(
-        connection,
-        default_product_name,
-        folder_id,
-        sharing_gids={gid_owner},
-        recipient_gid=gid_no_access,
-        recipient_role=FolderAccessRole.NO_ACCESS,
-    )
-    await _assert_folder_entires(connection, folder_count=1, access_rights_count=5)
-    await _assert_folder_permissions(
-        connection,
-        folder_id=folder_id,
-        gid=gid_no_access,
-        role=FolderAccessRole.NO_ACCESS,
-    )
-
-    # 3. roles without permissions cannot share with any role
-    for recipient_role in FolderAccessRole:
-        for no_access_gid in (gid_editor, gid_viewer, gid_no_access):
-            with pytest.raises(InsufficientPermissionsError):
-                await folder_share_or_update_permissions(
-                    connection,
-                    default_product_name,
-                    folder_id,
-                    sharing_gids={no_access_gid},
-                    recipient_gid=gid_share_with_error,
-                    recipient_role=recipient_role,
-                )
-            await _assert_folder_entires(
-                connection, folder_count=1, access_rights_count=5
-            )
-
-        with pytest.raises(FolderNotSharedWithGidError):
-            await folder_share_or_update_permissions(
-                connection,
-                default_product_name,
-                folder_id,
-                sharing_gids={gid_share_with_error},
-                recipient_gid=gid_share_with_error,
-                recipient_role=recipient_role,
-            )
-        await _assert_folder_entires(connection, folder_count=1, access_rights_count=5)
-
-    # 4. all users lose their permissions on the folder, including the issuer
-    # NOTE: another_owner drops the owner's permission and then its own permission down to NO_ACCESS!
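-    # (Illustrative detail, inferred from the NO_ACCESS handling exercised
-    # above: demoting a gid does not delete its folders_access_rights row,
-    # it upserts the row with every flag disabled, e.g.
-    #
-    #   _FolderPermissions(read=False, write=False, delete=False).to_dict()
-    #   # -> {"read": False, "write": False, "delete": False}
-    #
-    # which is why access_rights_count stays at 5 for the whole loop below.)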
- for gid_to_drop_permission in (gid_owner, gid_editor, gid_viewer, gid_other_owner): - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={gid_other_owner}, - recipient_gid=gid_to_drop_permission, - recipient_role=FolderAccessRole.NO_ACCESS, - ) - await _assert_folder_entires(connection, folder_count=1, access_rights_count=5) - await _assert_folder_permissions( - connection, - folder_id=folder_id, - gid=gid_to_drop_permission, - role=FolderAccessRole.NO_ACCESS, - ) - - -async def test_folder_update( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], -): - ( - owner_gid, - other_owner_gid, - editor_gid, - viewer_gid, - no_access_gid, - share_with_error_gid, - ) = get_unique_gids(6) - - # 1. folder is missing - missing_folder_id = 1231321332 - with pytest.raises(FolderNotFoundError): - await folder_update( - connection, default_product_name, missing_folder_id, {owner_gid} - ) - await _assert_folder_entires(connection, folder_count=0) - - # 2. owner updates created fodler - folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid}) - await _assert_folder_entires(connection, folder_count=1) - await _assert_name_and_description(connection, folder_id, name="f1", description="") - - # nothing changes - await folder_update(connection, default_product_name, folder_id, {owner_gid}) - await _assert_name_and_description(connection, folder_id, name="f1", description="") - - # both changed - await folder_update( - connection, - default_product_name, - folder_id, - {owner_gid}, - name="new_folder", - description="new_desc", - ) - await _assert_name_and_description( - connection, folder_id, name="new_folder", description="new_desc" - ) - - # 3. another_owner can also update - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=other_owner_gid, - recipient_role=FolderAccessRole.OWNER, - ) - await folder_update( - connection, - default_product_name, - folder_id, - {owner_gid}, - name="another_owner_name", - description="another_owner_description", - ) - await _assert_name_and_description( - connection, - folder_id, - name="another_owner_name", - description="another_owner_description", - ) - - # 4. 
other roles have no permission to update - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=editor_gid, - recipient_role=FolderAccessRole.EDITOR, - ) - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=viewer_gid, - recipient_role=FolderAccessRole.VIEWER, - ) - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=no_access_gid, - recipient_role=FolderAccessRole.NO_ACCESS, - ) - - for target_user_gid in (editor_gid, viewer_gid, no_access_gid): - with pytest.raises(InsufficientPermissionsError): - await folder_update( - connection, - default_product_name, - folder_id, - {target_user_gid}, - name="error_name", - description="error_description", - ) - await _assert_name_and_description( - connection, - folder_id, - name="another_owner_name", - description="another_owner_description", - ) - - with pytest.raises(FolderNotSharedWithGidError): - await folder_update( - connection, - default_product_name, - folder_id, - {share_with_error_gid}, - name="error_name", - description="error_description", - ) - await _assert_name_and_description( - connection, - folder_id, - name="another_owner_name", - description="another_owner_description", - ) - - -async def test_folder_delete( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], -): - ( - owner_gid, - other_owner_gid, - editor_gid, - viewer_gid, - no_access_gid, - share_with_error_gid, - ) = get_unique_gids(6) - - # 1. folder is missing - missing_folder_id = 1231321332 - with pytest.raises(FolderNotFoundError): - await folder_delete( - connection, default_product_name, missing_folder_id, {owner_gid} - ) - await _assert_folder_entires(connection, folder_count=0) - - # 2. owner deletes folder - folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid}) - await _assert_folder_entires(connection, folder_count=1) - - await folder_delete(connection, default_product_name, folder_id, {owner_gid}) - await _assert_folder_entires(connection, folder_count=0) - - # 3. other owners can delete the folder - folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid}) - await _assert_folder_entires(connection, folder_count=1) - - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=other_owner_gid, - recipient_role=FolderAccessRole.OWNER, - ) - - await folder_delete(connection, default_product_name, folder_id, {other_owner_gid}) - await _assert_folder_entires(connection, folder_count=0) - - # 4. 
non owner users cannot delete the folder - folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid}) - await _assert_folder_entires(connection, folder_count=1) - - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=editor_gid, - recipient_role=FolderAccessRole.EDITOR, - ) - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=viewer_gid, - recipient_role=FolderAccessRole.VIEWER, - ) - await folder_share_or_update_permissions( - connection, - default_product_name, - folder_id, - sharing_gids={owner_gid}, - recipient_gid=no_access_gid, - recipient_role=FolderAccessRole.NO_ACCESS, - ) - await _assert_folder_entires(connection, folder_count=1, access_rights_count=4) - - for non_owner_gid in (editor_gid, viewer_gid, no_access_gid): - with pytest.raises(InsufficientPermissionsError): - await folder_delete( - connection, default_product_name, folder_id, {non_owner_gid} - ) - - with pytest.raises(FolderNotSharedWithGidError): - await folder_delete( - connection, default_product_name, folder_id, {share_with_error_gid} - ) - - await _assert_folder_entires(connection, folder_count=1, access_rights_count=4) - - -async def test_folder_delete_nested_folders( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], - make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]], -): - ####### - # SETUP - ####### - ( - gid_owner_a, - gid_owner_b, - gid_editor_a, - gid_editor_b, - gid_viewer, - gid_no_access, - gid_not_shared, - ) = get_unique_gids(7) - - async def _setup_folders() -> _FolderID: - await _assert_folder_entires(connection, folder_count=0) - folder_ids = await make_folders( - { - MkFolder( - name="root_folder", - gid=gid_owner_a, - shared_with={ - gid_owner_b: FolderAccessRole.OWNER, - gid_editor_a: FolderAccessRole.EDITOR, - gid_editor_b: FolderAccessRole.EDITOR, - gid_viewer: FolderAccessRole.VIEWER, - gid_no_access: FolderAccessRole.NO_ACCESS, - }, - ) - } - ) - folder_id_root_folder = folder_ids["root_folder"] - await _assert_folder_entires(connection, folder_count=1, access_rights_count=6) - - GIDS_WITH_CREATE_PERMISSIONS: set[_GroupID] = { - gid_owner_a, - gid_owner_b, - gid_editor_a, - gid_editor_b, - } - - previous_folder_id = folder_id_root_folder - for i in range(100): - previous_folder_id = await folder_create( - connection, - default_product_name, - f"f{i}", - GIDS_WITH_CREATE_PERMISSIONS, - parent=previous_folder_id, - ) - await _assert_folder_entires( - connection, folder_count=101, access_rights_count=106 - ) - return folder_id_root_folder - - ####### - # TESTS - ####### - - # 1. delete via `gid_owner_a` - folder_id_root_folder = await _setup_folders() - await folder_delete( - connection, default_product_name, folder_id_root_folder, {gid_owner_a} - ) - await _assert_folder_entires(connection, folder_count=0) - - # 2. delete via shared with `gid_owner_b` - folder_id_root_folder = await _setup_folders() - await folder_delete( - connection, default_product_name, folder_id_root_folder, {gid_owner_b} - ) - await _assert_folder_entires(connection, folder_count=0) - - # 3. 
delete is not permitted - folder_id_root_folder = await _setup_folders() - for no_permissions_gid in (gid_editor_a, gid_editor_b, gid_viewer): - with pytest.raises(InsufficientPermissionsError): - await folder_delete( - connection, - default_product_name, - folder_id_root_folder, - {no_permissions_gid}, - ) - for no_permissions_gid in (gid_not_shared,): - with pytest.raises(FolderNotSharedWithGidError): - await folder_delete( - connection, - default_product_name, - folder_id_root_folder, - {no_permissions_gid}, - ) - await _assert_folder_entires(connection, folder_count=101, access_rights_count=106) - - -async def test_folder_move( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], - make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]], -): - ####### - # SETUP - ####### - - (gid_sharing, gid_user_a, gid_user_b) = get_unique_gids(3) - - folder_ids = await make_folders( - { - MkFolder( - name="USER_A", - gid=gid_user_a, - children={MkFolder(name="f_user_a", gid=gid_user_a)}, - ), - MkFolder( - name="USER_B", - gid=gid_user_b, - children={MkFolder(name="f_user_b", gid=gid_user_b)}, - ), - MkFolder( - name="SHARED_AS_OWNER", - gid=gid_sharing, - children={ - MkFolder( - name="f_shared_as_owner_user_a", - gid=gid_sharing, - shared_with={gid_user_a: FolderAccessRole.OWNER}, - ), - MkFolder( - name="f_shared_as_owner_user_b", - gid=gid_sharing, - shared_with={gid_user_b: FolderAccessRole.OWNER}, - ), - }, - ), - MkFolder( - name="SHARED_AS_EDITOR", - gid=gid_sharing, - children={ - MkFolder( - name="f_shared_as_editor_user_a", - gid=gid_sharing, - shared_with={gid_user_a: FolderAccessRole.EDITOR}, - ), - MkFolder( - name="f_shared_as_editor_user_b", - gid=gid_sharing, - shared_with={gid_user_b: FolderAccessRole.EDITOR}, - ), - }, - ), - MkFolder( - name="SHARED_AS_VIEWER", - gid=gid_sharing, - children={ - MkFolder( - name="f_shared_as_viewer_user_a", - gid=gid_sharing, - shared_with={gid_user_a: FolderAccessRole.VIEWER}, - ), - MkFolder( - name="f_shared_as_viewer_user_b", - gid=gid_sharing, - shared_with={gid_user_b: FolderAccessRole.VIEWER}, - ), - }, - ), - MkFolder( - name="SHARED_AS_NO_ACCESS", - gid=gid_sharing, - children={ - MkFolder( - name="f_shared_as_no_access_user_a", - gid=gid_sharing, - shared_with={gid_user_a: FolderAccessRole.NO_ACCESS}, - ), - MkFolder( - name="f_shared_as_no_access_user_b", - gid=gid_sharing, - shared_with={gid_user_b: FolderAccessRole.NO_ACCESS}, - ), - }, - ), - MkFolder(name="NOT_SHARED", gid=gid_sharing), - } - ) - - folder_id_user_a = folder_ids["USER_A"] - folder_id_f_user_a = folder_ids["f_user_a"] - folder_id_user_b = folder_ids["USER_B"] - folder_id_f_user_b = folder_ids["f_user_b"] - folder_id_f_shared_as_owner_user_a = folder_ids["f_shared_as_owner_user_a"] - folder_id_f_shared_as_owner_user_b = folder_ids["f_shared_as_owner_user_b"] - folder_id_f_shared_as_editor_user_a = folder_ids["f_shared_as_editor_user_a"] - folder_id_f_shared_as_editor_user_b = folder_ids["f_shared_as_editor_user_b"] - folder_id_f_shared_as_viewer_user_a = folder_ids["f_shared_as_viewer_user_a"] - folder_id_f_shared_as_viewer_user_b = folder_ids["f_shared_as_viewer_user_b"] - folder_id_f_shared_as_no_access_user_a = folder_ids["f_shared_as_no_access_user_a"] - folder_id_f_shared_as_no_access_user_b = folder_ids["f_shared_as_no_access_user_b"] - folder_id_not_shared = folder_ids["NOT_SHARED"] - - async def _move_fails_not_shared_with_error( - gid: _GroupID, *, source: _FolderID, 
destination: _FolderID - ) -> None: - with pytest.raises(FolderNotSharedWithGidError): - await folder_move( - connection, - default_product_name, - source, - {gid}, - destination_folder_id=destination, - ) - - async def _move_fails_insufficient_permissions_error( - gid: _GroupID, *, source: _FolderID, destination: _FolderID - ) -> None: - with pytest.raises(InsufficientPermissionsError): - await folder_move( - connection, - default_product_name, - source, - {gid}, - destination_folder_id=destination, - ) - - async def _move_back_and_forth( - gid: _GroupID, - *, - source: _FolderID, - destination: _FolderID, - source_parent: _FolderID, - ) -> None: - async def _assert_folder_permissions( - connection: SAConnection, - *, - folder_id: _FolderID, - gid: _GroupID, - parent_folder: _FolderID, - ) -> None: - result = await connection.execute( - sa.select(folders_access_rights.c.folder_id) - .where(folders_access_rights.c.folder_id == folder_id) - .where(folders_access_rights.c.gid == gid) - .where(folders_access_rights.c.traversal_parent_id == parent_folder) - ) - rows = await result.fetchall() - assert rows is not None - assert len(rows) == 1 - - # check parent should be parent_before - await _assert_folder_permissions( - connection, folder_id=source, gid=gid, parent_folder=source_parent - ) - - await folder_move( - connection, - default_product_name, - source, - {gid}, - destination_folder_id=destination, - ) - - # check parent should be destination - await _assert_folder_permissions( - connection, folder_id=source, gid=gid, parent_folder=destination - ) - - await folder_move( - connection, - default_product_name, - source, - {gid}, - destination_folder_id=source_parent, - ) - - # check parent should be parent_before - await _assert_folder_permissions( - connection, folder_id=source, gid=gid, parent_folder=source_parent - ) - - ####### - # TESTS - ####### - - # 1. not working: - # - `USER_A/f_user_a -> USER_B` - await _move_fails_not_shared_with_error( - gid_user_a, source=folder_id_f_user_a, destination=folder_id_user_b - ) - # - `USER_B.f_user_b -/> USER_A` - await _move_fails_not_shared_with_error( - gid_user_b, source=folder_id_f_user_b, destination=folder_id_user_a - ) - # - `USER_A/f_user_a -> NOT_SHARED` - await _move_fails_not_shared_with_error( - gid_user_a, source=folder_id_f_user_a, destination=folder_id_not_shared - ) - # - `USER_B/f_user_b -> NOT_SHARED` - await _move_fails_not_shared_with_error( - gid_user_b, source=folder_id_f_user_b, destination=folder_id_not_shared - ) - # - `USER_A/f_user_a -> f_shared_as_no_access_user_a` - await _move_fails_insufficient_permissions_error( - gid_user_a, - source=folder_id_f_user_a, - destination=folder_id_f_shared_as_no_access_user_a, - ) - # - `USER_B/f_user_b -> f_shared_as_no_access_user_b` - await _move_fails_insufficient_permissions_error( - gid_user_b, - source=folder_id_f_user_b, - destination=folder_id_f_shared_as_no_access_user_b, - ) - # - `USER_A/f_user_a -> f_shared_as_viewer_user_a` - await _move_fails_insufficient_permissions_error( - gid_user_a, - source=folder_id_f_user_a, - destination=folder_id_f_shared_as_viewer_user_a, - ) - # - `USER_B/f_user_b -> f_shared_as_viewer_user_b` - await _move_fails_insufficient_permissions_error( - gid_user_b, - source=folder_id_f_user_b, - destination=folder_id_f_shared_as_viewer_user_b, - ) - - # 2. 
allowed oeprations: - # - `USER_A/f_user_a -> f_shared_as_editor_user_a` (& reverse) - await _move_back_and_forth( - gid_user_a, - source=folder_id_f_user_a, - destination=folder_id_f_shared_as_editor_user_a, - source_parent=folder_id_user_a, - ) - # - `USER_B/f_user_b -> f_shared_as_editor_user_b` (& reverse) - await _move_back_and_forth( - gid_user_b, - source=folder_id_f_user_b, - destination=folder_id_f_shared_as_editor_user_b, - source_parent=folder_id_user_b, - ) - # - `USER_A/f_user_a -> f_shared_as_owner_user_a` (& reverse) - await _move_back_and_forth( - gid_user_a, - source=folder_id_f_user_a, - destination=folder_id_f_shared_as_owner_user_a, - source_parent=folder_id_user_a, - ) - # - `USER_B/f_user_b -> f_shared_as_owner_user_b` (& reverse) - await _move_back_and_forth( - gid_user_b, - source=folder_id_f_user_b, - destination=folder_id_f_shared_as_owner_user_b, - source_parent=folder_id_user_b, - ) - - # 3. allowed to move in `root` folder - for to_move_folder_id, to_move_gid in [ - (folder_id_f_user_a, gid_user_a), - (folder_id_f_user_b, gid_user_b), - (folder_id_f_shared_as_owner_user_a, gid_user_a), - (folder_id_f_shared_as_owner_user_b, gid_user_b), - ]: - await folder_move( - connection, - default_product_name, - to_move_folder_id, - {to_move_gid}, - destination_folder_id=None, - ) - - # 4. not allowed to move in `root` folder - for to_move_folder_id, to_move_gid in [ - (folder_id_f_shared_as_editor_user_a, gid_user_a), - (folder_id_f_shared_as_editor_user_b, gid_user_b), - (folder_id_f_shared_as_viewer_user_a, gid_user_a), - (folder_id_f_shared_as_viewer_user_b, gid_user_b), - (folder_id_f_shared_as_no_access_user_a, gid_user_a), - (folder_id_f_shared_as_no_access_user_b, gid_user_b), - ]: - with pytest.raises(InsufficientPermissionsError): - await folder_move( - connection, - default_product_name, - to_move_folder_id, - {to_move_gid}, - destination_folder_id=None, - ) - - for to_move_gid in [gid_user_a, gid_user_b]: - with pytest.raises(FolderNotSharedWithGidError): - await folder_move( - connection, - default_product_name, - folder_id_not_shared, - {to_move_gid}, - destination_folder_id=None, - ) - - -async def test_move_only_owners_can_move( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], - make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]], -): - ####### - # SETUP - ####### - ( - gid_owner, - gid_editor, - gid_viewer, - gid_no_access, - gid_not_shared, - ) = get_unique_gids(5) - - folder_ids = await make_folders( - { - MkFolder( - name="to_move", - gid=gid_owner, - shared_with={ - gid_editor: FolderAccessRole.EDITOR, - gid_viewer: FolderAccessRole.VIEWER, - gid_no_access: FolderAccessRole.NO_ACCESS, - }, - ), - MkFolder(name="target_owner", gid=gid_owner), - MkFolder(name="target_editor", gid=gid_editor), - MkFolder(name="target_viewer", gid=gid_viewer), - MkFolder(name="target_no_access", gid=gid_no_access), - MkFolder(name="target_not_shared", gid=gid_not_shared), - } - ) - - folder_id_to_move = folder_ids["to_move"] - folder_id_target_owner = folder_ids["target_owner"] - folder_id_target_editor = folder_ids["target_editor"] - folder_id_target_viewer = folder_ids["target_viewer"] - folder_id_target_no_access = folder_ids["target_no_access"] - folder_id_target_not_shared = folder_ids["target_not_shared"] - - async def _fails_to_move(gid: _GroupID, destination_folder_id: _FolderID) -> None: - with pytest.raises(InsufficientPermissionsError): - await folder_move( - 
connection,
-            default_product_name,
-            folder_id_to_move,
-            {gid},
-            destination_folder_id=destination_folder_id,
-        )
-
-    #######
-    # TESTS
-    #######
-
-    # 1. no permissions to move
-    await _fails_to_move(gid_editor, folder_id_target_editor)
-    await _fails_to_move(gid_viewer, folder_id_target_viewer)
-    await _fails_to_move(gid_no_access, folder_id_target_no_access)
-
-    # 2. not shared with user
-    with pytest.raises(FolderNotSharedWithGidError):
-        await folder_move(
-            connection,
-            default_product_name,
-            folder_id_to_move,
-            {gid_not_shared},
-            destination_folder_id=folder_id_target_not_shared,
-        )
-
-    # 3. owner is able to move
-    await folder_move(
-        connection,
-        default_product_name,
-        folder_id_to_move,
-        {gid_owner},
-        destination_folder_id=folder_id_target_owner,
-    )
-
-
-async def test_move_group_non_standard_groups_raise_error(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-    create_fake_group: Callable[..., Awaitable[RowProxy]],
-):
-    #######
-    # SETUP
-    #######
-    gid_original_owner: _GroupID
-    (gid_original_owner,) = get_unique_gids(1)
-    gid_primary: _GroupID = (
-        await create_fake_group(connection, type=GroupType.PRIMARY)
-    ).gid
-    gid_everyone: _GroupID | None = await connection.scalar(
-        sa.select(groups.c.gid).where(groups.c.type == GroupType.EVERYONE)
-    )
-    assert gid_everyone
-    gid_standard: _GroupID = (
-        await create_fake_group(connection, type=GroupType.STANDARD)
-    ).gid
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="SHARING_USER",
-                gid=gid_original_owner,
-                shared_with={
-                    gid_primary: FolderAccessRole.EDITOR,
-                    gid_everyone: FolderAccessRole.EDITOR,
-                    gid_standard: FolderAccessRole.EDITOR,
-                },
-            ),
-            MkFolder(
-                name="PRIMARY",
-                gid=gid_original_owner,
-                shared_with={gid_primary: FolderAccessRole.OWNER},
-            ),
-            MkFolder(
-                name="EVERYONE",
-                gid=gid_original_owner,
-                shared_with={gid_everyone: FolderAccessRole.OWNER},
-            ),
-            MkFolder(
-                name="STANDARD",
-                gid=gid_original_owner,
-                shared_with={gid_standard: FolderAccessRole.OWNER},
-            ),
-        }
-    )
-
-    folder_id_sharing_user = folder_ids["SHARING_USER"]
-    folder_id_primary = folder_ids["PRIMARY"]
-    folder_id_everyone = folder_ids["EVERYONE"]
-    folder_id_standard = folder_ids["STANDARD"]
-
-    #######
-    # TESTS
-    #######
-
-    with pytest.raises(CannotMoveFolderSharedViaNonPrimaryGroupError) as exc:
-        await folder_move(
-            connection,
-            default_product_name,
-            folder_id_everyone,
-            {gid_everyone},
-            destination_folder_id=folder_id_sharing_user,
-        )
-    assert "EVERYONE" in f"{exc.value}"
-
-    with pytest.raises(CannotMoveFolderSharedViaNonPrimaryGroupError) as exc:
-        await folder_move(
-            connection,
-            default_product_name,
-            folder_id_standard,
-            {gid_standard},
-            destination_folder_id=folder_id_sharing_user,
-        )
-    assert "STANDARD" in f"{exc.value}"
-
-    # the primary group does not raise an error
-    await folder_move(
-        connection,
-        default_product_name,
-        folder_id_primary,
-        {gid_primary},
-        destination_folder_id=folder_id_sharing_user,
-    )
-
-
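-# For orientation before the next test: per folder_add_project /
-# folder_remove_project shown earlier in this diff, attaching a project to a
-# folder is a plain link-table write on folders_to_projects (after the
-# permission check), roughly
-#
-#   await connection.execute(
-#       folders_to_projects.insert().values(
-#           folder_id=folder_id, project_uuid=f"{project_uuid}"
-#       )
-#   )
-#
-# and removal is the matching delete filtered on folder_id and project_uuid.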
-async def test_add_remove_project_in_folder(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-    get_unique_project_uuids: Callable[[int], tuple[_ProjectID, ...]],
-):
-    #######
-    # SETUP
-    #######
-
-    (gid_owner, gid_editor, gid_viewer, gid_no_access) = get_unique_gids(4)
-    (project_uuid,) = get_unique_project_uuids(1)
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="f1",
-                gid=gid_owner,
-                shared_with={
-                    gid_editor: FolderAccessRole.EDITOR,
-                    gid_viewer: FolderAccessRole.VIEWER,
-                    gid_no_access: FolderAccessRole.NO_ACCESS,
-                },
-            )
-        }
-    )
-    folder_id_f1 = folder_ids["f1"]
-
-    async def _is_project_present(
-        connection: SAConnection,
-        folder_id: _FolderID,
-        project_id: _ProjectID,
-    ) -> bool:
-        async with connection.execute(
-            folders_to_projects.select()
-            .where(folders_to_projects.c.folder_id == folder_id)
-            .where(folders_to_projects.c.project_uuid == project_id)
-        ) as result:
-            rows = await result.fetchall()
-            assert rows is not None
-            return len(rows) == 1
-
-    async def _add_folder_as(gid: _GroupID) -> None:
-        await folder_add_project(
-            connection,
-            default_product_name,
-            folder_id_f1,
-            {gid},
-            project_uuid=project_uuid,
-        )
-        assert await _is_project_present(connection, folder_id_f1, project_uuid) is True
-
-    async def _remove_folder_as(gid: _GroupID) -> None:
-        await folder_remove_project(
-            connection,
-            default_product_name,
-            folder_id_f1,
-            {gid},
-            project_uuid=project_uuid,
-        )
-        assert (
-            await _is_project_present(connection, folder_id_f1, project_uuid) is False
-        )
-
-    assert await _is_project_present(connection, folder_id_f1, project_uuid) is False
-
-    #######
-    # TESTS
-    #######
-
-    # 1. owner can add and remove
-    await _add_folder_as(gid_owner)
-    await _remove_folder_as(gid_owner)
-
-    # 2. editor can add but cannot remove
-    await _add_folder_as(gid_editor)
-    with pytest.raises(InsufficientPermissionsError):
-        await _remove_folder_as(gid_editor)
-    await _remove_folder_as(gid_owner)  # cleanup
-
-    # 3. viewer can't add and can't remove
-    with pytest.raises(InsufficientPermissionsError):
-        await _add_folder_as(gid_viewer)
-    with pytest.raises(InsufficientPermissionsError):
-        await _remove_folder_as(gid_viewer)
-
-    # 4. 
no_access can't add and can't remove - with pytest.raises(InsufficientPermissionsError): - await _add_folder_as(gid_no_access) - with pytest.raises(InsufficientPermissionsError): - await _remove_folder_as(gid_no_access) - - -class ExpectedValues(NamedTuple): - id: _FolderID - my_access_rights: _FolderPermissions - access_rights: dict[_GroupID, _FolderPermissions] - - def __hash__(self): - return hash( - ( - self.id, - self.my_access_rights, - tuple(sorted(self.access_rights.items())), - ) - ) - - def __eq__(self, other): - if not isinstance(other, ExpectedValues): - return False - return ( - self.id == other.id - and self.my_access_rights == other.my_access_rights - and self.access_rights == other.access_rights - ) - - -def _assert_expected_entries( - folders: list[FolderEntry], *, expected: set[ExpectedValues] -) -> None: - for folder_entry in folders: - expected_values = ExpectedValues( - folder_entry.id, - folder_entry.my_access_rights, - folder_entry.access_rights, - ) - assert expected_values in expected - - -ALL_IN_ONE_PAGE_OFFSET: NonNegativeInt = 0 -ALL_IN_ONE_PAGE_LIMIT: NonNegativeInt = 1000 - - -async def _list_folder_as( - connection: SAConnection, - default_product_name: _ProductName, - folder_id: _FolderID | None, - gids: set[_GroupID], - offset: NonNegativeInt = ALL_IN_ONE_PAGE_OFFSET, - limit: NonNegativeInt = ALL_IN_ONE_PAGE_LIMIT, -) -> list[FolderEntry]: - - _, folders_db = await folder_list( - connection, default_product_name, folder_id, gids, offset=offset, limit=limit - ) - return folders_db - - -async def test_folder_list( - connection: SAConnection, - default_product_name: _ProductName, - get_unique_gids: Callable[[int], tuple[_GroupID, ...]], - make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]], -): - ####### - # SETUP - ####### - ( - gid_owner, - gid_editor, - gid_viewer, - gid_no_access, - gid_not_shared, - ) = get_unique_gids(5) - - folder_ids = await make_folders( - { - MkFolder( - name="owner_folder", - gid=gid_owner, - shared_with={ - gid_editor: FolderAccessRole.EDITOR, - gid_viewer: FolderAccessRole.VIEWER, - gid_no_access: FolderAccessRole.NO_ACCESS, - }, - children={ - *{MkFolder(name=f"f{i}", gid=gid_owner) for i in range(1, 10)}, - MkFolder( - name="f10", - gid=gid_owner, - children={ - MkFolder(name=f"sub_f{i}", gid=gid_owner) - for i in range(1, 11) - }, - ), - }, - ) - } - ) - - folder_id_owner_folder = folder_ids["owner_folder"] - folder_id_f1 = folder_ids["f1"] - folder_id_f2 = folder_ids["f2"] - folder_id_f3 = folder_ids["f3"] - folder_id_f4 = folder_ids["f4"] - folder_id_f5 = folder_ids["f5"] - folder_id_f6 = folder_ids["f6"] - folder_id_f7 = folder_ids["f7"] - folder_id_f8 = folder_ids["f8"] - folder_id_f9 = folder_ids["f9"] - folder_id_f10 = folder_ids["f10"] - folder_id_sub_f1 = folder_ids["sub_f1"] - folder_id_sub_f2 = folder_ids["sub_f2"] - folder_id_sub_f3 = folder_ids["sub_f3"] - folder_id_sub_f4 = folder_ids["sub_f4"] - folder_id_sub_f5 = folder_ids["sub_f5"] - folder_id_sub_f6 = folder_ids["sub_f6"] - folder_id_sub_f7 = folder_ids["sub_f7"] - folder_id_sub_f8 = folder_ids["sub_f8"] - folder_id_sub_f9 = folder_ids["sub_f9"] - folder_id_sub_f10 = folder_ids["sub_f10"] - - ALL_FOLDERS_FX = ( - folder_id_f1, - folder_id_f2, - folder_id_f3, - folder_id_f4, - folder_id_f5, - folder_id_f6, - folder_id_f7, - folder_id_f8, - folder_id_f9, - folder_id_f10, - ) - - ALL_FOLDERS_SUB_FX = ( - folder_id_sub_f1, - folder_id_sub_f2, - folder_id_sub_f3, - folder_id_sub_f4, - folder_id_sub_f5, - folder_id_sub_f6, - folder_id_sub_f7, - 
folder_id_sub_f8,
-        folder_id_sub_f9,
-        folder_id_sub_f10,
-    )
-
-    ALL_FOLDERS_AND_SUBFOLDERS = (
-        folder_id_owner_folder,
-        *ALL_FOLDERS_FX,
-        *ALL_FOLDERS_SUB_FX,
-    )
-
-    ACCESS_RIGHTS_BY_GID: dict[_GroupID, _FolderPermissions] = {
-        gid_owner: OWNER_PERMISSIONS,
-        gid_editor: EDITOR_PERMISSIONS,
-        gid_viewer: VIEWER_PERMISSIONS,
-        gid_no_access: NO_ACCESS_PERMISSIONS,
-    }
-
-    #######
-    # TESTS
-    #######
-
-    # 1. list all levels per gid with access
-    for listing_gid in (gid_owner, gid_editor, gid_viewer):
-        # list `root` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, None, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    folder_id_owner_folder,
-                    ACCESS_RIGHTS_BY_GID[listing_gid],
-                    {
-                        gid_owner: OWNER_PERMISSIONS,
-                        gid_editor: EDITOR_PERMISSIONS,
-                        gid_viewer: VIEWER_PERMISSIONS,
-                        gid_no_access: NO_ACCESS_PERMISSIONS,
-                    },
-                ),
-            },
-        )
-        # list `owner_folder` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, folder_id_owner_folder, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    fx,
-                    ACCESS_RIGHTS_BY_GID[listing_gid],
-                    {gid_owner: OWNER_PERMISSIONS},
-                )
-                for fx in ALL_FOLDERS_FX
-            },
-        )
-        # list `f10` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, folder_id_f10, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    sub_fx,
-                    ACCESS_RIGHTS_BY_GID[listing_gid],
-                    {gid_owner: OWNER_PERMISSIONS},
-                )
-                for sub_fx in ALL_FOLDERS_SUB_FX
-            },
-        )
-
-    # 2. list all levels for `gid_no_access`
-    # can always be run but should not list any entry
-    _assert_expected_entries(
-        await _list_folder_as(connection, default_product_name, None, {gid_no_access}),
-        expected=set(),
-    )
-    # there are insufficient permissions
-    for folder_id_to_check in ALL_FOLDERS_AND_SUBFOLDERS:
-        with pytest.raises(InsufficientPermissionsError):
-            await _list_folder_as(
-                connection, default_product_name, folder_id_to_check, {gid_no_access}
-            )
-
-    # 3. list all levels for `gid_not_shared`
-    # can always list the contents of the "root" folder for a gid
-    _assert_expected_entries(
-        await _list_folder_as(connection, default_product_name, None, {gid_not_shared}),
-        expected=set(),
-    )
-    for folder_id_to_check in ALL_FOLDERS_AND_SUBFOLDERS:
-        with pytest.raises(FolderNotSharedWithGidError):
-            await _list_folder_as(
-                connection, default_product_name, folder_id_to_check, {gid_not_shared}
-            )
-
-    # 4. 
list with pagination
-    for initial_limit in (1, 2, 3, 4, 5):
-        offset = 0
-        limit = initial_limit
-        found_folders: list[FolderEntry] = []
-        while items := await _list_folder_as(
-            connection,
-            default_product_name,
-            folder_id_owner_folder,
-            {gid_owner},
-            offset=offset,
-            limit=limit,
-        ):
-            found_folders.extend(items)
-            offset += limit
-            if len(items) != limit:
-                break
-
-        one_shot_query = await _list_folder_as(
-            connection, default_product_name, folder_id_owner_folder, {gid_owner}
-        )
-
-        assert len(found_folders) == len(one_shot_query)
-        assert found_folders == one_shot_query
-
-
-async def test_folder_list_shared_with_different_permissions(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
-    #######
-    # SETUP
-    #######
-
-    (gid_owner_a, gid_owner_b, gid_owner_c, gid_owner_level_2) = get_unique_gids(4)
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="f_owner_a",
-                gid=gid_owner_a,
-                shared_with={
-                    gid_owner_b: FolderAccessRole.OWNER,
-                    gid_owner_c: FolderAccessRole.OWNER,
-                },
-                children={
-                    MkFolder(
-                        name="f_owner_b",
-                        gid=gid_owner_b,
-                        children={
-                            MkFolder(
-                                name="f_owner_c",
-                                gid=gid_owner_c,
-                                shared_with={gid_owner_level_2: FolderAccessRole.OWNER},
-                                children={
-                                    MkFolder(name="f_sub_owner_c", gid=gid_owner_c),
-                                    MkFolder(
-                                        name="f_owner_level_2", gid=gid_owner_level_2
-                                    ),
-                                },
-                            )
-                        },
-                    )
-                },
-            )
-        }
-    )
-
-    folder_id_f_owner_a = folder_ids["f_owner_a"]
-    folder_id_f_owner_b = folder_ids["f_owner_b"]
-    folder_id_f_owner_c = folder_ids["f_owner_c"]
-    folder_id_f_sub_owner_c = folder_ids["f_sub_owner_c"]
-    folder_id_f_owner_level_2 = folder_ids["f_owner_level_2"]
-
-    #######
-    # TESTS
-    #######
-
-    # 1. `gid_owner_a`, `gid_owner_b`, `gid_owner_c` have the exact same view
-    for listing_gid in (gid_owner_a, gid_owner_b, gid_owner_c):
-        # list `root` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, None, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    folder_id_f_owner_a,
-                    OWNER_PERMISSIONS,
-                    {
-                        gid_owner_a: OWNER_PERMISSIONS,
-                        gid_owner_b: OWNER_PERMISSIONS,
-                        gid_owner_c: OWNER_PERMISSIONS,
-                    },
-                ),
-            },
-        )
-        # list `f_owner_a` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, folder_id_f_owner_a, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    folder_id_f_owner_b,
-                    OWNER_PERMISSIONS,
-                    {gid_owner_b: OWNER_PERMISSIONS},
-                ),
-            },
-        )
-        # list `f_owner_b` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, folder_id_f_owner_b, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    folder_id_f_owner_c,
-                    OWNER_PERMISSIONS,
-                    {
-                        gid_owner_c: OWNER_PERMISSIONS,
-                        gid_owner_level_2: OWNER_PERMISSIONS,
-                    },
-                ),
-            },
-        )
-        # list `f_owner_c` for gid
-        _assert_expected_entries(
-            await _list_folder_as(
-                connection, default_product_name, folder_id_f_owner_c, {listing_gid}
-            ),
-            expected={
-                ExpectedValues(
-                    folder_id_f_sub_owner_c,
-                    OWNER_PERMISSIONS,
-                    {
-                        gid_owner_c: OWNER_PERMISSIONS,
-                    },
-                ),
-                ExpectedValues(
-                    folder_id_f_owner_level_2,
-                    OWNER_PERMISSIONS,
-                    {
-                        gid_owner_level_2: OWNER_PERMISSIONS,
-                    },
-                ),
-            },
-        )
-
-    # 2. 
`gid_owner_level_2` can only access from `f_owner_c` downwards
-    # list `root` for `gid_owner_level_2`
-    _assert_expected_entries(
-        await _list_folder_as(
-            connection, default_product_name, None, {gid_owner_level_2}
-        ),
-        expected={
-            ExpectedValues(
-                folder_id_f_owner_c,
-                OWNER_PERMISSIONS,
-                {
-                    gid_owner_c: OWNER_PERMISSIONS,
-                    gid_owner_level_2: OWNER_PERMISSIONS,
-                },
-            ),
-        },
-    )
-    # list `f_owner_c` for `gid_owner_level_2`
-    _assert_expected_entries(
-        await _list_folder_as(
-            connection, default_product_name, folder_id_f_owner_c, {gid_owner_level_2}
-        ),
-        expected={
-            ExpectedValues(
-                folder_id_f_sub_owner_c,
-                OWNER_PERMISSIONS,
-                {
-                    gid_owner_c: OWNER_PERMISSIONS,
-                },
-            ),
-            ExpectedValues(
-                folder_id_f_owner_level_2,
-                OWNER_PERMISSIONS,
-                {
-                    gid_owner_level_2: OWNER_PERMISSIONS,
-                },
-            ),
-        },
-    )
-
-
-async def test_folder_list_in_root_with_different_groups_avoids_duplicate_entries(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
-    #######
-    # SETUP
-    #######
-
-    (gid_z43, gid_osparc, gid_user) = get_unique_gids(3)
-
-    await make_folders(
-        {
-            MkFolder(
-                name="f1",
-                gid=gid_user,
-                shared_with={
-                    gid_z43: FolderAccessRole.OWNER,
-                    gid_osparc: FolderAccessRole.OWNER,
-                },
-            ),
-            MkFolder(
-                name="f2",
-                gid=gid_z43,
-                shared_with={
-                    gid_osparc: FolderAccessRole.OWNER,
-                },
-            ),
-            MkFolder(
-                name="f3",
-                gid=gid_osparc,
-                shared_with={
-                    gid_z43: FolderAccessRole.OWNER,
-                },
-            ),
-        }
-    )
-
-    #######
-    # TESTS
-    #######
-
-    # 1. gid_z43 and gid_osparc see all folders
-    for gid_all_folders in (gid_z43, gid_osparc):
-        entries_z43 = await _list_folder_as(
-            connection, default_product_name, None, {gid_all_folders}
-        )
-        assert len(entries_z43) == 3
-
-    # 2. gid_user only sees its own folder
-    entries_user = await _list_folder_as(
-        connection, default_product_name, None, {gid_user}
-    )
-    assert len(entries_user) == 1
-
-    # 3. 
all gids see all folders
-    entries_all_groups = await _list_folder_as(
-        connection, default_product_name, None, {gid_z43, gid_osparc, gid_user}
-    )
-    assert len(entries_all_groups) == 3
-
-
-async def test_regression_list_folder_parent(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
-    #######
-    # SETUP
-    #######
-
-    (gid_user,) = get_unique_gids(1)
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="f1",
-                gid=gid_user,
-                children={
-                    MkFolder(
-                        name="f2",
-                        gid=gid_user,
-                        children={
-                            MkFolder(name="f3", gid=gid_user),
-                        },
-                    )
-                },
-            ),
-        }
-    )
-
-    folder_id_f1 = folder_ids["f1"]
-    folder_id_f2 = folder_ids["f2"]
-    folder_id_f3 = folder_ids["f3"]
-
-    #######
-    # TESTS
-    #######
-
-    for folder_id in (None, folder_id_f1, folder_id_f2):
-        folder_content = await _list_folder_as(
-            connection, default_product_name, folder_id, {gid_user}
-        )
-        assert len(folder_content) == 1
-        assert folder_content[0]
-        assert folder_content[0].parent_folder == folder_id
-
-    f3_content = await _list_folder_as(
-        connection, default_product_name, folder_id_f3, {gid_user}
-    )
-    assert len(f3_content) == 0
-
-
-async def test_folder_get(
-    connection: SAConnection,
-    default_product_name: _ProductName,
-    get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-    make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
-    #######
-    # SETUP
-    #######
-    (
-        gid_owner,
-        gid_other_owner,
-        gid_not_shared,
-    ) = get_unique_gids(3)
-
-    folder_ids = await make_folders(
-        {
-            MkFolder(
-                name="owner_folder",
-                gid=gid_owner,
-                shared_with={
-                    gid_other_owner: FolderAccessRole.OWNER,
-                },
-                children={
-                    *{MkFolder(name=f"f{i}", gid=gid_owner) for i in range(1, 3)},
-                    MkFolder(
-                        name="f10",
-                        gid=gid_owner,
-                        children={
-                            MkFolder(name=f"sub_f{i}", gid=gid_owner)
-                            for i in range(1, 3)
-                        },
-                    ),
-                },
-            )
-        }
-    )
-
-    folder_id_owner_folder = folder_ids["owner_folder"]
-    folder_id_f1 = folder_ids["f1"]
-    folder_id_f2 = folder_ids["f2"]
-    folder_id_sub_f1 = folder_ids["sub_f1"]
-    folder_id_sub_f2 = folder_ids["sub_f2"]
-
-    #######
-    # TESTS
-    #######
-
-    # 1. query existing directories
-    for folder_id_to_list in (
-        None,
-        folder_id_owner_folder,
-        folder_id_f1,
-        folder_id_f2,
-        folder_id_sub_f1,
-        folder_id_sub_f2,
-    ):
-        folder_entries = await _list_folder_as(
-            connection, default_product_name, folder_id_to_list, {gid_owner}
-        )
-        for entry in folder_entries:
-            queried_folder = await folder_get(
-                connection, default_product_name, entry.id, {gid_owner}
-            )
-            assert entry == queried_folder
-
-    # 2. query via gid_not_shared
-    with pytest.raises(FolderNotSharedWithGidError):
-        await folder_get(
-            connection, default_product_name, folder_id_owner_folder, {gid_not_shared}
-        )
-
-    # 3. 
query with missing folder_id - missing_folder_id = 12312313123 - for gid_to_test in ( - gid_owner, - gid_other_owner, - gid_not_shared, - ): - with pytest.raises(FolderNotFoundError): - await folder_get( - connection, default_product_name, missing_folder_id, {gid_to_test} - ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py index 763976e2504..051f522fcd9 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py @@ -16,7 +16,7 @@ from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem from servicelib.aiohttp.long_running_tasks.client import long_running_task_request -from simcore_postgres_database.models.folders import folders +from simcore_postgres_database.models.folders_v2 import folders_v2 from simcore_postgres_database.models.workspaces import workspaces from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.folders._folders_api import create_folder @@ -51,7 +51,7 @@ async def create_workspace_and_folder( yield (workspace.workspace_id, folder.folder_id) with postgres_db.connect() as con: - con.execute(folders.delete()) + con.execute(folders_v2.delete()) con.execute(workspaces.delete()) From 83b31523b787539921670061f340fc4f529eea4d Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:33:37 +0200 Subject: [PATCH 020/104] =?UTF-8?q?=F0=9F=8E=A8=20improve=20DB=20foreign?= =?UTF-8?q?=20key=20dependencies=20(=F0=9F=97=83=EF=B8=8F)=20(#6428)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...07000d_improve_foreign_key_dependencies.py | 110 ++++++++++++++++++ ...esource_tracker_pricing_plan_to_service.py | 7 ++ .../models/resource_tracker_pricing_plans.py | 9 +- .../resource_tracker_pricing_unit_costs.py | 16 ++- .../models/wallets.py | 13 ++- .../models/workspaces.py | 13 ++- ...test_api_resource_tracker_pricing_plans.py | 20 ++++ ..._api_resource_tracker_pricing_plans_rpc.py | 89 +++++++++----- ...age_triggered_by_listening_with_billing.py | 10 ++ ...t_process_rabbitmq_message_with_billing.py | 10 ++ ...ss_rabbitmq_message_with_billing_cost_0.py | 10 ++ 11 files changed, 274 insertions(+), 33 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py new file mode 100644 index 00000000000..16bfc82acd8 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py @@ -0,0 +1,110 @@ +"""improve foreign key dependencies + +Revision ID: 10729e07000d +Revises: 47ca7335e146 +Create Date: 2024-09-24 07:52:20.253076+00:00 + +""" +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "10729e07000d" +down_revision = "47ca7335e146" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_foreign_key( + "fk_rut_pricing_plan_to_service_key_and_version", + "resource_tracker_pricing_plan_to_service", + "services_meta_data", + ["service_key", "service_version"], + ["key", "version"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.drop_index( + "ix_resource_tracker_pricing_plans_product_name", + table_name="resource_tracker_pricing_plans", + ) + op.create_foreign_key( + "fk_rut_pricing_plans_product_name", + "resource_tracker_pricing_plans", + "products", + ["product_name"], + ["name"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "fk_resource_tracker_pricing_units_costs_pricing_plan_id", + "resource_tracker_pricing_unit_costs", + "resource_tracker_pricing_plans", + ["pricing_plan_id"], + ["pricing_plan_id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "fk_resource_tracker_pricing_units_costs_pricing_unit_id", + "resource_tracker_pricing_unit_costs", + "resource_tracker_pricing_units", + ["pricing_unit_id"], + ["pricing_unit_id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "fk_wallets_product_name", + "wallets", + "products", + ["product_name"], + ["name"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "fk_workspaces_product_name", + "workspaces", + "products", + ["product_name"], + ["name"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint("fk_workspaces_product_name", "workspaces", type_="foreignkey") + op.drop_constraint("fk_wallets_product_name", "wallets", type_="foreignkey") + op.drop_constraint( + "fk_resource_tracker_pricing_units_costs_pricing_unit_id", + "resource_tracker_pricing_unit_costs", + type_="foreignkey", + ) + op.drop_constraint( + "fk_resource_tracker_pricing_units_costs_pricing_plan_id", + "resource_tracker_pricing_unit_costs", + type_="foreignkey", + ) + op.drop_constraint( + "fk_rut_pricing_plans_product_name", + "resource_tracker_pricing_plans", + type_="foreignkey", + ) + op.create_index( + "ix_resource_tracker_pricing_plans_product_name", + "resource_tracker_pricing_plans", + ["product_name"], + unique=False, + ) + op.drop_constraint( + "fk_rut_pricing_plan_to_service_key_and_version", + "resource_tracker_pricing_plan_to_service", + type_="foreignkey", + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py index b0040d93ae6..820ec42fc50 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py @@ -45,4 +45,11 @@ doc="Option to mark default pricing plan for the service (ex. 
when there are more pricing plans for the same service)", ), # --------------------------- + sa.ForeignKeyConstraint( + ["service_key", "service_version"], + ["services_meta_data.key", "services_meta_data.version"], + name="fk_rut_pricing_plan_to_service_key_and_version", + onupdate="CASCADE", + ondelete="CASCADE", + ), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py index 8ec50b0f206..81d98ebcac1 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py @@ -33,9 +33,14 @@ class PricingPlanClassification(str, enum.Enum): sa.Column( "product_name", sa.String, + sa.ForeignKey( + "products.name", + onupdate="CASCADE", + ondelete="CASCADE", + name="fk_rut_pricing_plans_product_name", + ), nullable=False, - doc="Product name", - index=True, + doc="Products unique name", ), sa.Column( "display_name", diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py index defaf49eb4a..46031532387 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py @@ -22,8 +22,14 @@ sa.Column( "pricing_plan_id", sa.BigInteger, + sa.ForeignKey( + "resource_tracker_pricing_plans.pricing_plan_id", + name="fk_resource_tracker_pricing_units_costs_pricing_plan_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), nullable=False, - doc="Parent pricing plan", + doc="Foreign key to pricing plan", index=True, ), sa.Column( @@ -35,8 +41,14 @@ sa.Column( "pricing_unit_id", sa.BigInteger, + sa.ForeignKey( + "resource_tracker_pricing_units.pricing_unit_id", + name="fk_resource_tracker_pricing_units_costs_pricing_unit_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), nullable=False, - doc="Parent pricing unit", + doc="Foreign key to pricing unit", index=True, ), sa.Column( diff --git a/packages/postgres-database/src/simcore_postgres_database/models/wallets.py b/packages/postgres-database/src/simcore_postgres_database/models/wallets.py index e26545f1f4a..3c765529976 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/wallets.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/wallets.py @@ -50,7 +50,18 @@ class WalletStatus(str, enum.Enum): ), column_created_datetime(timezone=True), column_modified_datetime(timezone=True), - sa.Column("product_name", sa.String, nullable=False, doc="Product name"), + sa.Column( + "product_name", + sa.String, + sa.ForeignKey( + "products.name", + onupdate="CASCADE", + ondelete="CASCADE", + name="fk_wallets_product_name", + ), + nullable=False, + doc="Products unique name", + ), ) # ------------------------ TRIGGERS diff --git a/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py b/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py index f4b76812a6c..998c7676761 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py @@ -34,7 +34,18 @@ 
nullable=False, doc="Identifier of the group that owns this workspace (Should be just PRIMARY GROUP)", ), - sa.Column("product_name", sa.String, nullable=False, doc="Product name"), + sa.Column( + "product_name", + sa.String, + sa.ForeignKey( + "products.name", + onupdate="CASCADE", + ondelete="CASCADE", + name="fk_workspaces_product_name", + ), + nullable=False, + doc="Products unique name", + ), column_created_datetime(timezone=True), column_modified_datetime(timezone=True), ) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py index 5e241e60767..609b0ebd54f 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py @@ -27,6 +27,7 @@ from simcore_postgres_database.models.resource_tracker_pricing_units import ( resource_tracker_pricing_units, ) +from simcore_postgres_database.models.services import services_meta_data from starlette import status from yarl import URL @@ -184,6 +185,15 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato modified=datetime.now(tz=timezone.utc), ) ) + + con.execute( + services_meta_data.insert().values( + key=_SERVICE_KEY, + version=_SERVICE_VERSION, + name="name", + description="description", + ) + ) con.execute( resource_tracker_pricing_plan_to_service.insert().values( pricing_plan_id=_PRICING_PLAN_ID, @@ -192,6 +202,15 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato service_default_plan=True, ) ) + + con.execute( + services_meta_data.insert().values( + key=_SERVICE_KEY_2, + version=_SERVICE_VERSION_2, + name="name", + description="description", + ) + ) con.execute( resource_tracker_pricing_plan_to_service.insert().values( pricing_plan_id=_PRICING_PLAN_ID_2, @@ -207,6 +226,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato con.execute(resource_tracker_pricing_units.delete()) con.execute(resource_tracker_pricing_plans.delete()) con.execute(resource_tracker_pricing_unit_costs.delete()) + con.execute(services_meta_data.delete()) async def test_get_default_pricing_plan_for_service( diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py index 5a12fd24dbe..4ec8d45bb72 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py @@ -37,6 +37,7 @@ from simcore_postgres_database.models.resource_tracker_pricing_units import ( resource_tracker_pricing_units, ) +from simcore_postgres_database.models.services import services_meta_data pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = [ @@ -44,18 +45,52 @@ ] +_SERVICE_KEY = "simcore/services/comp/itis/sleeper" +_SERVICE_VERSION_1 = "2.0.2" +_SERVICE_VERSION_2 = "3.0.0" + +_SERVICE_KEY_3 = "simcore/services/comp/itis/different-service" +_SERVICE_VERSION_3 = "1.0.1" + + @pytest.fixture() def resource_tracker_setup_db( postgres_db: sa.engine.Engine, ) -> Iterator[None]: with postgres_db.connect() as con: + con.execute( + services_meta_data.insert().values( + 
key=_SERVICE_KEY, + version=_SERVICE_VERSION_1, + name="name", + description="description", + ) + ) + con.execute( + services_meta_data.insert().values( + key=_SERVICE_KEY, + version=_SERVICE_VERSION_2, + name="name", + description="description", + ) + ) + con.execute( + services_meta_data.insert().values( + key=_SERVICE_KEY_3, + version=_SERVICE_VERSION_3, + name="name", + description="description", + ) + ) + yield con.execute(resource_tracker_pricing_unit_costs.delete()) con.execute(resource_tracker_pricing_units.delete()) con.execute(resource_tracker_pricing_plan_to_service.delete()) con.execute(resource_tracker_pricing_plans.delete()) + con.execute(services_meta_data.delete()) async def test_rpc_pricing_plans_workflow( @@ -68,7 +103,7 @@ async def test_rpc_pricing_plans_workflow( result = await pricing_plans.create_pricing_plan( rpc_client, data=PricingPlanCreate( - product_name="s4l", + product_name="osparc", display_name=_display_name, description=faker.sentence(), classification=PricingPlanClassification.TIER, @@ -84,7 +119,7 @@ async def test_rpc_pricing_plans_workflow( _update_description = "description name updated" result = await pricing_plans.update_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingPlanUpdate( pricing_plan_id=_pricing_plan_id, display_name=_update_display_name, @@ -99,7 +134,7 @@ async def test_rpc_pricing_plans_workflow( result = await pricing_plans.get_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) assert isinstance(result, PricingPlanGet) @@ -110,7 +145,7 @@ async def test_rpc_pricing_plans_workflow( result = await pricing_plans.list_pricing_plans( rpc_client, - product_name="s4l", + product_name="osparc", ) assert isinstance(result, list) assert len(result) == 1 @@ -120,7 +155,7 @@ async def test_rpc_pricing_plans_workflow( # Now I will deactivate the pricing plan result = await pricing_plans.update_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingPlanUpdate( pricing_plan_id=_pricing_plan_id, display_name=faker.word(), @@ -142,7 +177,7 @@ async def test_rpc_pricing_plans_with_units_workflow( result = await pricing_plans.create_pricing_plan( rpc_client, data=PricingPlanCreate( - product_name="s4l", + product_name="osparc", display_name=_display_name, description=faker.sentence(), classification=PricingPlanClassification.TIER, @@ -156,7 +191,7 @@ async def test_rpc_pricing_plans_with_units_workflow( result = await pricing_units.create_pricing_unit( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="SMALL", @@ -175,7 +210,7 @@ async def test_rpc_pricing_plans_with_units_workflow( # Get pricing plan result = await pricing_plans.get_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) assert isinstance(result, PricingPlanGet) @@ -187,7 +222,7 @@ async def test_rpc_pricing_plans_with_units_workflow( _unit_name = "VERY SMALL" result = await pricing_units.update_pricing_unit( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingUnitWithCostUpdate( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, @@ -206,7 +241,7 @@ async def test_rpc_pricing_plans_with_units_workflow( # Update pricing unit with COST update! 
result = await pricing_units.update_pricing_unit( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingUnitWithCostUpdate( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, @@ -228,7 +263,7 @@ async def test_rpc_pricing_plans_with_units_workflow( # Test get pricing unit result = await pricing_units.get_pricing_unit( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, ) @@ -238,7 +273,7 @@ async def test_rpc_pricing_plans_with_units_workflow( # Create one more unit result = await pricing_units.create_pricing_unit( rpc_client, - product_name="s4l", + product_name="osparc", data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="LARGE", @@ -256,7 +291,7 @@ async def test_rpc_pricing_plans_with_units_workflow( # Get pricing plan with units result = await pricing_plans.get_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) assert isinstance(result, PricingPlanGet) @@ -275,7 +310,7 @@ async def test_rpc_pricing_plans_to_service_workflow( result = await pricing_plans.create_pricing_plan( rpc_client, data=PricingPlanCreate( - product_name="s4l", + product_name="osparc", display_name=faker.word(), description=faker.sentence(), classification=PricingPlanClassification.TIER, @@ -288,19 +323,19 @@ async def test_rpc_pricing_plans_to_service_workflow( result = ( await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) ) assert isinstance(result, list) assert result == [] - _first_service_version = ServiceVersion("2.0.2") + _first_service_version = ServiceVersion(_SERVICE_VERSION_1) result = await pricing_plans.connect_service_to_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, - service_key=ServiceKey("simcore/services/comp/itis/sleeper"), + service_key=ServiceKey(_SERVICE_KEY), service_version=_first_service_version, ) assert isinstance(result, PricingPlanToServiceGet) @@ -310,7 +345,7 @@ async def test_rpc_pricing_plans_to_service_workflow( result = ( await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) ) @@ -318,12 +353,12 @@ async def test_rpc_pricing_plans_to_service_workflow( assert len(result) == 1 # Connect different version - _second_service_version = ServiceVersion("3.0.0") + _second_service_version = ServiceVersion(_SERVICE_VERSION_2) result = await pricing_plans.connect_service_to_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, - service_key=ServiceKey("simcore/services/comp/itis/sleeper"), + service_key=ServiceKey(_SERVICE_KEY), service_version=_second_service_version, ) assert isinstance(result, PricingPlanToServiceGet) @@ -333,7 +368,7 @@ async def test_rpc_pricing_plans_to_service_workflow( result = ( await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) ) @@ -341,13 +376,13 @@ async def test_rpc_pricing_plans_to_service_workflow( assert len(result) == 2 # Connect different service - _different_service_key = ServiceKey("simcore/services/comp/itis/different-service") + _different_service_key = ServiceKey(_SERVICE_KEY_3) result = 
await pricing_plans.connect_service_to_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, service_key=_different_service_key, - service_version=ServiceVersion("1.0.0"), + service_version=ServiceVersion(_SERVICE_VERSION_3), ) assert isinstance(result, PricingPlanToServiceGet) assert result.pricing_plan_id == _pricing_plan_id @@ -356,7 +391,7 @@ async def test_rpc_pricing_plans_to_service_workflow( result = ( await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan( rpc_client, - product_name="s4l", + product_name="osparc", pricing_plan_id=_pricing_plan_id, ) ) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py index 8d95ae78d75..7a5e2114c1d 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py @@ -28,6 +28,7 @@ from simcore_postgres_database.models.resource_tracker_pricing_units import ( resource_tracker_pricing_units, ) +from simcore_postgres_database.models.services import services_meta_data from .conftest import assert_service_runs_db_row @@ -128,6 +129,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato modified=datetime.now(tz=timezone.utc), ) ) + con.execute( + services_meta_data.insert().values( + key="simcore/services/comp/itis/sleeper", + version="1.0.16", + name="name", + description="description", + ) + ) con.execute( resource_tracker_pricing_plan_to_service.insert().values( pricing_plan_id=1, @@ -144,6 +153,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato con.execute(resource_tracker_pricing_plans.delete()) con.execute(resource_tracker_pricing_unit_costs.delete()) con.execute(resource_tracker_credit_transactions.delete()) + con.execute(services_meta_data.delete()) @pytest.mark.flaky(max_runs=3) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py index 92946509e91..4b6c1a0dfac 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py @@ -30,6 +30,7 @@ from simcore_postgres_database.models.resource_tracker_pricing_units import ( resource_tracker_pricing_units, ) +from simcore_postgres_database.models.services import services_meta_data from simcore_service_resource_usage_tracker.modules.db.repositories.resource_tracker import ( ResourceTrackerRepository, ) @@ -142,6 +143,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato modified=datetime.now(tz=timezone.utc), ) ) + con.execute( + services_meta_data.insert().values( + key="simcore/services/comp/itis/sleeper", + version="1.0.16", + name="name", + description="description", + ) + ) con.execute( resource_tracker_pricing_plan_to_service.insert().values( pricing_plan_id=1, @@ -158,6 +167,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato con.execute(resource_tracker_pricing_plans.delete()) 
con.execute(resource_tracker_pricing_unit_costs.delete()) con.execute(resource_tracker_credit_transactions.delete()) + con.execute(services_meta_data.delete()) @pytest.fixture diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py index d5bc497fb0f..c1d62af5b23 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py @@ -30,6 +30,7 @@ from simcore_postgres_database.models.resource_tracker_pricing_units import ( resource_tracker_pricing_units, ) +from simcore_postgres_database.models.services import services_meta_data from simcore_service_resource_usage_tracker.modules.db.repositories.resource_tracker import ( ResourceTrackerRepository, ) @@ -88,6 +89,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato modified=datetime.now(tz=timezone.utc), ) ) + con.execute( + services_meta_data.insert().values( + key="simcore/services/comp/itis/sleeper", + version="1.0.16", + name="name", + description="description", + ) + ) con.execute( resource_tracker_pricing_plan_to_service.insert().values( pricing_plan_id=1, @@ -104,6 +113,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato con.execute(resource_tracker_pricing_plans.delete()) con.execute(resource_tracker_pricing_unit_costs.delete()) con.execute(resource_tracker_credit_transactions.delete()) + con.execute(services_meta_data.delete()) @pytest.fixture From 529122e9296c9d2487fe1dcee9f852413986dfdf Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Tue, 24 Sep 2024 15:37:43 +0200 Subject: [PATCH 021/104] =?UTF-8?q?=E2=9C=A8=20`dynamic-scheduler`=20pushe?= =?UTF-8?q?s=20service=20state=20to=20the=20frontend=20(=E2=9A=A0=EF=B8=8F?= =?UTF-8?q?=20devops)=20(#5892)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../api_schemas_dynamic_scheduler/socketio.py | 3 + .../api_schemas_webserver/projects_nodes.py | 43 +- .../src/models_library/services_enums.py | 9 +- packages/models-library/tests/conftest.py | 1 + .../models-library/tests/test_utils_nodes.py | 6 - .../deferred_tasks/_redis_task_tracker.py | 7 +- .../src/servicelib/services_utils.py | 17 + packages/service-library/tests/conftest.py | 6 +- .../tests/deferred_tasks/conftest.py | 6 +- .../tests/deferred_tasks/example_app.py | 25 +- .../test__base_deferred_handler.py | 5 +- .../src/settings_library/redis.py | 1 + services/director-v2/tests/conftest.py | 3 +- .../tests/unit/test_modules_dask_client.py | 5 - services/docker-compose-ops.yml | 3 +- services/docker-compose.yml | 14 +- .../dynamic-scheduler/requirements/_base.in | 3 +- .../dynamic-scheduler/requirements/_base.txt | 9 + .../api/rest/_dependencies.py | 9 +- .../api/rest/_health.py | 12 +- .../api/rpc/_services.py | 3 + .../core/application.py | 12 + .../services/deferred_manager.py | 24 + .../services/notifier/__init__.py | 7 + .../services/notifier/_notifier.py | 55 +++ .../services/notifier/_setup.py | 8 + .../services/notifier/_socketio.py | 32 ++ .../services/redis.py | 41 +- .../services/service_tracker/__init__.py | 33 ++ .../services/service_tracker/_api.py | 248 +++++++++++ .../services/service_tracker/_models.py | 123 
++++++ .../services/service_tracker/_setup.py | 19 + .../services/service_tracker/_tracker.py | 44 ++ .../services/status_monitor/__init__.py | 3 + .../status_monitor/_deferred_get_status.py | 85 ++++ .../services/status_monitor/_monitor.py | 121 +++++ .../services/status_monitor/_setup.py | 28 ++ services/dynamic-scheduler/tests/conftest.py | 46 +- .../tests/unit/api_rest/conftest.py | 20 +- .../unit/api_rest/test_api_rest__health.py | 7 +- .../unit/api_rest/test_api_rest__meta.py | 13 - .../unit/api_rpc/test_api_rpc__services.py | 2 +- .../dynamic-scheduler/tests/unit/conftest.py | 29 ++ .../tests/unit/service_tracker/test__api.py | 325 ++++++++++++++ .../unit/service_tracker/test__models.py | 57 +++ .../unit/service_tracker/test__tracker.py | 94 ++++ .../test_services_status_monitor__monitor.py | 415 ++++++++++++++++++ .../tests/unit/test_services_rabbitmq.py | 4 + .../tests/unit/test_services_redis.py | 10 +- .../projects/_nodes_handlers.py | 7 +- .../unit/isolated/test_dynamic_scheduler.py | 4 +- .../02/test_projects_states_handlers.py | 2 +- 52 files changed, 2010 insertions(+), 98 deletions(-) create mode 100644 packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py create mode 100644 services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py create mode 100644 services/dynamic-scheduler/tests/unit/conftest.py create mode 100644 services/dynamic-scheduler/tests/unit/service_tracker/test__api.py create mode 100644 services/dynamic-scheduler/tests/unit/service_tracker/test__models.py create mode 100644 services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py create mode 100644 services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py new file mode 100644 index 
00000000000..89a493a56cc --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py @@ -0,0 +1,3 @@ +from typing import Final + +SOCKET_IO_SERVICE_STATUS_EVENT: Final[str] = "serviceStatus" diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index 25a6f5fb0dd..0c2bdd07c7f 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -93,19 +93,36 @@ class NodeGet(OutputSchema): class Config: schema_extra: ClassVar[dict[str, Any]] = { - "example": { - "published_port": 30000, - "entrypoint": "/the/entry/point/is/here", - "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "service_key": "simcore/services/comp/itis/sleeper", - "service_version": "1.2.3", - "service_host": "jupyter_E1O2E-LAH", - "service_port": 8081, - "service_basepath": "/x/E1O2E-LAH", - "service_state": "pending", - "service_message": "no suitable node (insufficient resources on 1 node)", - "user_id": 123, - } + "examples": [ + # computational + { + "published_port": 30000, + "entrypoint": "/the/entry/point/is/here", + "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "service_key": "simcore/services/comp/itis/sleeper", + "service_version": "1.2.3", + "service_host": "jupyter_E1O2E-LAH", + "service_port": 8081, + "service_basepath": "/x/E1O2E-LAH", + "service_state": "pending", + "service_message": "no suitable node (insufficient resources on 1 node)", + "user_id": 123, + }, + # dynamic + { + "published_port": 30000, + "entrypoint": "/the/entry/point/is/here", + "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "service_key": "simcore/services/dynamic/some-dynamic-service", + "service_version": "1.2.3", + "service_host": "jupyter_E1O2E-LAH", + "service_port": 8081, + "service_basepath": "/x/E1O2E-LAH", + "service_state": "pending", + "service_message": "no suitable node (insufficient resources on 1 node)", + "user_id": 123, + }, + ] } diff --git a/packages/models-library/src/models_library/services_enums.py b/packages/models-library/src/models_library/services_enums.py index 50a83313482..ec5414218e3 100644 --- a/packages/models-library/src/models_library/services_enums.py +++ b/packages/models-library/src/models_library/services_enums.py @@ -11,14 +11,18 @@ class ServiceBootType(str, Enum): @functools.total_ordering @unique class ServiceState(Enum): + FAILED = "failed" + PENDING = "pending" PULLING = "pulling" STARTING = "starting" RUNNING = "running" - COMPLETE = "complete" - FAILED = "failed" + STOPPING = "stopping" + COMPLETE = "complete" + IDLE = "idle" + def __lt__(self, other): if self.__class__ is other.__class__: comparison_order = ServiceState.comparison_order() @@ -39,6 +43,7 @@ def comparison_order() -> dict["ServiceState", int]: ServiceState.RUNNING: 4, ServiceState.STOPPING: 5, ServiceState.COMPLETE: 6, + ServiceState.IDLE: 7, } diff --git a/packages/models-library/tests/conftest.py b/packages/models-library/tests/conftest.py index 9169e570b51..8bf433b901d 100644 --- a/packages/models-library/tests/conftest.py +++ b/packages/models-library/tests/conftest.py @@ -9,6 +9,7 @@ import pytest pytest_plugins = [ + "pytest_simcore.faker_projects_data", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git 
a/packages/models-library/tests/test_utils_nodes.py b/packages/models-library/tests/test_utils_nodes.py index 47465ce236d..b4634770a97 100644 --- a/packages/models-library/tests/test_utils_nodes.py +++ b/packages/models-library/tests/test_utils_nodes.py @@ -16,12 +16,6 @@ from models_library.utils.nodes import compute_node_hash from pydantic import AnyUrl, parse_obj_as - -@pytest.fixture() -def node_id() -> NodeID: - return uuid4() - - ANOTHER_NODE_ID = uuid4() ANOTHER_NODE_OUTPUT_KEY = "the_output_link" ANOTHER_NODE_PAYLOAD = {"outputs": {ANOTHER_NODE_OUTPUT_KEY: 36}} diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py index 69762108e71..bbe45ccc39a 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py @@ -1,3 +1,4 @@ +import pickle from typing import Final from uuid import uuid4 @@ -33,13 +34,15 @@ async def get_new_unique_identifier(self) -> TaskUID: async def _get_raw(self, redis_key: str) -> TaskScheduleModel | None: found_data = await self.redis_client_sdk.redis.get(redis_key) - return None if found_data is None else TaskScheduleModel.parse_raw(found_data) + return None if found_data is None else pickle.loads(found_data) # noqa: S301 async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None: return await self._get_raw(_get_key(task_uid)) async def save(self, task_uid: TaskUID, task_schedule: TaskScheduleModel) -> None: - await self.redis_client_sdk.redis.set(_get_key(task_uid), task_schedule.json()) + await self.redis_client_sdk.redis.set( + _get_key(task_uid), pickle.dumps(task_schedule) + ) async def remove(self, task_uid: TaskUID) -> None: await self.redis_client_sdk.redis.delete(_get_key(task_uid)) diff --git a/packages/service-library/src/servicelib/services_utils.py b/packages/service-library/src/servicelib/services_utils.py index 60a9caf92a5..98aace49c6c 100644 --- a/packages/service-library/src/servicelib/services_utils.py +++ b/packages/service-library/src/servicelib/services_utils.py @@ -1,5 +1,11 @@ import urllib.parse +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_webserver.projects_nodes import ( + NodeGet, + NodeGetIdle, + NodeGetUnknown, +) from models_library.services import ServiceType @@ -9,3 +15,14 @@ def get_service_from_key(service_key: str) -> ServiceType: if encoded_service_type == "comp": encoded_service_type = "computational" return ServiceType(encoded_service_type) + + +def get_status_as_dict( + status: NodeGetIdle | NodeGetUnknown | DynamicServiceGet | NodeGet, +) -> dict: + """shared between different backend services to guarantee same result to frontend""" + return ( + status.dict(by_alias=True) + if isinstance(status, DynamicServiceGet) + else status.dict() + ) diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index f069aeedd76..712746ccce9 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -76,9 +76,11 @@ async def get_redis_client_sdk( Callable[[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]] ]: @asynccontextmanager - async def _(database: RedisDatabase) -> AsyncIterator[RedisClientSDK]: + async def _( + database: RedisDatabase, decode_response: bool = True # noqa: FBT002 + ) -> AsyncIterator[RedisClientSDK]: 
redis_resources_dns = redis_service.build_redis_dsn(database) - client = RedisClientSDK(redis_resources_dns) + client = RedisClientSDK(redis_resources_dns, decode_responses=decode_response) assert client assert client.redis_dsn == redis_resources_dns await client.setup() diff --git a/packages/service-library/tests/deferred_tasks/conftest.py b/packages/service-library/tests/deferred_tasks/conftest.py index 642a67336b6..00881e61471 100644 --- a/packages/service-library/tests/deferred_tasks/conftest.py +++ b/packages/service-library/tests/deferred_tasks/conftest.py @@ -9,8 +9,10 @@ @pytest.fixture async def redis_client_sdk_deferred_tasks( get_redis_client_sdk: Callable[ - [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + [RedisDatabase, bool], AbstractAsyncContextManager[RedisClientSDK] ] ) -> AsyncIterator[RedisClientSDK]: - async with get_redis_client_sdk(RedisDatabase.DEFERRED_TASKS) as client: + async with get_redis_client_sdk( + RedisDatabase.DEFERRED_TASKS, decode_response=False + ) as client: yield client diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 75850fddc2e..0ba848178d8 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -8,6 +8,7 @@ from uuid import uuid4 from pydantic import NonNegativeInt +from redis.asyncio import Redis from servicelib.deferred_tasks import ( BaseDeferredHandler, DeferredContext, @@ -54,21 +55,22 @@ async def on_result(cls, result: str, context: DeferredContext) -> None: class InMemoryLists: def __init__(self, redis_settings: RedisSettings, port: int) -> None: - self.redis_client_sdk = RedisClientSDK( - redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS) - ) + # NOTE: RedisClientSDK is not required here but it's used to easily construct + # a redis connection + self.redis: Redis = RedisClientSDK( + redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), + decode_responses=True, + ).redis self.port = port def _get_queue_name(self, queue_name: str) -> str: return f"in_memory_lists::{queue_name}.{self.port}" async def append_to(self, queue_name: str, value: Any) -> None: - await self.redis_client_sdk.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore + await self.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore async def get_all_from(self, queue_name: str) -> list: - return await self.redis_client_sdk.redis.lrange( - self._get_queue_name(queue_name), 0, -1 - ) # type: ignore + return await self.redis.lrange(self._get_queue_name(queue_name), 0, -1) # type: ignore class ExampleApp: @@ -79,18 +81,19 @@ def __init__( in_memory_lists: InMemoryLists, max_workers: NonNegativeInt, ) -> None: - self._redis_client_sdk = RedisClientSDK( - redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS) + self._redis_client = RedisClientSDK( + redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), + decode_responses=False, ) self._manager = DeferredManager( rabbit_settings, - self._redis_client_sdk, + self._redis_client, globals_context={"in_memory_lists": in_memory_lists}, max_workers=max_workers, ) async def setup(self) -> None: - await self._redis_client_sdk.setup() + await self._redis_client.setup() await self._manager.setup() diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py index 
9f3451058bf..a5b45ed80d9 100644 --- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py +++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py @@ -52,7 +52,10 @@ class MockKeys(StrAutoEnum): async def redis_client_sdk( redis_service: RedisSettings, ) -> AsyncIterable[RedisClientSDK]: - sdk = RedisClientSDK(redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS)) + sdk = RedisClientSDK( + redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), + decode_responses=False, + ) await sdk.setup() yield sdk await sdk.shutdown() diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index 656ffdd2e71..b4873665dd1 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -17,6 +17,7 @@ class RedisDatabase(IntEnum): ANNOUNCEMENTS = 5 DISTRIBUTED_IDENTIFIERS = 6 DEFERRED_TASKS = 7 + DYNAMIC_SERVICES = 8 class RedisSettings(BaseCustomSettings): diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index f107cfa54f5..937ba4a3f30 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -42,12 +42,12 @@ "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", - "pytest_simcore.socketio", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", @@ -55,6 +55,7 @@ "pytest_simcore.simcore_dask_service", "pytest_simcore.simcore_services", "pytest_simcore.simcore_storage_service", + "pytest_simcore.socketio", ] logger = logging.getLogger(__name__) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index a01980027c0..f63381c538b 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -284,11 +284,6 @@ def project_id() -> ProjectID: return uuid4() -@pytest.fixture -def node_id() -> NodeID: - return uuid4() - - @dataclass class ImageParams: image: Image diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index 358b22fb8ab..9beacf76c34 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -93,7 +93,8 @@ services: user_notifications:${REDIS_HOST}:${REDIS_PORT}:4:${REDIS_PASSWORD}, announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD}, distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD}, - deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD} + deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD}, + dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD} # If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml ports: - "18081:8081" diff --git a/services/docker-compose.yml b/services/docker-compose.yml index af73de611b4..8e8f02db8a2 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1168,7 +1168,19 @@ services: # also aof (append only) is also enabled such that we get full durability at the expense # of backup size. The backup is written into /data. 
# https://redis.io/topics/persistence - [ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "8", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ] + [ + "redis-server", + "--save", + "60 1", + "--loglevel", + "verbose", + "--databases", + "9", + "--appendonly", + "yes", + "--requirepass", + "${REDIS_PASSWORD}" + ] networks: - default - autoscaling_subnet diff --git a/services/dynamic-scheduler/requirements/_base.in b/services/dynamic-scheduler/requirements/_base.in index 74bc0519c82..ab95aec0daa 100644 --- a/services/dynamic-scheduler/requirements/_base.in +++ b/services/dynamic-scheduler/requirements/_base.in @@ -14,9 +14,10 @@ --requirement ../../../packages/service-library/requirements/_fastapi.in - +arrow fastapi httpx packaging +python-socketio typer[all] uvicorn[standard] diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index bab6a9c099e..f60e814f088 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -47,6 +47,8 @@ attrs==23.2.0 # aiohttp # jsonschema # referencing +bidict==0.23.1 + # via python-socketio certifi==2024.2.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -107,6 +109,7 @@ h11==0.14.0 # via # httpcore # uvicorn + # wsproto httpcore==1.0.5 # via httpx httptools==0.6.1 @@ -265,6 +268,10 @@ python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via uvicorn +python-engineio==4.9.1 + # via python-socketio +python-socketio==5.11.2 + # via -r requirements/_base.in pyyaml==6.0.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -306,6 +313,8 @@ setuptools==74.0.0 # via opentelemetry-instrumentation shellingham==1.5.4 # via typer +simple-websocket==1.0.0 + # via python-engineio six==1.16.0 # via python-dateutil sniffio==1.3.1 diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py index 088745a07c3..ce43766f5a3 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py @@ -3,7 +3,8 @@ from servicelib.fastapi.dependencies import get_app, get_reverse_url_mapper from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from servicelib.redis import RedisClientSDK -from simcore_service_dynamic_scheduler.services.redis import get_redis_client +from settings_library.redis import RedisDatabase +from simcore_service_dynamic_scheduler.services.redis import get_all_redis_clients from ...services.rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_server @@ -19,8 +20,10 @@ def get_rabbitmq_rpc_server_from_request(request: Request) -> RabbitMQRPCClient: return get_rabbitmq_rpc_server(request.app) -def get_redis_client_from_request(request: Request) -> RedisClientSDK: - return get_redis_client(request.app) +def get_redis_clients_from_request( + request: Request, +) -> dict[RedisDatabase, RedisClientSDK]: + return get_all_redis_clients(request.app) __all__: tuple[str, ...] 
= ( diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py index 515602aef7c..7e87c57fd06 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py @@ -9,11 +9,12 @@ ) from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from servicelib.redis import RedisClientSDK +from settings_library.redis import RedisDatabase from ._dependencies import ( get_rabbitmq_client_from_request, get_rabbitmq_rpc_server_from_request, - get_redis_client_from_request, + get_redis_clients_from_request, ) router = APIRouter() @@ -29,12 +30,17 @@ async def healthcheck( rabbit_rpc_server: Annotated[ RabbitMQRPCClient, Depends(get_rabbitmq_rpc_server_from_request) ], - redis_client_sdk: Annotated[RedisClientSDK, Depends(get_redis_client_from_request)], + redis_client_sdks: Annotated[ + dict[RedisDatabase, RedisClientSDK], + Depends(get_redis_clients_from_request), + ], ): if not rabbit_client.healthy or not rabbit_rpc_server.healthy: raise HealthCheckError(RABBITMQ_CLIENT_UNHEALTHY_MSG) - if not redis_client_sdk.is_healthy: + if not all( + redis_client_sdk.is_healthy for redis_client_sdk in redis_client_sdks.values() + ): raise HealthCheckError(REDIS_CLIENT_UNHEALTHY_MSG) return f"{__name__}@{arrow.utcnow().isoformat()}" diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py index 991aa004703..0687c58bac1 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py @@ -14,6 +14,7 @@ from ...core.settings import ApplicationSettings from ...services.director_v2 import DirectorV2Client +from ...services.service_tracker import set_request_as_running, set_request_as_stopped router = RPCRouter() @@ -37,6 +38,7 @@ async def run_dynamic_service( response: NodeGet | DynamicServiceGet = ( await director_v2_client.run_dynamic_service(dynamic_service_start) ) + await set_request_as_running(app, dynamic_service_start) return response @@ -59,4 +61,5 @@ async def stop_dynamic_service( timeout=settings.DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT, ) ) + await set_request_as_stopped(app, dynamic_service_stop) return response diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py index f1c00211386..e6ba2bbb53f 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py @@ -17,9 +17,13 @@ ) from ..api.rest.routes import setup_rest_api from ..api.rpc.routes import setup_rpc_api_routes +from ..services.deferred_manager import setup_deferred_manager from ..services.director_v2 import setup_director_v2 +from ..services.notifier import setup_notifier from ..services.rabbitmq import setup_rabbitmq from ..services.redis import setup_redis +from ..services.service_tracker import setup_service_tracker +from ..services.status_monitor import setup_status_monitor from .settings import ApplicationSettings @@ -57,10 +61,18 @@ def create_app(settings: ApplicationSettings | None 
= None) -> FastAPI: # PLUGINS SETUP setup_director_v2(app) + setup_rabbitmq(app) setup_rpc_api_routes(app) + setup_redis(app) + setup_notifier(app) + + setup_service_tracker(app) + setup_deferred_manager(app) + setup_status_monitor(app) + setup_rest_api(app) # ERROR HANDLERS diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py new file mode 100644 index 00000000000..8544c0f38e6 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py @@ -0,0 +1,24 @@ +from fastapi import FastAPI +from servicelib.deferred_tasks import DeferredManager +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisDatabase + +from .redis import get_redis_client + + +def setup_deferred_manager(app: FastAPI) -> None: + async def on_startup() -> None: + rabbit_settings: RabbitSettings = app.state.settings.DYNAMIC_SCHEDULER_RABBITMQ + + redis_client_sdk = get_redis_client(app, RedisDatabase.DEFERRED_TASKS) + app.state.deferred_manager = manager = DeferredManager( + rabbit_settings, redis_client_sdk, globals_context={"app": app} + ) + await manager.setup() + + async def on_shutdown() -> None: + manager: DeferredManager = app.state.deferred_manager + await manager.shutdown() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py new file mode 100644 index 00000000000..8cd33e12808 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py @@ -0,0 +1,7 @@ +from ._notifier import notify_service_status_change +from ._setup import setup_notifier + +__all__: tuple[str, ...] 
= ( + "setup_notifier", + "notify_service_status_change", +) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py new file mode 100644 index 00000000000..0b8690a9676 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py @@ -0,0 +1,55 @@ +import contextlib + +import socketio # type: ignore[import-untyped] +from fastapi import FastAPI +from fastapi.encoders import jsonable_encoder +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.socketio import ( + SOCKET_IO_SERVICE_STATUS_EVENT, +) +from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.api_schemas_webserver.socketio import SocketIORoomStr +from models_library.users import UserID +from servicelib.fastapi.app_state import SingletonInAppStateMixin +from servicelib.services_utils import get_status_as_dict + + +class Notifier(SingletonInAppStateMixin): + app_state_name: str = "notifier" + + def __init__(self, sio_manager: socketio.AsyncAioPikaManager): + self._sio_manager = sio_manager + + async def notify_service_status( + self, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle + ) -> None: + await self._sio_manager.emit( + SOCKET_IO_SERVICE_STATUS_EVENT, + data=jsonable_encoder(get_status_as_dict(status)), + room=SocketIORoomStr.from_user_id(user_id), + ) + + +async def notify_service_status_change( + app: FastAPI, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle +) -> None: + notifier: Notifier = Notifier.get_from_app_state(app) + await notifier.notify_service_status(user_id=user_id, status=status) + + +def setup(app: FastAPI): + async def _on_startup() -> None: + assert app.state.external_socketio # nosec + + notifier = Notifier( + sio_manager=app.state.external_socketio, + ) + notifier.set_to_app_state(app) + assert Notifier.get_from_app_state(app) == notifier # nosec + + async def _on_shutdown() -> None: + with contextlib.suppress(AttributeError): + Notifier.pop_from_app_state(app) + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py new file mode 100644 index 00000000000..1542afa8a87 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py @@ -0,0 +1,8 @@ +from fastapi import FastAPI + +from . 
import _notifier, _socketio
+
+
+def setup_notifier(app: FastAPI):
+    _socketio.setup(app)
+    _notifier.setup(app)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py
new file mode 100644
index 00000000000..2f0abfbd3af
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py
@@ -0,0 +1,32 @@
+import logging
+
+import socketio  # type: ignore[import-untyped]
+from fastapi import FastAPI
+from servicelib.socketio_utils import cleanup_socketio_async_pubsub_manager
+
+from ...core.settings import ApplicationSettings
+
+_logger = logging.getLogger(__name__)
+
+
+def setup(app: FastAPI):
+    settings: ApplicationSettings = app.state.settings
+
+    async def _on_startup() -> None:
+        assert app.state.rabbitmq_client  # nosec
+
+        # Connect to socket.io as an external process in write-only mode
+        # SEE https://python-socketio.readthedocs.io/en/stable/server.html#emitting-from-external-processes
+        assert settings.DYNAMIC_SCHEDULER_RABBITMQ  # nosec
+        app.state.external_socketio = socketio.AsyncAioPikaManager(
+            url=settings.DYNAMIC_SCHEDULER_RABBITMQ.dsn, logger=_logger, write_only=True
+        )
+
+    async def _on_shutdown() -> None:
+        if external_socketio := getattr(app.state, "external_socketio"):  # noqa: B009
+            await cleanup_socketio_async_pubsub_manager(
+                server_manager=external_socketio
+            )
+
+    app.add_event_handler("startup", _on_startup)
+    app.add_event_handler("shutdown", _on_shutdown)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
index 7904d5e1a5d..84131eaf54b 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
@@ -1,25 +1,46 @@
+from typing import Final
+
 from fastapi import FastAPI
-from servicelib.redis import RedisClientSDK
+from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig
 from settings_library.redis import RedisDatabase, RedisSettings
 
+_DECODE_DBS: Final[set[RedisDatabase]] = {
+    RedisDatabase.LOCKS,
+}
+
+_BINARY_DBS: Final[set[RedisDatabase]] = {
+    RedisDatabase.DEFERRED_TASKS,
+    RedisDatabase.DYNAMIC_SERVICES,
+}
+
+_ALL_REDIS_DATABASES: Final[set[RedisDatabase]] = _DECODE_DBS | _BINARY_DBS
+
 
 def setup_redis(app: FastAPI) -> None:
     settings: RedisSettings = app.state.settings.DYNAMIC_SCHEDULER_REDIS
 
     async def on_startup() -> None:
-        redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
-        app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
-        await client.setup()
+        app.state.redis_clients_manager = manager = RedisClientsManager(
+            {RedisManagerDBConfig(x, decode_responses=False) for x in _BINARY_DBS}
+            | {RedisManagerDBConfig(x, decode_responses=True) for x in _DECODE_DBS},
+            settings,
+        )
+        await manager.setup()
 
     async def on_shutdown() -> None:
-        redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk
-        if redis_client_sdk:
-            await redis_client_sdk.shutdown()
+        manager: RedisClientsManager = app.state.redis_clients_manager
+        await manager.shutdown()
 
     app.add_event_handler("startup", on_startup)
     app.add_event_handler("shutdown", on_shutdown)
 
 
-def get_redis_client(app: FastAPI) -> RedisClientSDK:
-    redis_client_sdk: RedisClientSDK = app.state.redis_client_sdk
-    return redis_client_sdk
+def get_redis_client(app: FastAPI, database: RedisDatabase) -> RedisClientSDK:
+    manager: RedisClientsManager = app.state.redis_clients_manager
+    return manager.client(database)
+
+
+def get_all_redis_clients(
+    app: FastAPI,
+) -> dict[RedisDatabase, RedisClientSDK]:
+    return {d: get_redis_client(app, d) for d in _ALL_REDIS_DATABASES}
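For illustration, a minimal sketch of how application code is expected to use the accessor above (the caller module and function name are hypothetical; `RedisClientSDK.redis` is the underlying client, as used by the tracker later in this patch):

    from fastapi import FastAPI
    from settings_library.redis import RedisDatabase

    from .redis import get_redis_client


    async def store_marker(app: FastAPI) -> None:
        # DYNAMIC_SERVICES is registered with decode_responses=False,
        # so values are read and written as raw bytes
        client = get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES)
        await client.redis.set("example-key", b"1")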
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py
new file mode 100644
index 00000000000..abf543d1bef
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py
@@ -0,0 +1,33 @@
+from ._api import (
+    NORMAL_RATE_POLL_INTERVAL,
+    get_all_tracked_services,
+    get_tracked_service,
+    get_user_id_for_service,
+    remove_tracked_service,
+    set_frontned_notified_for_service,
+    set_if_status_changed_for_service,
+    set_request_as_running,
+    set_request_as_stopped,
+    set_service_scheduled_to_run,
+    set_service_status_task_uid,
+    should_notify_frontend_for_service,
+)
+from ._models import TrackedServiceModel
+from ._setup import setup_service_tracker
+
+__all__: tuple[str, ...] = (
+    "get_all_tracked_services",
+    "get_tracked_service",
+    "get_user_id_for_service",
+    "NORMAL_RATE_POLL_INTERVAL",
+    "remove_tracked_service",
+    "set_frontned_notified_for_service",
+    "set_if_status_changed_for_service",
+    "set_request_as_running",
+    "set_request_as_stopped",
+    "set_service_scheduled_to_run",
+    "set_service_status_task_uid",
+    "setup_service_tracker",
+    "should_notify_frontend_for_service",
+    "TrackedServiceModel",
+)
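The tracker's storage format can be exercised directly; a round-trip sketch mirroring the serialization test added later in this patch (`TrackedServiceModel` and `UserRequestedState` are defined in `_models.py` below):

    from simcore_service_dynamic_scheduler.services.service_tracker import (
        TrackedServiceModel,
    )
    from simcore_service_dynamic_scheduler.services.service_tracker._models import (
        UserRequestedState,
    )

    model = TrackedServiceModel(
        dynamic_service_start=None,
        user_id=None,
        project_id=None,
        requested_state=UserRequestedState.RUNNING,
    )
    # models are pickled to bytes before being written to Redis
    assert TrackedServiceModel.from_bytes(model.to_bytes()) == model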
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
new file mode 100644
index 00000000000..1b1b4a0d9f8
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
@@ -0,0 +1,248 @@
+import inspect
+import logging
+from datetime import timedelta
+from typing import Final
+
+import arrow
+from fastapi import FastAPI
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+    DynamicServiceStart,
+    DynamicServiceStop,
+)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects_nodes_io import NodeID
+from models_library.services_enums import ServiceState
+from models_library.users import UserID
+from servicelib.deferred_tasks import TaskUID
+
+from ._models import SchedulerServiceState, TrackedServiceModel, UserRequestedState
+from ._setup import get_tracker
+
+_logger = logging.getLogger(__name__)
+
+
+_LOW_RATE_POLL_INTERVAL: Final[timedelta] = timedelta(seconds=1)
+NORMAL_RATE_POLL_INTERVAL: Final[timedelta] = timedelta(seconds=5)
+_MAX_PERIOD_WITHOUT_SERVICE_STATUS_UPDATES: Final[timedelta] = timedelta(seconds=60)
+
+
+async def set_request_as_running(
+    app: FastAPI,
+    dynamic_service_start: DynamicServiceStart,
+) -> None:
+    """Stores the intention to `start` the service"""
+    await get_tracker(app).save(
+        dynamic_service_start.node_uuid,
+        TrackedServiceModel(
+            dynamic_service_start=dynamic_service_start,
+            requested_state=UserRequestedState.RUNNING,
+            project_id=dynamic_service_start.project_id,
+            user_id=dynamic_service_start.user_id,
+        ),
+    )
+
+
+async def set_request_as_stopped(
+    app: FastAPI, dynamic_service_stop: DynamicServiceStop
+) -> None:
+    """Stores the intention to `stop` the service"""
+    tracker = get_tracker(app)
+    model: TrackedServiceModel | None = await tracker.load(dynamic_service_stop.node_id)
+
+    if model is None:
+        model = TrackedServiceModel(
+            dynamic_service_start=None,
+            user_id=dynamic_service_stop.user_id,
+            project_id=dynamic_service_stop.project_id,
+            requested_state=UserRequestedState.STOPPED,
+        )
+
+    model.requested_state = UserRequestedState.STOPPED
+    await tracker.save(dynamic_service_stop.node_id, model)
+
+
+def _get_service_state(
+    status: NodeGet | DynamicServiceGet | NodeGetIdle,
+) -> ServiceState:
+    # Attribute under which each status type exposes its state:
+    # NodeGet -> service_state
+    # DynamicServiceGet -> state
+    # NodeGetIdle -> service_state
+    state_key = "state" if isinstance(status, DynamicServiceGet) else "service_state"
+
+    state: ServiceState | str = getattr(status, state_key)
+    result: str = state.value if isinstance(state, ServiceState) else state
+    return ServiceState(result)
+
+
+def _get_poll_interval(status: NodeGet | DynamicServiceGet | NodeGetIdle) -> timedelta:
+    if _get_service_state(status) != ServiceState.RUNNING:
+        return _LOW_RATE_POLL_INTERVAL
+
+    return NORMAL_RATE_POLL_INTERVAL
+
+
+def _get_current_scheduler_service_state(
+    requested_state: UserRequestedState,
+    status: NodeGet | DynamicServiceGet | NodeGetIdle,
+) -> SchedulerServiceState:
+    """
+    Computes the `SchedulerServiceState` used internally by the scheduler
+    to decide about a service's future.
+    """
+
+    if isinstance(status, NodeGetIdle):
+        return SchedulerServiceState.IDLE
+
+    service_state: ServiceState = _get_service_state(status)
+
+    if requested_state == UserRequestedState.RUNNING:
+        if service_state == ServiceState.RUNNING:
+            return SchedulerServiceState.RUNNING
+
+        if (
+            ServiceState.PENDING  # type:ignore[operator]
+            <= service_state
+            <= ServiceState.STARTING
+        ):
+            return SchedulerServiceState.STARTING
+
+        if service_state < ServiceState.PENDING or service_state > ServiceState.RUNNING:
+            return SchedulerServiceState.UNEXPECTED_OUTCOME
+
+    if requested_state == UserRequestedState.STOPPED:
+        if service_state >= ServiceState.RUNNING:  # type:ignore[operator]
+            return SchedulerServiceState.STOPPING
+
+        if service_state < ServiceState.RUNNING:
+            return SchedulerServiceState.UNEXPECTED_OUTCOME
+
+    msg = f"Could not determine current_state from: '{requested_state=}', '{status=}'"
+    raise TypeError(msg)
+
+
+def _log_skipping_operation(node_id: NodeID) -> None:
+    # the caller is at index 1 (index 0 is the current function)
+    caller_name = inspect.stack()[1].function
+
+    _logger.info(
+        "Could not find a %s entry for node_id %s: skipping %s",
+        TrackedServiceModel.__name__,
+        node_id,
+        caller_name,
+    )
+
+
+async def set_if_status_changed_for_service(
+    app: FastAPI, node_id: NodeID, status: NodeGet | DynamicServiceGet | NodeGetIdle
+) -> bool:
+    """returns ``True`` if the tracker detected a status change"""
+    tracker = get_tracker(app)
+    model: TrackedServiceModel | None = await tracker.load(node_id)
+    if model is None:
+        _log_skipping_operation(node_id)
+        return False
+
+    # set new polling interval in the future
+    model.set_check_status_after_to(_get_poll_interval(status))
+    model.service_status_task_uid = None
+    model.scheduled_to_run = False
+
+    # check if model changed
+    json_status = status.json()
+    if model.service_status != json_status:
+        model.service_status = 
json_status + model.current_state = _get_current_scheduler_service_state( + model.requested_state, status + ) + await tracker.save(node_id, model) + return True + + return False + + +async def should_notify_frontend_for_service( + app: FastAPI, node_id: NodeID, *, status_changed: bool +) -> bool: + """ + Checks if it's time to notify the frontend. + The frontend will be notified at regular intervals and on changes + Avoids sending too many updates. + """ + tracker = get_tracker(app) + model: TrackedServiceModel | None = await tracker.load(node_id) + + if model is None: + return False + + # check if too much time has passed since the last time an update was sent + return ( + status_changed + or arrow.utcnow().timestamp() - model.last_status_notification + > _MAX_PERIOD_WITHOUT_SERVICE_STATUS_UPDATES.total_seconds() + ) + + +async def set_frontned_notified_for_service(app: FastAPI, node_id: NodeID) -> None: + tracker = get_tracker(app) + model: TrackedServiceModel | None = await tracker.load(node_id) + if model is None: + _log_skipping_operation(node_id) + return + + model.set_last_status_notification_to_now() + await tracker.save(node_id, model) + + +async def set_service_scheduled_to_run( + app: FastAPI, node_id: NodeID, delay_from_now: timedelta +) -> None: + tracker = get_tracker(app) + model: TrackedServiceModel | None = await tracker.load(node_id) + if model is None: + _log_skipping_operation(node_id) + return + + model.scheduled_to_run = True + model.set_check_status_after_to(delay_from_now) + await tracker.save(node_id, model) + + +async def set_service_status_task_uid( + app: FastAPI, node_id: NodeID, task_uid: TaskUID +) -> None: + tracker = get_tracker(app) + model: TrackedServiceModel | None = await tracker.load(node_id) + if model is None: + _log_skipping_operation(node_id) + return + + model.service_status_task_uid = task_uid + await tracker.save(node_id, model) + + +async def remove_tracked_service(app: FastAPI, node_id: NodeID) -> None: + """ + Removes the service from tracking (usually after stop completes) + # NOTE: does not raise if node_id is not found + """ + await get_tracker(app).delete(node_id) + + +async def get_tracked_service( + app: FastAPI, node_id: NodeID +) -> TrackedServiceModel | None: + """Returns information about the tracked service""" + return await get_tracker(app).load(node_id) + + +async def get_all_tracked_services(app: FastAPI) -> dict[NodeID, TrackedServiceModel]: + """Returns all tracked services""" + return await get_tracker(app).all() + + +async def get_user_id_for_service(app: FastAPI, node_id: NodeID) -> UserID | None: + """returns user_id for the service""" + model: TrackedServiceModel | None = await get_tracker(app).load(node_id) + return model.user_id if model else None diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py new file mode 100644 index 00000000000..985ca8feef5 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py @@ -0,0 +1,123 @@ +import pickle +from dataclasses import dataclass, field +from datetime import timedelta +from enum import auto + +import arrow +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStart, +) +from models_library.projects import ProjectID +from models_library.users import UserID +from models_library.utils.enums import StrAutoEnum +from 
servicelib.deferred_tasks import TaskUID
+
+
+class UserRequestedState(StrAutoEnum):
+    RUNNING = auto()
+    STOPPED = auto()
+
+
+class SchedulerServiceState(StrAutoEnum):
+    # service was started and is running as expected
+    RUNNING = auto()
+    # service is not present
+    IDLE = auto()
+    # something went wrong while starting/stopping service
+    UNEXPECTED_OUTCOME = auto()
+
+    # service is being started
+    STARTING = auto()
+    # service is being stopped
+    STOPPING = auto()
+
+    # service status has not been determined
+    UNKNOWN = auto()
+
+
+@dataclass
+class TrackedServiceModel:  # pylint:disable=too-many-instance-attributes
+
+    dynamic_service_start: DynamicServiceStart | None = field(
+        metadata={
+            "description": (
+                "used to create the service at any given moment if the requested_state is RUNNING; "
+                "can be set to None only when stopping the service"
+            )
+        }
+    )
+
+    user_id: UserID | None = field(
+        metadata={
+            "description": "required for propagating status changes to the frontend"
+        }
+    )
+    project_id: ProjectID | None = field(
+        metadata={
+            "description": "required for propagating status changes to the frontend"
+        }
+    )
+
+    requested_state: UserRequestedState = field(
+        metadata={
+            "description": (
+                "status of the service desired by the user: RUNNING or STOPPED"
+            )
+        }
+    )
+
+    current_state: SchedulerServiceState = field(
+        default=SchedulerServiceState.UNKNOWN,
+        metadata={
+            "description": "to set after parsing the incoming state via the API calls"
+        },
+    )
+
+    #############################
+    ### SERVICE STATUS UPDATE ###
+    #############################
+
+    scheduled_to_run: bool = field(
+        default=False,
+        metadata={"description": "set when a job will be immediately scheduled"},
+    )
+
+    service_status: str = field(
+        default="",
+        metadata={
+            "description": "stored mainly for debugging; used to compute ``current_state``"
+        },
+    )
+    service_status_task_uid: TaskUID | None = field(
+        default=None,
+        metadata={"description": "uid of the job currently fetching the status"},
+    )
+
+    check_status_after: float = field(
+        default_factory=lambda: arrow.utcnow().timestamp(),
+        metadata={"description": "used to determine when to poll the status again"},
+    )
+
+    last_status_notification: float = field(
+        default=0,
+        metadata={
+            "description": "used to determine when the status was last notified"
+        },
+    )
+
+    def set_check_status_after_to(self, delay_from_now: timedelta) -> None:
+        self.check_status_after = (arrow.utcnow() + delay_from_now).timestamp()
+
+    def set_last_status_notification_to_now(self) -> None:
+        self.last_status_notification = arrow.utcnow().timestamp()
+
+    #####################
+    ### SERIALIZATION ###
+    #####################
+
+    def to_bytes(self) -> bytes:
+        return pickle.dumps(self)
+
+    @classmethod
+    def from_bytes(cls, data: bytes) -> "TrackedServiceModel":
+        return pickle.loads(data)  # type: ignore # noqa: S301
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py
new file mode 100644
index 00000000000..40a47bb8bec
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py
@@ -0,0 +1,19 @@
+from fastapi import FastAPI
+from settings_library.redis import RedisDatabase
+
+from ..redis import get_redis_client
+from ._tracker import Tracker
+
+
+def setup_service_tracker(app: FastAPI) -> None:
+    async def on_startup() -> None:
+        
app.state.service_tracker = Tracker( + get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES) + ) + + app.add_event_handler("startup", on_startup) + + +def get_tracker(app: FastAPI) -> Tracker: + tracker: Tracker = app.state.service_tracker + return tracker diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py new file mode 100644 index 00000000000..489cee15310 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py @@ -0,0 +1,44 @@ +from dataclasses import dataclass +from typing import Final + +from models_library.projects_nodes_io import NodeID +from servicelib.redis import RedisClientSDK + +from ._models import TrackedServiceModel + +_KEY_PREFIX: Final[str] = "t::" + + +def _get_key(node_id: NodeID) -> str: + return f"{_KEY_PREFIX}{node_id}" + + +@dataclass +class Tracker: + redis_client_sdk: RedisClientSDK + + async def save(self, node_id: NodeID, model: TrackedServiceModel) -> None: + await self.redis_client_sdk.redis.set(_get_key(node_id), model.to_bytes()) + + async def load(self, node_id: NodeID) -> TrackedServiceModel | None: + model_as_bytes: bytes | None = await self.redis_client_sdk.redis.get( + _get_key(node_id) + ) + return ( + None + if model_as_bytes is None + else TrackedServiceModel.from_bytes(model_as_bytes) + ) + + async def delete(self, node_id: NodeID) -> None: + await self.redis_client_sdk.redis.delete(_get_key(node_id)) + + async def all(self) -> dict[NodeID, TrackedServiceModel]: + found_keys = await self.redis_client_sdk.redis.keys(f"{_KEY_PREFIX}*") + found_values = await self.redis_client_sdk.redis.mget(found_keys) + + return { + NodeID(k.decode().lstrip(_KEY_PREFIX)): TrackedServiceModel.from_bytes(v) + for k, v in zip(found_keys, found_values, strict=True) + if v is not None + } diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py new file mode 100644 index 00000000000..26345124325 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py @@ -0,0 +1,3 @@ +from ._setup import setup_status_monitor + +__all__: tuple[str, ...] = ("setup_status_monitor",) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py new file mode 100644 index 00000000000..f710204504c --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py @@ -0,0 +1,85 @@ +import logging +from datetime import timedelta + +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_directorv2.dynamic_services_service import ( + RunningDynamicServiceDetails, +) +from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects_nodes_io import NodeID +from models_library.users import UserID +from servicelib.deferred_tasks import BaseDeferredHandler, TaskUID +from servicelib.deferred_tasks._base_deferred_handler import DeferredContext + +from .. 
import service_tracker +from ..director_v2 import DirectorV2Client +from ..notifier import notify_service_status_change + +_logger = logging.getLogger(__name__) + + +class DeferredGetStatus(BaseDeferredHandler[NodeGet | DynamicServiceGet | NodeGetIdle]): + @classmethod + async def get_timeout(cls, context: DeferredContext) -> timedelta: + assert context # nosec + return timedelta(seconds=5) + + @classmethod + async def start( # type:ignore[override] # pylint:disable=arguments-differ + cls, node_id: NodeID + ) -> DeferredContext: + _logger.debug("Getting service status for %s", node_id) + return {"node_id": node_id} + + @classmethod + async def on_created(cls, task_uid: TaskUID, context: DeferredContext) -> None: + """called after deferred was scheduled to run""" + app: FastAPI = context["app"] + node_id: NodeID = context["node_id"] + + await service_tracker.set_service_status_task_uid(app, node_id, task_uid) + + @classmethod + async def run( + cls, context: DeferredContext + ) -> NodeGet | DynamicServiceGet | NodeGetIdle: + app: FastAPI = context["app"] + node_id: NodeID = context["node_id"] + + director_v2_client: DirectorV2Client = DirectorV2Client.get_from_app_state(app) + service_status: NodeGet | RunningDynamicServiceDetails | NodeGetIdle = ( + await director_v2_client.get_status(node_id) + ) + _logger.debug( + "Service status type=%s, %s", type(service_status), service_status + ) + return service_status + + @classmethod + async def on_result( + cls, result: NodeGet | DynamicServiceGet | NodeGetIdle, context: DeferredContext + ) -> None: + app: FastAPI = context["app"] + node_id: NodeID = context["node_id"] + + _logger.debug("Received status for service '%s': '%s'", node_id, result) + + status_changed: bool = await service_tracker.set_if_status_changed_for_service( + app, node_id, result + ) + if await service_tracker.should_notify_frontend_for_service( + app, node_id, status_changed=status_changed + ): + user_id: UserID | None = await service_tracker.get_user_id_for_service( + app, node_id + ) + if user_id: + await notify_service_status_change(app, user_id, result) + await service_tracker.set_frontned_notified_for_service(app, node_id) + else: + _logger.info( + "Did not find a user for '%s', skipping status delivery of: %s", + node_id, + result, + ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py new file mode 100644 index 00000000000..0d8b5a2723f --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py @@ -0,0 +1,121 @@ +import logging +from datetime import timedelta +from functools import cached_property +from typing import Final + +import arrow +from fastapi import FastAPI +from models_library.projects_nodes_io import NodeID +from pydantic import NonNegativeFloat, NonNegativeInt +from servicelib.background_task import stop_periodic_task +from servicelib.redis_utils import start_exclusive_periodic_task +from servicelib.utils import limited_gather +from settings_library.redis import RedisDatabase + +from .. 
import service_tracker
+from ..redis import get_redis_client
+from ..service_tracker import NORMAL_RATE_POLL_INTERVAL, TrackedServiceModel
+from ..service_tracker._models import SchedulerServiceState, UserRequestedState
+from ._deferred_get_status import DeferredGetStatus
+
+_logger = logging.getLogger(__name__)
+
+_INTERVAL_BETWEEN_CHECKS: Final[timedelta] = timedelta(seconds=1)
+_MAX_CONCURRENCY: Final[NonNegativeInt] = 10
+
+
+async def _start_get_status_deferred(
+    app: FastAPI, node_id: NodeID, *, next_check_delay: timedelta
+) -> None:
+    await service_tracker.set_service_scheduled_to_run(app, node_id, next_check_delay)
+    await DeferredGetStatus.start(node_id=node_id)
+
+
+class Monitor:
+    def __init__(self, app: FastAPI, status_worker_interval: timedelta) -> None:
+        self.app = app
+        self.status_worker_interval = status_worker_interval
+
+    @cached_property
+    def status_worker_interval_seconds(self) -> NonNegativeFloat:
+        return self.status_worker_interval.total_seconds()
+
+    async def _worker_start_get_status_requests(self) -> None:
+        """
+        Check if a service requires its status to be polled.
+        Note that the interval at which the status is polled can vary.
+        This is a relatively low-resource check.
+        """
+
+        # NOTE: this worker runs only once across all instances of the scheduler
+
+        models: dict[
+            NodeID, TrackedServiceModel
+        ] = await service_tracker.get_all_tracked_services(self.app)
+
+        to_remove: list[NodeID] = []
+        to_start: list[NodeID] = []
+
+        current_timestamp = arrow.utcnow().timestamp()
+
+        for node_id, model in models.items():
+            # check if service is idle and status polling should stop
+            if (
+                model.current_state == SchedulerServiceState.IDLE
+                and model.requested_state == UserRequestedState.STOPPED
+            ):
+                to_remove.append(node_id)
+                continue
+
+            job_not_running = not (
+                model.scheduled_to_run
+                and model.service_status_task_uid is not None
+                and await DeferredGetStatus.is_present(model.service_status_task_uid)
+            )
+            wait_period_finished = current_timestamp > model.check_status_after
+            if job_not_running and wait_period_finished:
+                to_start.append(node_id)
+            else:
+                _logger.info(
+                    "Skipping status check for %s, because: %s or %s",
+                    node_id,
+                    f"{job_not_running=}",
+                    (
+                        f"{wait_period_finished=}"
+                        if wait_period_finished
+                        else f"can_start_in={model.check_status_after - current_timestamp}"
+                    ),
+                )
+
+        _logger.debug("Removing tracked services: '%s'", to_remove)
+        await limited_gather(
+            *(
+                service_tracker.remove_tracked_service(self.app, node_id)
+                for node_id in to_remove
+            ),
+            limit=_MAX_CONCURRENCY,
+        )
+
+        _logger.debug("Poll status for tracked services: '%s'", to_start)
+        await limited_gather(
+            *(
+                _start_get_status_deferred(
+                    self.app, node_id, next_check_delay=NORMAL_RATE_POLL_INTERVAL
+                )
+                for node_id in to_start
+            ),
+            limit=_MAX_CONCURRENCY,
+        )
+
+    async def setup(self) -> None:
+        self.app.state.status_monitor_background_task = start_exclusive_periodic_task(
+            get_redis_client(self.app, RedisDatabase.LOCKS),
+            self._worker_start_get_status_requests,
+            task_period=_INTERVAL_BETWEEN_CHECKS,
+            retry_after=_INTERVAL_BETWEEN_CHECKS,
+            task_name="periodic_service_status_update",
+        )
+
+    async def shutdown(self) -> None:
+        if getattr(self.app.state, "status_monitor_background_task", None):
+            await stop_periodic_task(self.app.state.status_monitor_background_task)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py new file mode 100644 index 00000000000..8f9601464bc --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py @@ -0,0 +1,28 @@ +from datetime import timedelta +from typing import Final + +from fastapi import FastAPI + +from ._monitor import Monitor + +_STATUS_WORKER_DEFAULT_INTERVAL: Final[timedelta] = timedelta(seconds=1) + + +def setup_status_monitor(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.status_monitor = monitor = Monitor( + app, status_worker_interval=_STATUS_WORKER_DEFAULT_INTERVAL + ) + await monitor.setup() + + async def on_shutdown() -> None: + monitor: Monitor = app.state.status_monitor + await monitor.shutdown() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_monitor(app: FastAPI) -> Monitor: + monitor: Monitor = app.state.status_monitor + return monitor diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index ff72140f5ee..2cb14086b2a 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -4,6 +4,7 @@ import string from collections.abc import AsyncIterator from pathlib import Path +from typing import Final import pytest import simcore_service_dynamic_scheduler @@ -13,6 +14,9 @@ from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.redis import RedisClientsManager, RedisManagerDBConfig +from servicelib.utils import logged_gather +from settings_library.redis import RedisDatabase, RedisSettings from simcore_service_dynamic_scheduler.core.application import create_app pytest_plugins = [ @@ -20,6 +24,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.faker_projects_data", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", @@ -73,17 +78,38 @@ def app_environment( ) +_PATH_APPLICATION: Final[str] = "simcore_service_dynamic_scheduler.core.application" + + @pytest.fixture def disable_rabbitmq_setup(mocker: MockerFixture) -> None: - base_path = "simcore_service_dynamic_scheduler.core.application" - mocker.patch(f"{base_path}.setup_rabbitmq") - mocker.patch(f"{base_path}.setup_rpc_api_routes") + mocker.patch(f"{_PATH_APPLICATION}.setup_rabbitmq") + mocker.patch(f"{_PATH_APPLICATION}.setup_rpc_api_routes") @pytest.fixture def disable_redis_setup(mocker: MockerFixture) -> None: - base_path = "simcore_service_dynamic_scheduler.core.application" - mocker.patch(f"{base_path}.setup_redis") + mocker.patch(f"{_PATH_APPLICATION}.setup_redis") + + +@pytest.fixture +def disable_service_tracker_setup(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.setup_service_tracker") + + +@pytest.fixture +def disable_deferred_manager_setup(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.setup_deferred_manager") + + +@pytest.fixture +def disable_notifier_setup(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.setup_notifier") + + +@pytest.fixture +def disable_status_monitor_setup(mocker: MockerFixture) -> None: + mocker.patch(f"{_PATH_APPLICATION}.setup_status_monitor") MAX_TIME_FOR_APP_TO_STARTUP = 10 @@ -101,3 +127,13 @@ async def app( 
shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, ): yield test_app + + +@pytest.fixture +async def remove_redis_data(redis_service: RedisSettings) -> None: + async with RedisClientsManager( + {RedisManagerDBConfig(x) for x in RedisDatabase}, redis_service + ) as manager: + await logged_gather( + *[manager.client(d).redis.flushall() for d in RedisDatabase] + ) diff --git a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py index 987ed8c4d85..efef4241d98 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py @@ -1,13 +1,31 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument from collections.abc import AsyncIterator import pytest from fastapi import FastAPI from httpx import AsyncClient from httpx._transports.asgi import ASGITransport +from pytest_simcore.helpers.typing_env import EnvVarsDict @pytest.fixture -async def client(app: FastAPI) -> AsyncIterator[AsyncClient]: +def app_environment( + disable_rabbitmq_setup: None, + disable_redis_setup: None, + disable_service_tracker_setup: None, + disable_deferred_manager_setup: None, + disable_notifier_setup: None, + disable_status_monitor_setup: None, + app_environment: EnvVarsDict, +) -> EnvVarsDict: + return app_environment + + +@pytest.fixture +async def client( + app_environment: EnvVarsDict, app: FastAPI +) -> AsyncIterator[AsyncClient]: # - Needed for app to trigger start/stop event handlers # - Prefer this client instead of fastapi.testclient.TestClient async with AsyncClient( diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py index 8cc1c3279ef..9b5648e12b4 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py @@ -21,7 +21,6 @@ def __init__(self, is_ok: bool) -> None: @pytest.fixture def mock_rabbitmq_clients( - disable_rabbitmq_setup: None, mocker: MockerFixture, rabbit_client_ok: bool, rabbit_rpc_server_ok: bool, @@ -39,11 +38,13 @@ def mock_rabbitmq_clients( @pytest.fixture def mock_redis_client( - disable_redis_setup: None, mocker: MockerFixture, redis_client_ok: bool + mocker: MockerFixture, + redis_client_ok: bool, ) -> None: base_path = "simcore_service_dynamic_scheduler.api.rest._dependencies" mocker.patch( - f"{base_path}.get_redis_client", return_value=MockHealth(redis_client_ok) + f"{base_path}.get_all_redis_clients", + return_value={0: MockHealth(redis_client_ok)}, ) diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py index 6e68190bcee..8d986dfe60e 100644 --- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py +++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py @@ -1,24 +1,11 @@ # pylint:disable=redefined-outer-name # pylint:disable=unused-argument - - -import pytest from fastapi import status from httpx import AsyncClient -from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_dynamic_scheduler._meta import API_VTAG from simcore_service_dynamic_scheduler.models.schemas.meta import Meta -@pytest.fixture -def app_environment( - disable_rabbitmq_setup: None, - disable_redis_setup: None, - app_environment: EnvVarsDict, -) -> EnvVarsDict: - return 
app_environment - - async def test_health(client: AsyncClient): response = await client.get(f"/{API_VTAG}/meta") assert response.status_code == status.HTTP_200_OK diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index 7c8dada1e18..c484f722ff9 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -59,7 +59,7 @@ def service_status_new_style() -> DynamicServiceGet: @pytest.fixture def service_status_legacy() -> NodeGet: - return NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]) + return NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1]) @pytest.fixture diff --git a/services/dynamic-scheduler/tests/unit/conftest.py b/services/dynamic-scheduler/tests/unit/conftest.py new file mode 100644 index 00000000000..642ed2170ce --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/conftest.py @@ -0,0 +1,29 @@ +from collections.abc import Callable +from copy import deepcopy + +import pytest +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStart, + DynamicServiceStop, +) +from models_library.projects_nodes_io import NodeID + + +@pytest.fixture +def get_dynamic_service_start() -> Callable[[NodeID], DynamicServiceStart]: + def _(node_id: NodeID) -> DynamicServiceStart: + dict_data = deepcopy(DynamicServiceStart.Config.schema_extra["example"]) + dict_data["service_uuid"] = f"{node_id}" + return DynamicServiceStart.parse_obj(dict_data) + + return _ + + +@pytest.fixture +def get_dynamic_service_stop() -> Callable[[NodeID], DynamicServiceStop]: + def _(node_id: NodeID) -> DynamicServiceStop: + dict_data = deepcopy(DynamicServiceStop.Config.schema_extra["example"]) + dict_data["node_id"] = f"{node_id}" + return DynamicServiceStop.parse_obj(dict_data) + + return _ diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py new file mode 100644 index 00000000000..0755f7e5d78 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py @@ -0,0 +1,325 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +from collections.abc import Callable +from datetime import timedelta +from typing import Any, Final, NamedTuple +from uuid import uuid4 + +import pytest +from faker import Faker +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStart, + DynamicServiceStop, +) +from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects_nodes_io import NodeID +from models_library.services_enums import ServiceState +from pydantic import NonNegativeInt +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.deferred_tasks import TaskUID +from servicelib.utils import limited_gather +from settings_library.redis import RedisSettings +from simcore_service_dynamic_scheduler.services.service_tracker import ( + get_all_tracked_services, + get_tracked_service, + remove_tracked_service, + set_if_status_changed_for_service, + set_request_as_running, + set_request_as_stopped, + set_service_status_task_uid, +) +from simcore_service_dynamic_scheduler.services.service_tracker._api import ( + 
_LOW_RATE_POLL_INTERVAL, + NORMAL_RATE_POLL_INTERVAL, + _get_current_scheduler_service_state, + _get_poll_interval, +) +from simcore_service_dynamic_scheduler.services.service_tracker._models import ( + SchedulerServiceState, + UserRequestedState, +) + +pytest_simcore_core_services_selection = [ + "redis", +] + + +@pytest.fixture +def app_environment( + disable_rabbitmq_setup: None, + disable_deferred_manager_setup: None, + disable_notifier_setup: None, + app_environment: EnvVarsDict, + redis_service: RedisSettings, + remove_redis_data: None, +) -> EnvVarsDict: + return app_environment + + +async def test_services_tracer_set_as_running_set_as_stopped( + app: FastAPI, + node_id: NodeID, + get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart], + get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop], +): + async def _remove_service() -> None: + await remove_tracked_service(app, node_id) + assert await get_tracked_service(app, node_id) is None + assert await get_all_tracked_services(app) == {} + + async def _set_as_running() -> None: + await set_request_as_running(app, get_dynamic_service_start(node_id)) + tracked_model = await get_tracked_service(app, node_id) + assert tracked_model + assert tracked_model.requested_state == UserRequestedState.RUNNING + + async def _set_as_stopped() -> None: + await set_request_as_stopped(app, get_dynamic_service_stop(node_id)) + tracked_model = await get_tracked_service(app, node_id) + assert tracked_model + assert tracked_model.requested_state == UserRequestedState.STOPPED + + # request as running then as stopped + await _remove_service() + await _set_as_running() + await _set_as_stopped() + + # request as stopped then as running + await _remove_service() + await _set_as_stopped() + await _set_as_running() + + +@pytest.mark.parametrize("item_count", [100]) +async def test_services_tracer_workflow( + app: FastAPI, + node_id: NodeID, + item_count: NonNegativeInt, + get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart], + get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop], +): + # ensure more than one service can be tracked + await limited_gather( + *[ + set_request_as_stopped(app, get_dynamic_service_stop(uuid4())) + for _ in range(item_count) + ], + limit=100, + ) + assert len(await get_all_tracked_services(app)) == item_count + + +@pytest.mark.parametrize( + "status", + [ + *[NodeGet.parse_obj(o) for o in NodeGet.Config.schema_extra["examples"]], + *[ + DynamicServiceGet.parse_obj(o) + for o in DynamicServiceGet.Config.schema_extra["examples"] + ], + NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]), + ], +) +async def test_set_if_status_changed( + app: FastAPI, + node_id: NodeID, + status: NodeGet | DynamicServiceGet | NodeGetIdle, + get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart], +): + await set_request_as_running(app, get_dynamic_service_start(node_id)) + + assert await set_if_status_changed_for_service(app, node_id, status) is True + + assert await set_if_status_changed_for_service(app, node_id, status) is False + + model = await get_tracked_service(app, node_id) + assert model + + assert model.service_status == status.json() + + +async def test_set_service_status_task_uid( + app: FastAPI, + node_id: NodeID, + faker: Faker, + get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart], +): + await set_request_as_running(app, get_dynamic_service_start(node_id)) + + task_uid = TaskUID(faker.uuid4()) + await set_service_status_task_uid(app, node_id, task_uid) + + 
model = await get_tracked_service(app, node_id) + assert model + + assert model.service_status_task_uid == task_uid + + +@pytest.mark.parametrize( + "status, expected_poll_interval", + [ + ( + NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1]), + _LOW_RATE_POLL_INTERVAL, + ), + *[ + (DynamicServiceGet.parse_obj(o), NORMAL_RATE_POLL_INTERVAL) + for o in DynamicServiceGet.Config.schema_extra["examples"] + ], + ( + NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]), + _LOW_RATE_POLL_INTERVAL, + ), + ], +) +def test__get_poll_interval( + status: NodeGet | DynamicServiceGet | NodeGetIdle, expected_poll_interval: timedelta +): + assert _get_poll_interval(status) == expected_poll_interval + + +def _get_node_get_from(service_state: ServiceState) -> NodeGet: + dict_data = NodeGet.Config.schema_extra["examples"][1] + assert "service_state" in dict_data + dict_data["service_state"] = service_state + return NodeGet.parse_obj(dict_data) + + +def _get_dynamic_service_get_from( + service_state: ServiceState, +) -> DynamicServiceGet: + dict_data = DynamicServiceGet.Config.schema_extra["examples"][1] + assert "state" in dict_data + dict_data["state"] = service_state + return DynamicServiceGet.parse_obj(dict_data) + + +def _get_node_get_idle() -> NodeGetIdle: + return NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]) + + +def __get_flat_list(nested_list: list[list[Any]]) -> list[Any]: + return [item for sublist in nested_list for item in sublist] + + +class ServiceStatusToSchedulerState(NamedTuple): + requested: UserRequestedState + service_status: NodeGet | DynamicServiceGet | NodeGetIdle + expected: SchedulerServiceState + + +_EXPECTED_TEST_CASES: list[list[ServiceStatusToSchedulerState]] = [ + [ + # UserRequestedState.RUNNING + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.PENDING), + SchedulerServiceState.STARTING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.PULLING), + SchedulerServiceState.STARTING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.STARTING), + SchedulerServiceState.STARTING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.RUNNING), + SchedulerServiceState.RUNNING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.COMPLETE), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.FAILED), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + status_generator(ServiceState.STOPPING), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.RUNNING, + _get_node_get_idle(), + SchedulerServiceState.IDLE, + ), + # UserRequestedState.STOPPED + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.PENDING), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.PULLING), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.STARTING), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.RUNNING), + 
SchedulerServiceState.STOPPING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.COMPLETE), + SchedulerServiceState.STOPPING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.FAILED), + SchedulerServiceState.UNEXPECTED_OUTCOME, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + status_generator(ServiceState.STOPPING), + SchedulerServiceState.STOPPING, + ), + ServiceStatusToSchedulerState( + UserRequestedState.STOPPED, + _get_node_get_idle(), + SchedulerServiceState.IDLE, + ), + ] + for status_generator in ( + _get_node_get_from, + _get_dynamic_service_get_from, + ) +] +_FLAT_EXPECTED_TEST_CASES: list[ServiceStatusToSchedulerState] = __get_flat_list( + _EXPECTED_TEST_CASES +) +# ensure enum changes do not break above rules +_NODE_STATUS_FORMATS_COUNT: Final[int] = 2 +assert ( + len(_FLAT_EXPECTED_TEST_CASES) + == len(ServiceState) * len(UserRequestedState) * _NODE_STATUS_FORMATS_COUNT +) + + +@pytest.mark.parametrize("service_status_to_scheduler_state", _FLAT_EXPECTED_TEST_CASES) +def test__get_current_scheduler_service_state( + service_status_to_scheduler_state: ServiceStatusToSchedulerState, +): + assert ( + _get_current_scheduler_service_state( + service_status_to_scheduler_state.requested, + service_status_to_scheduler_state.service_status, + ) + == service_status_to_scheduler_state.expected + ) diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py new file mode 100644 index 00000000000..6b8e31321b3 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py @@ -0,0 +1,57 @@ +from datetime import timedelta + +import arrow +import pytest +from faker import Faker +from servicelib.deferred_tasks import TaskUID +from simcore_service_dynamic_scheduler.services.service_tracker._models import ( + SchedulerServiceState, + TrackedServiceModel, + UserRequestedState, +) + + +@pytest.mark.parametrize("requested_state", UserRequestedState) +@pytest.mark.parametrize("current_state", SchedulerServiceState) +@pytest.mark.parametrize("check_status_after", [1, arrow.utcnow().timestamp()]) +@pytest.mark.parametrize("service_status_task_uid", [None, TaskUID("ok")]) +def test_serialization( + faker: Faker, + requested_state: UserRequestedState, + current_state: SchedulerServiceState, + check_status_after: float, + service_status_task_uid: TaskUID | None, +): + tracked_model = TrackedServiceModel( + dynamic_service_start=None, + user_id=None, + project_id=None, + requested_state=requested_state, + current_state=current_state, + service_status=faker.pystr(), + check_status_after=check_status_after, + service_status_task_uid=service_status_task_uid, + ) + + as_bytes = tracked_model.to_bytes() + assert as_bytes + assert TrackedServiceModel.from_bytes(as_bytes) == tracked_model + + +async def test_set_check_status_after_to(): + model = TrackedServiceModel( + dynamic_service_start=None, + user_id=None, + project_id=None, + requested_state=UserRequestedState.RUNNING, + ) + assert model.check_status_after < arrow.utcnow().timestamp() + + delay = timedelta(seconds=4) + + before = (arrow.utcnow() + delay).timestamp() + model.set_check_status_after_to(delay) + after = (arrow.utcnow() + delay).timestamp() + + assert model.check_status_after + assert before < model.check_status_after < after diff --git 
a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py new file mode 100644 index 00000000000..59739ddf8f6 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py @@ -0,0 +1,94 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +from uuid import uuid4 + +import pytest +from fastapi import FastAPI +from models_library.projects_nodes_io import NodeID +from pydantic import NonNegativeInt +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.utils import logged_gather +from settings_library.redis import RedisSettings +from simcore_service_dynamic_scheduler.services.service_tracker._models import ( + TrackedServiceModel, + UserRequestedState, +) +from simcore_service_dynamic_scheduler.services.service_tracker._setup import ( + get_tracker, +) +from simcore_service_dynamic_scheduler.services.service_tracker._tracker import Tracker + +pytest_simcore_core_services_selection = [ + "redis", +] + + +@pytest.fixture +def app_environment( + disable_rabbitmq_setup: None, + disable_deferred_manager_setup: None, + disable_notifier_setup: None, + app_environment: EnvVarsDict, + redis_service: RedisSettings, + remove_redis_data: None, +) -> EnvVarsDict: + return app_environment + + +@pytest.fixture +def tracker(app: FastAPI) -> Tracker: + return get_tracker(app) + + +async def test_tracker_workflow(tracker: Tracker): + node_id: NodeID = uuid4() + + # ensure does not already exist + result = await tracker.load(node_id) + assert result is None + + # node creation + model = TrackedServiceModel( + dynamic_service_start=None, + user_id=None, + project_id=None, + requested_state=UserRequestedState.RUNNING, + ) + await tracker.save(node_id, model) + + # check if exists + result = await tracker.load(node_id) + assert result == model + + # remove and check is missing + await tracker.delete(node_id) + result = await tracker.load(node_id) + assert result is None + + +@pytest.mark.parametrize("item_count", [100]) +async def test_tracker_listing(tracker: Tracker, item_count: NonNegativeInt) -> None: + assert await tracker.all() == {} + + model_to_insert = TrackedServiceModel( + dynamic_service_start=None, + user_id=None, + project_id=None, + requested_state=UserRequestedState.RUNNING, + ) + + data_to_insert = {uuid4(): model_to_insert for _ in range(item_count)} + + await logged_gather( + *[tracker.save(k, v) for k, v in data_to_insert.items()], max_concurrency=100 + ) + + response = await tracker.all() + for key in response: + assert isinstance(key, NodeID) + assert response == data_to_insert + + +async def test_remove_missing_key_does_not_raise_error(tracker: Tracker): + await tracker.delete(uuid4()) diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py new file mode 100644 index 00000000000..e3d6acffa39 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py @@ -0,0 +1,415 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +import json +import re +from collections.abc import AsyncIterable, Callable +from copy import deepcopy +from typing import Any +from unittest.mock import AsyncMock +from uuid import uuid4 + +import pytest +import respx +from fastapi import FastAPI, status +from fastapi.encoders import 
jsonable_encoder +from httpx import Request, Response +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStart, + DynamicServiceStop, +) +from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects_nodes_io import NodeID +from pydantic import NonNegativeInt +from pytest_mock import MockerFixture +from pytest_simcore.helpers.typing_env import EnvVarsDict +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings +from simcore_service_dynamic_scheduler.services.service_tracker import ( + get_all_tracked_services, + set_request_as_running, + set_request_as_stopped, +) +from simcore_service_dynamic_scheduler.services.status_monitor import _monitor +from simcore_service_dynamic_scheduler.services.status_monitor._deferred_get_status import ( + DeferredGetStatus, +) +from simcore_service_dynamic_scheduler.services.status_monitor._monitor import Monitor +from simcore_service_dynamic_scheduler.services.status_monitor._setup import get_monitor +from tenacity import AsyncRetrying +from tenacity.retry import retry_if_exception_type +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed + +pytest_simcore_core_services_selection = [ + "rabbit", + "redis", +] + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + rabbit_service: RabbitSettings, + redis_service: RedisSettings, + remove_redis_data: None, +) -> EnvVarsDict: + return app_environment + + +_DEFAULT_NODE_ID: NodeID = uuid4() + + +def _add_to_dict(dict_data: dict, entries: list[tuple[str, Any]]) -> None: + for key, data in entries: + assert key in dict_data + dict_data[key] = data + + +def _get_node_get_with(state: str, node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGet: + dict_data = deepcopy(NodeGet.Config.schema_extra["examples"][1]) + _add_to_dict( + dict_data, + [ + ("service_state", state), + ("service_uuid", f"{node_id}"), + ], + ) + return NodeGet.parse_obj(dict_data) + + +def _get_dynamic_service_get_legacy_with( + state: str, node_id: NodeID = _DEFAULT_NODE_ID +) -> DynamicServiceGet: + dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][0]) + _add_to_dict( + dict_data, + [ + ("state", state), + ("uuid", f"{node_id}"), + ("node_uuid", f"{node_id}"), + ], + ) + return DynamicServiceGet.parse_obj(dict_data) + + +def _get_dynamic_service_get_new_style_with( + state: str, node_id: NodeID = _DEFAULT_NODE_ID +) -> DynamicServiceGet: + dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][1]) + _add_to_dict( + dict_data, + [ + ("state", state), + ("uuid", f"{node_id}"), + ("node_uuid", f"{node_id}"), + ], + ) + return DynamicServiceGet.parse_obj(dict_data) + + +def _get_node_get_idle(node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGetIdle: + dict_data = NodeGetIdle.Config.schema_extra["example"] + _add_to_dict( + dict_data, + [ + ("service_uuid", f"{node_id}"), + ], + ) + return NodeGetIdle.parse_obj(dict_data) + + +class _ResponseTimeline: + def __init__( + self, timeline: list[NodeGet | DynamicServiceGet | NodeGetIdle] + ) -> None: + self._timeline = timeline + + self._client_access_history: dict[NodeID, NonNegativeInt] = {} + + @property + def entries(self) -> list[NodeGet | DynamicServiceGet | NodeGetIdle]: + return self._timeline + + def __len__(self) -> int: + return len(self._timeline) + + def get_status(self, node_id: NodeID) -> NodeGet | 
DynamicServiceGet | NodeGetIdle: + if node_id not in self._client_access_history: + self._client_access_history[node_id] = 0 + + # always return node idle when timeline finished playing + if self._client_access_history[node_id] >= len(self._timeline): + return _get_node_get_idle() + + status = self._timeline[self._client_access_history[node_id]] + self._client_access_history[node_id] += 1 + return status + + +async def _assert_call_to( + deferred_status_spies: dict[str, AsyncMock], *, method: str, count: NonNegativeInt +) -> None: + async for attempt in AsyncRetrying( + reraise=True, + stop=stop_after_delay(1), + wait=wait_fixed(0.01), + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + call_count = deferred_status_spies[method].call_count + assert ( + call_count == count + ), f"Received calls {call_count} != {count} (expected) to '{method}'" + + +async def _assert_result( + deferred_status_spies: dict[str, AsyncMock], + *, + timeline: list[NodeGet | DynamicServiceGet | NodeGetIdle], +) -> None: + async for attempt in AsyncRetrying( + reraise=True, + stop=stop_after_delay(1), + wait=wait_fixed(0.01), + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + + assert deferred_status_spies["on_result"].call_count == len(timeline) + assert [ + x.args[0] for x in deferred_status_spies["on_result"].call_args_list + ] == timeline + + +async def _assert_notification_count( + mock: AsyncMock, expected_count: NonNegativeInt +) -> None: + async for attempt in AsyncRetrying( + reraise=True, + stop=stop_after_delay(1), + wait=wait_fixed(0.01), + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert mock.call_count == expected_count + + +@pytest.fixture +async def mock_director_v2_status( + app: FastAPI, response_timeline: _ResponseTimeline +) -> AsyncIterable[None]: + def _side_effect_node_status_response(request: Request) -> Response: + node_id = NodeID(f"{request.url}".split("/")[-1]) + + service_status = response_timeline.get_status(node_id) + + if isinstance(service_status, NodeGet): + return Response( + status.HTTP_200_OK, + text=json.dumps(jsonable_encoder({"data": service_status.dict()})), + ) + if isinstance(service_status, DynamicServiceGet): + return Response(status.HTTP_200_OK, text=service_status.json()) + if isinstance(service_status, NodeGetIdle): + return Response(status.HTTP_404_NOT_FOUND) + + raise TypeError + + with respx.mock( + base_url=app.state.settings.DYNAMIC_SCHEDULER_DIRECTOR_V2_SETTINGS.api_base_url, + assert_all_called=False, + assert_all_mocked=True, + ) as mock: + mock.get(re.compile(r"/dynamic_services/([\w-]+)")).mock( + side_effect=_side_effect_node_status_response + ) + yield + + +@pytest.fixture +def monitor(mock_director_v2_status: None, app: FastAPI) -> Monitor: + return get_monitor(app) + + +@pytest.fixture +def deferred_status_spies(mocker: MockerFixture) -> dict[str, AsyncMock]: + results: dict[str, AsyncMock] = {} + for method_name in ( + "start", + "on_result", + "on_created", + "run", + "on_finished_with_error", + ): + mock_method = mocker.AsyncMock(wraps=getattr(DeferredGetStatus, method_name)) + mocker.patch.object(DeferredGetStatus, method_name, mock_method) + results[method_name] = mock_method + + return results + + +@pytest.fixture +def remove_tracked_spy(mocker: MockerFixture) -> AsyncMock: + mock_method = mocker.AsyncMock( + wraps=_monitor.service_tracker.remove_tracked_service + ) + return mocker.patch.object( + _monitor.service_tracker, + 
_monitor.service_tracker.remove_tracked_service.__name__, + mock_method, + ) + + +@pytest.fixture +def node_id() -> NodeID: + return _DEFAULT_NODE_ID + + +@pytest.fixture +def mocked_notify_frontend(mocker: MockerFixture) -> AsyncMock: + return mocker.patch( + "simcore_service_dynamic_scheduler.services.status_monitor._deferred_get_status.notify_service_status_change" + ) + + +@pytest.fixture +def disable_status_monitor_background_task(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_dynamic_scheduler.services.status_monitor._monitor.Monitor.setup" + ) + + +@pytest.mark.parametrize( + "user_requests_running, response_timeline, expected_notification_count, remove_tracked_count", + [ + pytest.param( + True, + _ResponseTimeline([_get_node_get_with("running")]), + 1, + 0, + id="requested_running_state_changes_1_no_task_removal", + ), + pytest.param( + True, + _ResponseTimeline( + [_get_dynamic_service_get_legacy_with("running") for _ in range(10)] + ), + 1, + 0, + id="requested_running_state_changes_1_for_multiple_same_state_no_task_removal", + ), + pytest.param( + True, + _ResponseTimeline([_get_node_get_idle()]), + 1, + 0, + id="requested_running_state_idle_no_removal", + ), + pytest.param( + False, + _ResponseTimeline([_get_node_get_idle()]), + 1, + 1, + id="requested_stopped_state_idle_is_removed", + ), + pytest.param( + True, + _ResponseTimeline( + [ + *[_get_node_get_idle() for _ in range(10)], + _get_dynamic_service_get_new_style_with("pending"), + _get_dynamic_service_get_new_style_with("pulling"), + *[ + _get_dynamic_service_get_new_style_with("starting") + for _ in range(10) + ], + _get_dynamic_service_get_new_style_with("running"), + _get_dynamic_service_get_new_style_with("stopping"), + _get_dynamic_service_get_new_style_with("complete"), + _get_node_get_idle(), + ] + ), + 8, + 0, + id="requested_running_state_changes_8_no_removal", + ), + pytest.param( + False, + _ResponseTimeline( + [ + _get_dynamic_service_get_new_style_with("pending"), + _get_dynamic_service_get_new_style_with("pulling"), + *[ + _get_dynamic_service_get_new_style_with("starting") + for _ in range(10) + ], + _get_dynamic_service_get_new_style_with("running"), + _get_dynamic_service_get_new_style_with("stopping"), + _get_dynamic_service_get_new_style_with("complete"), + _get_node_get_idle(), + ] + ), + 7, + 1, + id="requested_stopped_state_changes_7_is_removed", + ), + ], +) +async def test_expected_calls_to_notify_frontend( # pylint:disable=too-many-arguments + disable_status_monitor_background_task: None, + mocked_notify_frontend: AsyncMock, + deferred_status_spies: dict[str, AsyncMock], + remove_tracked_spy: AsyncMock, + app: FastAPI, + monitor: Monitor, + node_id: NodeID, + user_requests_running: bool, + response_timeline: _ResponseTimeline, + expected_notification_count: NonNegativeInt, + remove_tracked_count: NonNegativeInt, + get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart], + get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop], +): + assert await get_all_tracked_services(app) == {} + + if user_requests_running: + await set_request_as_running(app, get_dynamic_service_start(node_id)) + else: + await set_request_as_stopped(app, get_dynamic_service_stop(node_id)) + + entries_in_timeline = len(response_timeline) + + for i in range(entries_in_timeline): + async for attempt in AsyncRetrying( + reraise=True, stop=stop_after_delay(10), wait=wait_fixed(0.1) + ): + with attempt: + # pylint:disable=protected-access + await monitor._worker_start_get_status_requests() # 
noqa: SLF001 + for method in ("start", "on_created", "on_result"): + await _assert_call_to( + deferred_status_spies, method=method, count=i + 1 + ) + + await _assert_call_to( + deferred_status_spies, method="run", count=entries_in_timeline + ) + await _assert_call_to( + deferred_status_spies, method="on_finished_with_error", count=0 + ) + + await _assert_result(deferred_status_spies, timeline=response_timeline.entries) + + await _assert_notification_count( + mocked_notify_frontend, expected_notification_count + ) + + async for attempt in AsyncRetrying( + reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.1) + ): + with attempt: + # pylint:disable=protected-access + await monitor._worker_start_get_status_requests() # noqa: SLF001 + assert remove_tracked_spy.call_count == remove_tracked_count diff --git a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py index feefc0c1aa4..eadb7c9ee03 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py +++ b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py @@ -21,6 +21,10 @@ @pytest.fixture def app_environment( disable_redis_setup: None, + disable_service_tracker_setup: None, + disable_deferred_manager_setup: None, + disable_notifier_setup: None, + disable_status_monitor_setup: None, app_environment: EnvVarsDict, rabbit_service: RabbitSettings, ) -> EnvVarsDict: diff --git a/services/dynamic-scheduler/tests/unit/test_services_redis.py b/services/dynamic-scheduler/tests/unit/test_services_redis.py index 7a7d9006385..059a17aeb0f 100644 --- a/services/dynamic-scheduler/tests/unit/test_services_redis.py +++ b/services/dynamic-scheduler/tests/unit/test_services_redis.py @@ -6,7 +6,7 @@ from fastapi import FastAPI from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.redis import RedisSettings -from simcore_service_dynamic_scheduler.services.redis import get_redis_client +from simcore_service_dynamic_scheduler.services.redis import get_all_redis_clients pytest_simcore_core_services_selection = [ "redis", @@ -16,6 +16,9 @@ @pytest.fixture def app_environment( disable_rabbitmq_setup: None, + disable_deferred_manager_setup: None, + disable_notifier_setup: None, + disable_status_monitor_setup: None, app_environment: EnvVarsDict, redis_service: RedisSettings, ) -> EnvVarsDict: @@ -23,5 +26,6 @@ def app_environment( async def test_health(app: FastAPI): - redis_client = get_redis_client(app) - assert await redis_client.ping() is True + redis_clients = get_all_redis_clients(app) + for redis_client in redis_clients.values(): + assert await redis_client.ping() is True diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index 58b06af19e4..c7b1ad4629a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -57,6 +57,7 @@ ServiceWaitingForManualInterventionError, ServiceWasNotFoundError, ) +from servicelib.services_utils import get_status_as_dict from simcore_postgres_database.models.users import UserRole from .._meta import API_VTAG as VTAG @@ -208,11 +209,7 @@ async def get_node(request: web.Request) -> web.Response: ) ) - return envelope_json_response( - service_data.dict(by_alias=True) - if isinstance(service_data, DynamicServiceGet) - else service_data.dict() - ) + return 
envelope_json_response(get_status_as_dict(service_data)) @routes.patch( diff --git a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py index 0823f52b1b2..6308141d254 100644 --- a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py +++ b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py @@ -55,7 +55,7 @@ def dynamic_service_start() -> DynamicServiceStart: @pytest.mark.parametrize( "expected_response", [ - NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]), + *[NodeGet.parse_obj(x) for x in NodeGet.Config.schema_extra["examples"]], NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]), DynamicServiceGet.parse_obj( DynamicServiceGet.Config.schema_extra["examples"][0] @@ -98,7 +98,7 @@ async def test_get_service_status_raises_rpc_server_error( @pytest.mark.parametrize( "expected_response", [ - NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]), + *[NodeGet.parse_obj(x) for x in NodeGet.Config.schema_extra["examples"]], DynamicServiceGet.parse_obj( DynamicServiceGet.Config.schema_extra["examples"][0] ), diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 8613fbc8319..d34adace8ae 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -1036,7 +1036,7 @@ async def test_project_node_lifetime( # noqa: PLR0915 project_id=user_project["uuid"], node_id=dynamic_node_id ) - node_sample = deepcopy(NodeGet.Config.schema_extra["example"]) + node_sample = deepcopy(NodeGet.Config.schema_extra["examples"][1]) mocked_director_v2_api[ "dynamic_scheduler.api.get_dynamic_service" ].return_value = NodeGet.parse_obj( From 5457f0de39270074324a41bc78ba73e5cdd8473c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 24 Sep 2024 15:41:12 +0200 Subject: [PATCH 022/104] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Fix:=20Do?= =?UTF-8?q?=20not=20listen=20to=20output=20related=20backend=20updates=20i?= =?UTF-8?q?f=20the=20node=20is=20a=20frontend=20node=20(#6434)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Dustin Kaiser <8209087+mrnicegyu11@users.noreply.github.com> --- .../client/source/class/osparc/data/model/Study.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index 4b7c879bb53..c62549bcc63 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -435,6 +435,7 @@ qx.Class.define("osparc.data.model.Study", { }); }, + // Used for updating some node data through the "nodeUpdated" websocket event nodeUpdated: function(nodeUpdatedData) { const studyId = nodeUpdatedData["project_id"]; if (studyId !== this.getUuid()) { @@ -444,7 +445,10 @@ qx.Class.define("osparc.data.model.Study", { const nodeData = nodeUpdatedData["data"]; const workbench = this.getWorkbench(); const node = workbench.getNode(nodeId); - if (node && nodeData) { + // Do not listen to output related backend updates if the node is a frontend node. 
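+          // (i.e. nodes for which osparc.data.model.Node.isFrontend() returns true,
+          // such as a File Picker)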
+ // The frontend controls its output values, progress and states. + // If a File Picker is uploading a file, the backend could override the current state with some older state. + if (node && nodeData && !osparc.data.model.Node.isFrontend(node)) { node.setOutputData(nodeData.outputs); if ("progress" in nodeData) { const progress = Number.parseInt(nodeData["progress"]); From 0b7f87e3561149994eba10be0444c218047922a6 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 24 Sep 2024 17:14:42 +0200 Subject: [PATCH 023/104] =?UTF-8?q?=E2=9C=85=20e2e:=20new=20workflow=20to?= =?UTF-8?q?=20test=20`tiplite`=20(#6388)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../helpers/pydantic_extension.py | 34 +++++ .../source/class/osparc/product/TIPTeaser.js | 5 + tests/e2e-playwright/.gitignore | 4 +- tests/e2e-playwright/Makefile | 6 +- tests/e2e-playwright/README.md | 28 +++- tests/e2e-playwright/tests/conftest.py | 52 ++++--- .../tests/sim4life/test_sim4life.py | 8 +- .../tests/sim4life/test_template.py | 4 +- .../e2e-playwright/tests/tip/test_ti_plan.py | 139 ++++++++++++------ 9 files changed, 205 insertions(+), 75 deletions(-) create mode 100644 packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py b/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py new file mode 100644 index 00000000000..c1252ed8bb4 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py @@ -0,0 +1,34 @@ +from pydantic import SecretStr + + +def _mask(value): + """ + Mask the password, showing only the first and last characters + or *** if very short passwords + """ + if len(value) > 2: + masked_value = value[0] + "*" * (len(value) - 2) + value[-1] + else: + # In case of very short passwords + masked_value = "*" * len(value) + return masked_value + + +def _hash(value): + """Uses hash number to mask the password""" + return f"hash:{hash(value)}" + + +class Secret4TestsStr(SecretStr): + """Prints a hint of the secret + TIP: Can be handy for testing + """ + + def _display(self) -> str | bytes: + # SEE overrides _SecretBase._display + value = self.get_secret_value() + return _mask(value) if value else "" + + +assert str(Secret4TestsStr("123456890")) == "1*******0" +assert "1*******0" in repr(Secret4TestsStr("123456890")) diff --git a/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js b/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js index d71ac819f3b..524d5795c7e 100644 --- a/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js +++ b/services/static-webserver/client/source/class/osparc/product/TIPTeaser.js @@ -35,6 +35,11 @@ qx.Class.define("osparc.product.TIPTeaser", { }); this.getChildControl("teaser-text"); + + osparc.utils.Utils.setIdToWidget(this, "tipTeaserWindow"); + + const closeBtn = this.getChildControl("close-button"); + osparc.utils.Utils.setIdToWidget(closeBtn, "tipTeaserWindowCloseBtn"); }, statics: { diff --git a/tests/e2e-playwright/.gitignore b/tests/e2e-playwright/.gitignore index cf83940dd82..23b36998635 100644 --- a/tests/e2e-playwright/.gitignore +++ b/tests/e2e-playwright/.gitignore @@ -1,5 +1,5 @@ -test-results +.e2e-playwright-*.txt assets report.html -.e2e-playwright-*.txt report.xml +test-results diff --git a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index 
4c8984e9aa5..88a15a845d1 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -117,6 +117,7 @@ CLASSIC_TIP_INPUT_FILE := .e2e-playwright-classictip-env.txt $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L_INPUT_FILE): @read -p "Enter your product URL: " PRODUCT_URL; \ read -p "Is the product billable [y/n]: " BILLABLE; \ + read -p "Is the product lite [y/n]: " IS_LITE; \ read -p "Is the test running in autoscaled deployment [y/n]: " AUTOSCALED; \ read -p "Enter your username: " USER_NAME; \ read -s -p "Enter your password: " PASSWORD; echo ""; \ @@ -124,6 +125,9 @@ $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L if [ "$$BILLABLE" = "y" ]; then \ echo "--product-billable" >> $@; \ fi; \ + if [ "$$IS_LITE" = "y" ]; then \ + echo "--product-lite" >> $@; \ + fi; \ if [ "$$AUTOSCALED" = "y" ]; then \ echo "--autoscaled" >> $@; \ fi; \ @@ -183,4 +187,4 @@ define run_test_on_chrome endef clean: - @rm -rf $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) + -@rm -rf $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) diff --git a/tests/e2e-playwright/README.md b/tests/e2e-playwright/README.md index 0caaa6c6adc..9c8e996b84d 100644 --- a/tests/e2e-playwright/README.md +++ b/tests/e2e-playwright/README.md @@ -1,5 +1,11 @@ + + +## Usage + ### Auto generate new test -`playwright codegen sim4life.io` +``` +playwright codegen sim4life.io +``` ### Run test locally with headed mode ``` @@ -7,16 +13,24 @@ pytest -s tests/sim4life.py --headed --browser chromium --product-billable --pr ``` ### Check test results output -`playwright show-trace test-results/tests-sim4life-py-test-billable-sim4life-chromium/trace.zip` +``` +playwright show-trace test-results/tests-sim4life-py-test-billable-sim4life-chromium/trace.zip +``` ### Run debug mode -`PWDEBUG=1 pytest -s tests/sim4life.py` +``` +PWDEBUG=1 pytest -s tests/sim4life.py +``` ### Run test in different browsers -`pytest -s tests/sim4life.py --tracing on --html=report.html --browser chromium --browser firefox` +``` +pytest -s tests/sim4life.py --tracing on --html=report.html --browser chromium --browser firefox +``` -### or in chrome/msedge -`pytest -s tests/sim4life.py --tracing on --html=report.html --browser-channel chrome` +### or in chrome/ms-edge +``` +pytest -s tests/sim4life.py --tracing on --html=report.html --browser-channel chrome +``` -### Runs in CI +## e2e CI - https://git.speag.com/oSparc/e2e-backend diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index 997ac6b7138..1136e4035cb 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -22,7 +22,7 @@ from playwright.sync_api import APIRequestContext, BrowserContext, Page, WebSocket from playwright.sync_api._generated import Playwright from pydantic import AnyUrl, TypeAdapter -from pytest import Item +from pytest_simcore.helpers.faker_factories import DEFAULT_TEST_PASSWORD from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.playwright import ( MINUTE, @@ -36,6 +36,7 @@ decode_socketio_42_message, web_socket_default_log_handler, ) +from pytest_simcore.helpers.pydantic_extension import Secret4TestsStr _PROJECT_CLOSING_TIMEOUT: Final[int] = 10 * MINUTE _OPENING_NEW_EMPTY_PROJECT_MAX_WAIT_TIME: Final[int] = 30 * SECOND @@ -79,6 +80,12 @@ def pytest_addoption(parser: pytest.Parser) -> None: default=False, help="Whether product is billable or 
not", ) + group.addoption( + "--product-lite", + action="store_true", + default=False, + help="Whether product is lite version or not", + ) group.addoption( "--autoscaled", action="store_true", @@ -116,7 +123,7 @@ def pytest_addoption(parser: pytest.Parser) -> None: # Dictionary to store start times of tests -_test_start_times = {} +_test_start_times: dict[str, datetime.datetime] = {} def pytest_runtest_setup(item): @@ -144,7 +151,7 @@ def _construct_graylog_url( return f"{monitoring_url}/graylog/search?{query}" -def pytest_runtest_makereport(item: Item, call): +def pytest_runtest_makereport(item: pytest.Item, call): """ Hook to add extra information when a test fails. """ @@ -171,7 +178,6 @@ def pytest_runtest_makereport(item: Item, call): ) diagnostics["duration"] = str(end_time - start_time) - # Print the diagnostics report with log_context( logging.WARNING, f"ℹ️ Diagnostics report for {test_name} ---", # noqa: RUF001 @@ -217,23 +223,29 @@ def user_name(request: pytest.FixtureRequest, auto_register: bool, faker: Faker) @pytest.fixture def user_password( request: pytest.FixtureRequest, auto_register: bool, faker: Faker -) -> str: +) -> Secret4TestsStr: if auto_register: - return faker.password(length=12) + return Secret4TestsStr(DEFAULT_TEST_PASSWORD) if osparc_password := request.config.getoption("--password"): assert isinstance(osparc_password, str) - return osparc_password - return os.environ["USER_PASSWORD"] + return Secret4TestsStr(osparc_password) + return Secret4TestsStr(os.environ["USER_PASSWORD"]) @pytest.fixture(scope="session") -def product_billable(request: pytest.FixtureRequest) -> bool: +def is_product_billable(request: pytest.FixtureRequest) -> bool: billable = request.config.getoption("--product-billable") return TypeAdapter(bool).validate_python(billable) @pytest.fixture(scope="session") -def autoscaled(request: pytest.FixtureRequest) -> bool: +def is_product_lite(request: pytest.FixtureRequest) -> bool: + enabled = request.config.getoption("--product-lite") + return TypeAdapter(bool).validate_python(enabled) + + +@pytest.fixture(scope="session") +def is_autoscaled(request: pytest.FixtureRequest) -> bool: autoscaled = request.config.getoption("--autoscaled") return TypeAdapter(bool).validate_python(autoscaled) @@ -280,7 +292,7 @@ def register( page: Page, product_url: AnyUrl, user_name: str, - user_password: str, + user_password: Secret4TestsStr, ) -> Callable[[], AutoRegisteredUser]: def _do() -> AutoRegisteredUser: with log_context( @@ -297,11 +309,13 @@ def _do() -> AutoRegisteredUser: for pass_id in ["registrationPass1Fld", "registrationPass2Fld"]: user_password_box = page.get_by_test_id(pass_id) user_password_box.click() - user_password_box.fill(user_password) + user_password_box.fill(user_password.get_secret_value()) with page.expect_response(re.compile(r"/auth/register")) as response_info: page.get_by_test_id("registrationSubmitBtn").click() assert response_info.value.ok, response_info.value.json() - return AutoRegisteredUser(user_email=user_name, password=user_password) + return AutoRegisteredUser( + user_email=user_name, password=user_password.get_secret_value() + ) return _do @@ -311,7 +325,7 @@ def log_in_and_out( page: Page, product_url: AnyUrl, user_name: str, - user_password: str, + user_password: Secret4TestsStr, auto_register: bool, register: Callable[[], AutoRegisteredUser], ) -> Iterator[WebSocket]: @@ -352,7 +366,7 @@ def log_in_and_out( _user_email_box.fill(user_name) _user_password_box = page.get_by_test_id("loginPasswordFld") 
_user_password_box.click() - _user_password_box.fill(user_password) + _user_password_box.fill(user_password.get_secret_value()) with page.expect_response(re.compile(r"/login")) as response_info: page.get_by_test_id("loginSubmitBtn").click() assert response_info.value.ok, f"{response_info.value.json()}" @@ -392,7 +406,7 @@ def log_in_and_out( def create_new_project_and_delete( page: Page, log_in_and_out: WebSocket, - product_billable: bool, + is_product_billable: bool, api_request_context: APIRequestContext, product_url: AnyUrl, ) -> Iterator[Callable[[tuple[RunningState], bool], dict[str, Any]]]: @@ -411,7 +425,7 @@ def _( ), "misuse of this fixture! only 1 study can be opened at a time. Otherwise please modify the fixture" with log_context( logging.INFO, - f"Open project in {product_url=} as {product_billable=}", + f"Open project in {product_url=} as {is_product_billable=}", ) as ctx: waiter = SocketIOProjectStateUpdatedWaiter(expected_states=expected_states) timeout = ( @@ -473,7 +487,7 @@ def wait_for_done(response): ... else: open_button.click() - if product_billable: + if is_product_billable: # Open project with default resources page.get_by_test_id("openWithResources").click() project_data = response_info.value.json() @@ -512,7 +526,7 @@ def wait_for_done(response): for project_uuid in created_project_uuids: with log_context( logging.INFO, - f"Delete project with {project_uuid=} in {product_url=} as {product_billable=}", + f"Delete project with {project_uuid=} in {product_url=} as {is_product_billable=}", ): response = api_request_context.delete( f"{product_url}v0/projects/{project_uuid}" diff --git a/tests/e2e-playwright/tests/sim4life/test_sim4life.py b/tests/e2e-playwright/tests/sim4life/test_sim4life.py index b993f262181..96c361bb546 100644 --- a/tests/e2e-playwright/tests/sim4life/test_sim4life.py +++ b/tests/e2e-playwright/tests/sim4life/test_sim4life.py @@ -31,7 +31,7 @@ def test_sim4life( log_in_and_out: WebSocket, service_key: str, use_plus_button: bool, - autoscaled: bool, + is_autoscaled: bool, check_videostreaming: bool, ): if use_plus_button: @@ -49,7 +49,11 @@ def test_sim4life( assert len(node_ids) == 1, "Expected 1 node in the workbench!" resp = wait_for_launched_s4l( - page, node_ids[0], log_in_and_out, autoscaled=autoscaled, copy_workspace=False + page, + node_ids[0], + log_in_and_out, + autoscaled=is_autoscaled, + copy_workspace=False, ) s4l_websocket = resp["websocket"] with web_socket_default_log_handler(s4l_websocket): diff --git a/tests/e2e-playwright/tests/sim4life/test_template.py b/tests/e2e-playwright/tests/sim4life/test_template.py index a4f104a6291..fb9b260c992 100644 --- a/tests/e2e-playwright/tests/sim4life/test_template.py +++ b/tests/e2e-playwright/tests/sim4life/test_template.py @@ -24,7 +24,7 @@ def test_template( create_project_from_template_dashboard: Callable[[str], dict[str, Any]], log_in_and_out: WebSocket, template_id: str, - autoscaled: bool, + is_autoscaled: bool, check_videostreaming: bool, ): project_data = create_project_from_template_dashboard(template_id) @@ -37,7 +37,7 @@ def test_template( assert len(node_ids) == 1, "Expected 1 node in the workbench!" 
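    # wait_for_launched_s4l presumably polls until the S4L service responds, using
    # a more generous startup timeout when is_autoscaled is set (a fresh cluster
    # node may first need to be provisioned).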
resp = wait_for_launched_s4l( - page, node_ids[0], log_in_and_out, autoscaled=autoscaled, copy_workspace=True + page, node_ids[0], log_in_and_out, autoscaled=is_autoscaled, copy_workspace=True ) s4l_websocket = resp["websocket"] with web_socket_default_log_handler(s4l_websocket): diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index e721b7f5ab3..cac6bb5b56d 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -86,21 +86,41 @@ def __call__(self, message: str) -> bool: return False -def test_tip( # noqa: PLR0915 +def test_classic_ti_plan( # noqa: PLR0915 page: Page, - create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]], log_in_and_out: WebSocket, - autoscaled: bool, + is_autoscaled: bool, + is_product_lite: bool, + create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]], ): + with log_context(logging.INFO, "Checking 'Access TIP' teaser"): + page.get_by_test_id("userMenuBtn").click() + page.get_by_test_id("userMenuAccessTIPBtn").click() + assert page.get_by_test_id("tipTeaserWindow").is_visible() + page.get_by_test_id("tipTeaserWindowCloseBtn").click() + + # press + button project_data = create_tip_plan_from_dashboard("newTIPlanButton") assert "workbench" in project_data, "Expected workbench to be in project data!" assert isinstance( project_data["workbench"], dict ), "Expected workbench to be a dict!" node_ids: list[str] = list(project_data["workbench"]) - assert len(node_ids) >= 3, "Expected at least 3 nodes in the workbench!" - with log_context(logging.INFO, "Electrode Selector step") as ctx: + if is_product_lite: + expected_number_of_steps = 2 + assert ( + len(node_ids) == expected_number_of_steps + ), f"Expected {expected_number_of_steps=} in the app-mode" + else: + expected_number_of_steps = 3 + assert ( + len(node_ids) >= expected_number_of_steps + ), f"Expected at least {expected_number_of_steps} nodes in the workbench" + + with log_context( + logging.INFO, "Electrode Selector step (1/%s)", expected_number_of_steps + ) as ctx: # NOTE: creating the plan auto-triggers the first service to start, which might already triggers socket events electrode_selector_iframe = wait_for_service_running( page=page, @@ -108,7 +128,7 @@ def test_tip( # noqa: PLR0915 websocket=log_in_and_out, timeout=( _ELECTRODE_SELECTOR_AUTOSCALED_MAX_STARTUP_TIME - if autoscaled + if is_autoscaled else _ELECTRODE_SELECTOR_MAX_STARTUP_TIME ), press_start_button=False, @@ -149,13 +169,15 @@ def test_tip( # noqa: PLR0915 response_body = response.json() ctx.logger.info("the following output was generated: %s", response_body) - with log_context(logging.INFO, "Classic TI step") as ctx: + with log_context( + logging.INFO, "Classic TI step (2/%s)", expected_number_of_steps + ) as ctx: with page.expect_websocket( _JLabWaitForWebSocket(), timeout=_OUTER_EXPECT_TIMEOUT_RATIO * ( _JLAB_AUTOSCALED_MAX_STARTUP_TIME - if autoscaled + if is_autoscaled else _JLAB_MAX_STARTUP_MAX_TIME ), ) as ws_info: @@ -165,7 +187,7 @@ def test_tip( # noqa: PLR0915 websocket=log_in_and_out, timeout=( _JLAB_AUTOSCALED_MAX_STARTUP_TIME - if autoscaled + if is_autoscaled else _JLAB_MAX_STARTUP_MAX_TIME ), press_start_button=False, @@ -193,43 +215,76 @@ def test_tip( # noqa: PLR0915 ) with log_context(logging.INFO, "Create report"): + ti_iframe.get_by_role("button", name="Load Analysis").click() page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) ti_iframe.get_by_role("button", name="Load").nth(1).click() 
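        # (no completion event is awaited here: each Load/Export click presumably
        # starts work inside the JupyterLab iframe, so a fixed
        # _JLAB_REPORTING_MAX_TIME pause follows every action)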
page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) - ti_iframe.get_by_role("button", name="Add to Report (0)").nth(0).click() - page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) - ti_iframe.get_by_role("button", name="Export to S4L").click() - page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) - ti_iframe.get_by_role("button", name="Add to Report (1)").nth(1).click() - page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) - ti_iframe.get_by_role("button", name="Export Report").click() - page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + + if is_product_lite: + assert ( + not ti_iframe.get_by_role("button", name="Add to Report (0)") + .nth(0) + .is_enabled() + ) + assert not ti_iframe.get_by_role( + "button", name="Export to S4L" + ).is_enabled() + assert not ti_iframe.get_by_role( + "button", name="Export Report" + ).is_enabled() + + else: + ti_iframe.get_by_role("button", name="Add to Report (0)").nth(0).click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Export to S4L").click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Add to Report (1)").nth(1).click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) + ti_iframe.get_by_role("button", name="Export Report").click() + page.wait_for_timeout(_JLAB_REPORTING_MAX_TIME) with log_context(logging.INFO, "Check outputs"): - expected_outputs = ["output_1.zip", "TIP_report.pdf", "results.csv"] - text_on_output_button = f"Outputs ({len(expected_outputs)})" - page.get_by_test_id("outputsBtn").get_by_text(text_on_output_button).click() + if is_product_lite: + expected_outputs = ["results.csv"] + text_on_output_button = f"Outputs ({len(expected_outputs)})" + page.get_by_test_id("outputsBtn").get_by_text( + text_on_output_button + ).click() - with log_context(logging.INFO, "Exposure Analysis step"): - with expected_service_running( - page=page, - node_id=node_ids[2], - websocket=log_in_and_out, - timeout=( - _POST_PRO_AUTOSCALED_MAX_STARTUP_TIME - if autoscaled - else _POST_PRO_MAX_STARTUP_TIME - ), - press_start_button=False, - ) as service_running: - app_mode_trigger_next_app(page) - s4l_postpro_iframe = service_running.iframe_locator - assert s4l_postpro_iframe - - with log_context(logging.INFO, "Post process"): - # click on the postpro mode button - s4l_postpro_iframe.get_by_test_id("mode-button-postro").click() - # click on the surface viewer - s4l_postpro_iframe.get_by_test_id("tree-item-ti_field.cache").click() - s4l_postpro_iframe.get_by_test_id("tree-item-SurfaceViewer").nth(0).click() + else: + expected_outputs = ["output_1.zip", "TIP_report.pdf", "results.csv"] + text_on_output_button = f"Outputs ({len(expected_outputs)})" + page.get_by_test_id("outputsBtn").get_by_text( + text_on_output_button + ).click() + + if is_product_lite: + assert expected_number_of_steps == 2 + else: + with log_context( + logging.INFO, "Exposure Analysis step (3/%s)", expected_number_of_steps + ): + with expected_service_running( + page=page, + node_id=node_ids[2], + websocket=log_in_and_out, + timeout=( + _POST_PRO_AUTOSCALED_MAX_STARTUP_TIME + if is_autoscaled + else _POST_PRO_MAX_STARTUP_TIME + ), + press_start_button=False, + ) as service_running: + app_mode_trigger_next_app(page) + s4l_postpro_iframe = service_running.iframe_locator + assert s4l_postpro_iframe + + with log_context(logging.INFO, "Post process"): + # click on the postpro mode button + s4l_postpro_iframe.get_by_test_id("mode-button-postro").click() + # click on the surface viewer + 
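+                    # (ti_field.cache presumably holds the precomputed TI field;
+                    # selecting it and then a SurfaceViewer renders that field)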
s4l_postpro_iframe.get_by_test_id("tree-item-ti_field.cache").click() + s4l_postpro_iframe.get_by_test_id("tree-item-SurfaceViewer").nth( + 0 + ).click() From 83c79c6d91bf56726b98fa546bf0bcf4ffab179a Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 24 Sep 2024 18:16:17 +0200 Subject: [PATCH 024/104] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20App=20mode:?= =?UTF-8?q?=20Scrollable=20Instructions=20(#6430)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/CookiePolicy.js | 2 +- .../class/osparc/auth/ui/RequestAccount.js | 11 +++- .../osparc/node/slideshow/BaseNodeView.js | 12 ++-- .../osparc/product/quickStart/tis/Slides.js | 2 +- .../class/osparc/ui/markdown/Markdown.js | 59 +++++++++++++++---- 5 files changed, 65 insertions(+), 21 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/CookiePolicy.js b/services/static-webserver/client/source/class/osparc/CookiePolicy.js index 3d3b6d7d21c..7a0327c09f6 100644 --- a/services/static-webserver/client/source/class/osparc/CookiePolicy.js +++ b/services/static-webserver/client/source/class/osparc/CookiePolicy.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.CookiePolicy", { return link; }, - getZMTEULALink: function(linkText = "end users license agreement (EULA)") { + getZMTEULALink: function(linkText = "end-users license agreement (EULA)") { const color = qx.theme.manager.Color.getInstance().resolve("text"); const link = `${linkText}`; return link; diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js index 22035158f76..e47c78d4e53 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js @@ -129,7 +129,8 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { rich: true }); country.add(cItem); - }) + }); + // preselect fetch("https://ipapi.co/json") .then(res => res.json()) .then(data => { @@ -137,6 +138,12 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { if (countryFound) { country.setSelection([countryFound]) } + }) + .catch(err => { + console.error(err); + const emptyItem = new qx.ui.form.ListItem("", null, ""); + country.add(emptyItem); + country.setSelection([emptyItem]); }); this._form.add(country, this.tr("Country"), null, "country"); @@ -321,7 +328,7 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { // Eula link if (osparc.product.Utils.getProductName() !== "osparc") { - const eulaLink = osparc.CookiePolicy.getZMTEULALink("end users license agreement (EULA)"); + const eulaLink = osparc.CookiePolicy.getZMTEULALink("end-users license agreement (EULA)"); const eulaText = "I accept the " + eulaLink + " and I will use the product in accordance with it"; const eula = new qx.ui.form.CheckBox().set({ required: true, diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js index 72006c173fe..84acc271802 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js @@ -235,20 +235,20 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { } const desc = this.getNode().getSlideshowInstructions(); if (desc) { - const descView = 
new osparc.ui.markdown.Markdown().set({ + const markdownInstructions = new osparc.ui.markdown.Markdown().set({ value: desc, padding: 3, font: "text-14" }); - const scrollContainer = new qx.ui.container.Scroll(); - scrollContainer.add(descView); const title = this.tr("Instructions") + " - " + this.getNode().getLabel(); - const width = 500; - const height = 500; - const win = this.__instructionsWindow = osparc.ui.window.Window.popUpInWindow(scrollContainer, title, width, height).set({ + const width = 600; + const minHeight = 200; + const win = this.__instructionsWindow = osparc.ui.window.Window.popUpInWindow(markdownInstructions, title, width, minHeight).set({ modal: false, clickAwayClose: false }); + markdownInstructions.addListener("resized", () => win.center()); + win.getContentElement().setStyles({ "border-color": qx.theme.manager.Color.getInstance().resolve("strong-main") }); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Slides.js b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Slides.js index ece202ab13b..4324c5afaad 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Slides.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Slides.js @@ -26,7 +26,7 @@ qx.Class.define("osparc.product.quickStart.tis.Slides", { footerLinks: function() { const footerLinks = []; - const videoText = osparc.utils.Utils.createHTMLLink("TIP video", "https://youtu.be/-ZE6yOJ3ipw"); + const videoText = osparc.utils.Utils.createHTMLLink("TIP videos", "https://www.youtube.com/playlist?list=PLcJQYcVCSqDu5gXnJj-_vS_spGhZOe-jF"); const videoLabel = new qx.ui.basic.Label(videoText).set({ textAlign: "center", rich : true diff --git a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js index 69faf56e827..70fffb4c3ac 100644 --- a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js +++ b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js @@ -71,6 +71,10 @@ qx.Class.define("osparc.ui.markdown.Markdown", { } }, + events: { + "resized": "qx.event.type.Event", + }, + members: { __loadMarked: null, /** @@ -138,7 +142,26 @@ qx.Class.define("osparc.ui.markdown.Markdown", { } else { this.setMinHeight(elemHeight); } + + const elemMaxWidth = this.__getChildrenElementMaxWidth(domElement.children); + if (this.getMaxWidth() && elemMaxWidth > this.getMaxWidth()) { + this.setWidth(elemMaxWidth); + } else { + this.setMinWidth(elemMaxWidth); + } } + this.fireEvent("resized"); + }, + + __getDomElement: function() { + if (!this.getContentElement || this.getContentElement() === null) { + return null; + } + const domElement = this.getContentElement().getDomElement(); + if (domElement) { + return domElement; + } + return null; }, __getChildrenElementHeight: function(children) { @@ -155,23 +178,37 @@ qx.Class.define("osparc.ui.markdown.Markdown", { if (this.getNoMargin()) { element.style.marginTop = 0; element.style.marginBottom = 0; - const size = qx.bom.element.Dimension.getSize(element); + const size = this.__getElementSize(element); return size.height; } - const size = qx.bom.element.Dimension.getSize(element); + const size = this.__getElementSize(element); // add padding - return size.height + 15; + return size.height + 20; }, - __getDomElement: function() { - if (!this.getContentElement || this.getContentElement() === null) { - return null; + 
__getChildrenElementMaxWidth: function(children) { + let maxWidth = 0; + for (let i=0; i < children.length; i++) { + maxWidth = Math.max(this.__getElementWidth(children[i]), maxWidth); } - const domElement = this.getContentElement().getDomElement(); - if (domElement) { - return domElement; + return maxWidth; + }, + + __getElementWidth: function(element) { + const size = this.__getElementSize(element); + return size.width; + }, + + __getElementSize: function(element) { + if ( + element && + element.children && + element.children.length && + element.children[0].localName === "img" + ) { + return qx.bom.element.Dimension.getSize(element.children[0]); } - return null; - } + return qx.bom.element.Dimension.getSize(element); + }, } }); From a712accc3f5c0591713a0e550b858a8b9ff8c1b0 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Wed, 25 Sep 2024 07:56:23 +0200 Subject: [PATCH 025/104] =?UTF-8?q?=F0=9F=8E=A8=20publish=20port=20events?= =?UTF-8?q?=20to=20frontend=20(#6396)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../api_schemas_dynamic_sidecar/ports.py | 35 ++ .../api_schemas_dynamic_sidecar/socketio.py | 2 + .../simcore_sdk/node_ports_v2/nodeports_v2.py | 65 ++- .../test_node_ports_v2_nodeports2.py | 59 ++- .../unit/test_node_ports_v2_nodeports_v2.py | 2 + ...ixed_dynamic_sidecar_and_legacy_project.py | 1 + .../api/containers_long_running_tasks.py | 2 + .../core/application.py | 2 + .../modules/long_running_tasks.py | 9 + .../modules/nodeports.py | 105 ++++- .../modules/notifications/__init__.py | 9 + .../notifications/_notifications_ports.py | 78 ++++ .../_notifications_system_monitor.py | 17 + .../_notifier.py | 57 ++- .../modules/notifications/_setup.py | 15 + .../_socketio.py | 0 .../modules/outputs/_manager.py | 10 + .../modules/system_monitor/_disk_usage.py | 2 +- .../modules/system_monitor/_setup.py | 4 - .../dynamic-sidecar/tests/unit/conftest.py | 15 + .../tests/unit/test_modules_notifier.py | 400 ++++++++++++++++++ .../unit/test_modules_outputs_event_filter.py | 10 +- .../test_modules_outputs_event_handler.py | 10 +- .../unit/test_modules_outputs_manager.py | 6 +- .../unit/test_modules_outputs_watcher.py | 6 +- .../test_modules_system_monitor__notifier.py | 204 --------- 26 files changed, 866 insertions(+), 259 deletions(-) create mode 100644 packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py create mode 100644 services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py create mode 100644 services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py create mode 100644 services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py rename services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/{system_monitor => notifications}/_notifier.py (52%) create mode 100644 services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py rename services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/{system_monitor => notifications}/_socketio.py (100%) create mode 100644 services/dynamic-sidecar/tests/unit/test_modules_notifier.py delete mode 100644 services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py 
b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py
new file mode 100644
index 00000000000..5863b53b2bc
--- /dev/null
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py
@@ -0,0 +1,35 @@
+from enum import auto
+
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.utils.enums import StrAutoEnum
+from pydantic import BaseModel
+
+
+class OutputStatus(StrAutoEnum):
+    UPLOAD_STARTED = auto()
+    UPLOAD_WAS_ABORTED = auto()
+    UPLOAD_FINISHED_SUCCESSFULLY = auto()
+    UPLOAD_FINISHED_WITH_ERROR = auto()
+
+
+class InputStatus(StrAutoEnum):
+    DOWNLOAD_STARTED = auto()
+    DOWNLOAD_WAS_ABORTED = auto()
+    DOWNLOAD_FINISHED_SUCCESSFULLY = auto()
+    DOWNLOAD_FINISHED_WITH_ERROR = auto()
+
+
+class _PortStatusCommon(BaseModel):
+    project_id: ProjectID
+    node_id: NodeID
+    port_key: ServicePortKey
+
+
+class OutputPortStatus(_PortStatusCommon):
+    status: OutputStatus
+
+
+class InputPortStatus(_PortStatusCommon):
+    status: InputStatus
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
index 054b0834bc4..93e34a1682b 100644
--- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
@@ -1,3 +1,5 @@
 from typing import Final
 
 SOCKET_IO_SERVICE_DISK_USAGE_EVENT: Final[str] = "serviceDiskUsage"
+SOCKET_IO_STATE_OUTPUT_PORTS_EVENT: Final[str] = "stateOutputPorts"
+SOCKET_IO_STATE_INPUT_PORTS_EVENT: Final[str] = "stateInputPorts"
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
index 8c78e28a066..9da016b4cea 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
@@ -1,4 +1,6 @@
 import logging
+from abc import ABC, abstractmethod
+from asyncio import CancelledError
 from collections.abc import Callable, Coroutine
 from pathlib import Path
 from typing import Any
@@ -27,6 +29,20 @@
 log = logging.getLogger(__name__)
 
 
+class OutputsCallbacks(ABC):
+    @abstractmethod
+    async def aborted(self, key: ServicePortKey) -> None:
+        pass
+
+    @abstractmethod
+    async def finished_successfully(self, key: ServicePortKey) -> None:
+        pass
+
+    @abstractmethod
+    async def finished_with_error(self, key: ServicePortKey) -> None:
+        pass
+
+
 class Nodeports(BaseModel):
     """
     Represents a node in a project and all its input/output ports
@@ -148,6 +164,7 @@ async def set_multiple(
         ],
         *,
         progress_bar: ProgressBarData,
+        outputs_callbacks: OutputsCallbacks | None,
     ) -> None:
         """
         Sets the provided values to the respective input or output ports
@@ -156,26 +173,44 @@
         raises ValidationError
         """
+
+        async def _set_with_notifications(
+            port_key: ServicePortKey,
+            value: ItemConcreteValue | None,
+            set_kwargs: SetKWargs | None,
+            sub_progress: ProgressBarData,
+        ) -> None:
+            try:
+                # pylint: disable=protected-access
+                await self.internal_outputs[port_key]._set(  # noqa: SLF001
+                    value, set_kwargs=set_kwargs, progress_bar=sub_progress
+                )
+                if outputs_callbacks:
+                    await outputs_callbacks.finished_successfully(port_key)
+            except UnboundPortError:
+                # port not found among the outputs, try the inputs
+                # if this fails it will raise another exception
+                # pylint: disable=protected-access
+                await self.internal_inputs[port_key]._set(  # noqa: SLF001
+                    value, set_kwargs=set_kwargs, progress_bar=sub_progress
+                )
+            except CancelledError:
+                if outputs_callbacks:
+                    await outputs_callbacks.aborted(port_key)
+                raise
+            except Exception:
+                if outputs_callbacks:
+                    await outputs_callbacks.finished_with_error(port_key)
+                raise
+
         tasks = []
         async with progress_bar.sub_progress(
             steps=len(port_values.items()), description=IDStr("set multiple")
         ) as sub_progress:
             for port_key, (value, set_kwargs) in port_values.items():
-                # pylint: disable=protected-access
-                try:
-                    tasks.append(
-                        self.internal_outputs[port_key]._set(
-                            value, set_kwargs=set_kwargs, progress_bar=sub_progress
-                        )
-                    )
-                except UnboundPortError:
-                    # not available try inputs
-                    # if this fails it will raise another exception
-                    tasks.append(
-                        self.internal_inputs[port_key]._set(
-                            value, set_kwargs=set_kwargs, progress_bar=sub_progress
-                        )
-                    )
+                tasks.append(
+                    _set_with_notifications(port_key, value, set_kwargs, sub_progress)
+                )
 
         results = await logged_gather(*tasks)
         await self.save_to_db_cb(self)
diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
index a9016609d13..2da7011e9b0 100644
--- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
+++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
@@ -13,6 +13,7 @@
 from collections.abc import Awaitable, Callable, Iterable
 from pathlib import Path
 from typing import Any
+from unittest.mock import AsyncMock
 from uuid import uuid4
 
 import np_helpers
@@ -28,13 +29,14 @@
     SimcoreS3FileID,
 )
 from models_library.services_types import ServicePortKey
+from pytest_mock import MockerFixture
 from servicelib.progress_bar import ProgressBarData
 from settings_library.r_clone import RCloneSettings
 from simcore_sdk import node_ports_v2
 from simcore_sdk.node_ports_common.exceptions import UnboundPortError
 from simcore_sdk.node_ports_v2 import exceptions
 from simcore_sdk.node_ports_v2.links import ItemConcreteValue, PortLink
-from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports
+from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports, OutputsCallbacks
 from simcore_sdk.node_ports_v2.port import Port
 from utils_port_v2 import CONSTANT_UUID
 
@@ -749,6 +751,34 @@ async def _upload_create_task(item_key: str) -> None:
     )
 
 
+class _Callbacks(OutputsCallbacks):
+    async def aborted(self, key: ServicePortKey) -> None:
+        pass
+
+    async def finished_successfully(self, key: ServicePortKey) -> None:
+        pass
+
+    async def finished_with_error(self, key: ServicePortKey) -> None:
+        pass
+
+
+@pytest.fixture
+async def output_callbacks() -> _Callbacks:
+    return _Callbacks()
+
+
+@pytest.fixture
+async def spy_outputs_callbacks(
+    mocker: MockerFixture, output_callbacks: _Callbacks
+) -> dict[str, AsyncMock]:
+    return {
+        "aborted": mocker.spy(output_callbacks, "aborted"),
+        "finished_successfully": mocker.spy(output_callbacks, "finished_successfully"),
+        "finished_with_error": mocker.spy(output_callbacks, "finished_with_error"),
+    }
+
+
+@pytest.mark.parametrize("use_output_callbacks", [True, False])
 async def test_batch_update_inputs_outputs(
     user_id: int,
     project_id: str,
@@ -757,7 +787,12 @@
     port_count: int,
     option_r_clone_settings: RCloneSettings | None,
     faker: Faker,
+    output_callbacks: _Callbacks,
+    spy_outputs_callbacks: dict[str, AsyncMock],
+    use_output_callbacks: bool,
 ) -> None:
+    callbacks = output_callbacks if use_output_callbacks else None
+
     outputs = [(f"value_out_{i}", "integer", None) for i in range(port_count)]
     inputs = [(f"value_in_{i}", "integer", None) for i in range(port_count)]
     config_dict, _, _ = create_special_configuration(inputs=inputs, outputs=outputs)
@@ -771,12 +806,14 @@
     await check_config_valid(PORTS, config_dict)
 
     async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
+        port_values = (await PORTS.outputs).values()
         await PORTS.set_multiple(
-            {
-                ServicePortKey(port.key): (k, None)
-                for k, port in enumerate((await PORTS.outputs).values())
-            },
+            {ServicePortKey(port.key): (k, None) for k, port in enumerate(port_values)},
             progress_bar=progress_bar,
+            outputs_callbacks=callbacks,
+        )
+        assert len(spy_outputs_callbacks["finished_successfully"].call_args_list) == (
+            len(port_values) if use_output_callbacks else 0
         )
         # pylint: disable=protected-access
         assert progress_bar._current_steps == pytest.approx(1)  # noqa: SLF001
@@ -786,6 +823,11 @@
                 for k, port in enumerate((await PORTS.inputs).values(), start=1000)
             },
             progress_bar=progress_bar,
+            outputs_callbacks=callbacks,
+        )
+        # inputs do not trigger callbacks
+        assert len(spy_outputs_callbacks["finished_successfully"].call_args_list) == (
+            len(port_values) if use_output_callbacks else 0
         )
         assert progress_bar._current_steps == pytest.approx(2)  # noqa: SLF001
 
@@ -807,4 +849,11 @@
             await PORTS.set_multiple(
                 {ServicePortKey("missing_key_in_both"): (123132, None)},
                 progress_bar=progress_bar,
+                outputs_callbacks=callbacks,
             )
+
+    assert len(spy_outputs_callbacks["finished_successfully"].call_args_list) == (
+        len(port_values) if use_output_callbacks else 0
+    )
+    assert len(spy_outputs_callbacks["aborted"].call_args_list) == 0
+    assert len(spy_outputs_callbacks["finished_with_error"].call_args_list) == 0
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
index 91609476b9c..f8d09836213 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
@@ -5,6 +5,7 @@
 
 from pathlib import Path
 from typing import Any, Callable
+from unittest.mock import AsyncMock
 
 import pytest
 from faker import Faker
@@ -138,6 +139,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
                 + list(original_outputs.values())
             },
             progress_bar=progress_bar,
+            outputs_callbacks=AsyncMock(),
         )
         assert progress_bar._current_steps == pytest.approx(1)  # noqa: SLF001
 
diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
index 646cb788ad7..d590985680d 100644
--- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
+++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
@@ -229,6 +229,7 @@ async def _mocked_context_manger(*args, **kwargs) -> AsyncIterator[None]:
     )
 
 
+@pytest.mark.flaky(max_runs=3)
 async def test_legacy_and_dynamic_sidecar_run(
     initialized_app: FastAPI,
     wait_for_catalog_service: Callable[[UserID, str], Awaitable[None]],
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py index ae04a620c8a..52b0e2e7ad6 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py @@ -209,6 +209,7 @@ async def ports_inputs_pull_task( request: Request, tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], app: Annotated[FastAPI, Depends(get_application)], + settings: Annotated[ApplicationSettings, Depends(get_settings)], mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], inputs_state: Annotated[InputsState, Depends(get_inputs_state)], port_keys: list[str] | None = None, @@ -223,6 +224,7 @@ async def ports_inputs_pull_task( port_keys=port_keys, mounted_volumes=mounted_volumes, app=app, + settings=settings, inputs_pulling_enabled=inputs_state.inputs_pulling_enabled, ) except TaskAlreadyRunningError as e: diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index f5910ffbffe..20029cac7fc 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -19,6 +19,7 @@ from ..modules.attribute_monitor import setup_attribute_monitor from ..modules.inputs import setup_inputs from ..modules.mounted_fs import MountedVolumes, setup_mounted_fs +from ..modules.notifications import setup_notifications from ..modules.outputs import setup_outputs from ..modules.prometheus_metrics import setup_prometheus_metrics from ..modules.resource_tracking import setup_resource_tracking @@ -172,6 +173,7 @@ def create_app(): setup_rabbitmq(app) setup_background_log_fetcher(app) setup_resource_tracking(app) + setup_notifications(app) setup_system_monitor(app) setup_mounted_fs(app) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index a8277415b06..0134d481f78 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -52,6 +52,7 @@ from ..models.shared_store import SharedStore from ..modules import nodeports, user_services_preferences from ..modules.mounted_fs import MountedVolumes +from ..modules.notifications._notifications_ports import PortNotifier from ..modules.outputs import OutputsManager, event_propagation_disabled from .long_running_tasksutils import run_before_shutdown_actions from .resource_tracking import send_service_started, send_service_stopped @@ -472,6 +473,7 @@ async def task_ports_inputs_pull( port_keys: list[str] | None, mounted_volumes: MountedVolumes, app: FastAPI, + settings: ApplicationSettings, *, inputs_pulling_enabled: bool, ) -> int: @@ -505,6 +507,12 @@ async def task_ports_inputs_pull( post_sidecar_log_message, app, log_level=logging.INFO ), progress_bar=root_progress, + port_notifier=PortNotifier( + app, + settings.DY_SIDECAR_USER_ID, + settings.DY_SIDECAR_PROJECT_ID, + settings.DY_SIDECAR_NODE_ID, + ), ) await post_sidecar_log_message( app, "Finished pulling inputs", log_level=logging.INFO @@ -541,6 +549,7 @@ async def task_ports_outputs_pull( 
post_sidecar_log_message, app, log_level=logging.INFO
         ),
         progress_bar=root_progress,
+        port_notifier=None,
     )
     await post_sidecar_log_message(
         app, "Finished pulling outputs", log_level=logging.INFO
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
index 2213dd1d4ac..0ad00f2c18d 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
@@ -4,6 +4,7 @@
 import shutil
 import sys
 import time
+from asyncio import CancelledError
 from collections import deque
 from collections.abc import Coroutine
 from contextlib import AsyncExitStack
@@ -24,16 +25,17 @@
 from servicelib.file_utils import remove_directory
 from servicelib.logging_utils import log_context
 from servicelib.progress_bar import ProgressBarData
-from servicelib.utils import logged_gather
+from servicelib.utils import limited_gather
 from simcore_sdk import node_ports_v2
 from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB
 from simcore_sdk.node_ports_v2 import Port
 from simcore_sdk.node_ports_v2.links import ItemConcreteValue
-from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports
+from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports, OutputsCallbacks
 from simcore_sdk.node_ports_v2.port import SetKWargs
 from simcore_sdk.node_ports_v2.port_utils import is_file_type
 
 from ..core.settings import ApplicationSettings, get_settings
+from ..modules.notifications import PortNotifier
 
 
 class PortTypeName(str, Enum):
@@ -70,13 +72,27 @@ def _get_size_of_value(value: tuple[ItemConcreteValue | None, SetKWargs | None])
     )
 
 
-# NOTE: outputs_manager guarantees that no parallel calls
-# to this function occur
-async def upload_outputs(
+class OutputCallbacksWrapper(OutputsCallbacks):
+    def __init__(self, port_notifier: PortNotifier) -> None:
+        self.port_notifier = port_notifier
+
+    async def aborted(self, key: ServicePortKey) -> None:
+        await self.port_notifier.send_output_port_upload_was_aborted(key)
+
+    async def finished_successfully(self, key: ServicePortKey) -> None:
+        await self.port_notifier.send_output_port_upload_finished_successfully(key)
+
+    async def finished_with_error(self, key: ServicePortKey) -> None:
+        await self.port_notifier.send_output_port_upload_finished_with_error(key)
+
+
+# NOTE: outputs_manager guarantees that no parallel calls to this function occur
+async def upload_outputs(  # pylint:disable=too-many-statements  # noqa: PLR0915, C901
     outputs_path: Path,
     port_keys: list[str],
     io_log_redirect_cb: LogRedirectCB | None,
     progress_bar: ProgressBarData,
+    port_notifier: PortNotifier,
 ) -> None:
     # pylint: disable=too-many-branches
     logger.debug("uploading data to simcore...")
@@ -97,12 +113,17 @@ async def upload_outputs(
         ServicePortKey, tuple[ItemConcreteValue | None, SetKWargs | None]
     ] = {}
     archiving_tasks: deque[Coroutine[None, None, None]] = deque()
-    ports_to_set = [
+    ports_to_set: list[Port] = [
         port_value
         for port_value in (await PORTS.outputs).values()
         if (not port_keys) or (port_value.key in port_keys)
     ]
 
+    await limited_gather(
+        *(port_notifier.send_output_port_upload_started(p.key) for p in ports_to_set),
+        limit=4,
+    )
+
     async with AsyncExitStack() as stack:
         sub_progress = await stack.enter_async_context(
             progress_bar.sub_progress(
@@ -147,13 +168,34 @@
                     # when having multiple directories it is important to
                    # run the compression in parallel to guarantee better performance
+                    async def _archive_dir_notified(
+                        dir_to_compress: Path, destination: Path, port_key: ServicePortKey
+                    ) -> None:
+                        # errors and cancellation can also be triggered by the archiving itself
+                        try:
+                            await archive_dir(
+                                dir_to_compress=dir_to_compress,
+                                destination=destination,
+                                compress=False,
+                                store_relative_path=True,
+                                progress_bar=sub_progress,
+                            )
+                        except CancelledError:
+                            await port_notifier.send_output_port_upload_was_aborted(
+                                port_key
+                            )
+                            raise
+                        except Exception:
+                            await port_notifier.send_output_port_upload_finished_with_error(
+                                port_key
+                            )
+                            raise
+
                     archiving_tasks.append(
-                        archive_dir(
+                        _archive_dir_notified(
                             dir_to_compress=src_folder,
                             destination=tmp_file,
-                            compress=False,
-                            store_relative_path=True,
-                            progress_bar=sub_progress,
+                            port_key=port.key,
                         )
                     )
                     ports_values[port.key] = (
@@ -176,9 +218,13 @@
             logger.debug("No file %s to fetch port values from", data_file)
 
         if archiving_tasks:
-            await logged_gather(*archiving_tasks)
+            await limited_gather(*archiving_tasks, limit=4)
 
-        await PORTS.set_multiple(ports_values, progress_bar=sub_progress)
+        await PORTS.set_multiple(
+            ports_values,
+            progress_bar=sub_progress,
+            outputs_callbacks=OutputCallbacksWrapper(port_notifier),
+        )
 
     elapsed_time = time.perf_counter() - start_time
     total_bytes = sum(_get_size_of_value(x) for x in ports_values.values())
@@ -264,6 +310,7 @@ async def download_target_ports(
     port_keys: list[str],
     io_log_redirect_cb: LogRedirectCB,
     progress_bar: ProgressBarData,
+    port_notifier: PortNotifier | None,
 ) -> ByteSize:
     logger.debug("retrieving data from simcore...")
     start_time = time.perf_counter()
@@ -279,22 +326,46 @@
     )
 
     # let's gather all the data
-    ports_to_get = [
+    ports_to_get: list[Port] = [
         port_value
         for port_value in (await getattr(PORTS, port_type_name.value)).values()
         if (not port_keys) or (port_value.key in port_keys)
     ]
+
+    async def _get_data_from_port_notified(
+        port: Port, progress_bar: ProgressBarData
+    ) -> tuple[Port, ItemConcreteValue | None, ByteSize]:
+        assert port_notifier is not None
+        await port_notifier.send_input_port_download_started(port.key)
+        try:
+            result = await _get_data_from_port(
+                port, target_dir=target_dir, progress_bar=progress_bar
+            )
+            await port_notifier.send_input_port_download_finished_successfully(port.key)
+            return result
+
+        except CancelledError:
+            await port_notifier.send_input_port_download_was_aborted(port.key)
+            raise
+        except Exception:
+            await port_notifier.send_input_port_download_finished_with_error(port.key)
+            raise
+
     async with progress_bar.sub_progress(
         steps=len(ports_to_get), description=IDStr("downloading")
     ) as sub_progress:
-        results = await logged_gather(
+        results = await limited_gather(
             *[
-                _get_data_from_port(
-                    port, target_dir=target_dir, progress_bar=sub_progress
+                (
+                    _get_data_from_port(
+                        port, target_dir=target_dir, progress_bar=sub_progress
+                    )
+                    if port_type_name == PortTypeName.OUTPUTS
+                    else _get_data_from_port_notified(port, progress_bar=sub_progress)
                 )
                 for port in ports_to_get
             ],
-            max_concurrency=2,
+            limit=2,
         )
     # parse results
     data = {
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py
new file mode 100644
index 00000000000..18254b1d23c
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py
@@ -0,0 +1,9 @@
+from ._notifications_ports import PortNotifier
+from ._notifications_system_monitor import publish_disk_usage
+from ._setup import setup_notifications
+
+__all__: tuple[str, ...] = (
+    "PortNotifier",
+    "publish_disk_usage",
+    "setup_notifications",
+)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py
new file mode 100644
index 00000000000..ae48f19a973
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py
@@ -0,0 +1,78 @@
+from dataclasses import dataclass
+
+from fastapi import FastAPI
+from models_library.api_schemas_dynamic_sidecar.ports import InputStatus, OutputStatus
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.users import UserID
+
+from ._notifier import Notifier
+
+
+@dataclass
+class PortNotifier:
+    app: FastAPI
+    user_id: UserID
+    project_id: ProjectID
+    node_id: NodeID
+
+    async def _send_output_port_status(
+        self, port_key: ServicePortKey, status: OutputStatus
+    ) -> None:
+        notifier: Notifier = Notifier.get_from_app_state(self.app)
+        await notifier.notify_output_port_status(
+            self.user_id, self.project_id, self.node_id, port_key, status
+        )
+
+    async def _send_input_port_status(
+        self, port_key: ServicePortKey, status: InputStatus
+    ) -> None:
+        notifier: Notifier = Notifier.get_from_app_state(self.app)
+        await notifier.notify_input_port_status(
+            self.user_id, self.project_id, self.node_id, port_key, status
+        )
+
+    async def send_output_port_upload_started(self, port_key: ServicePortKey) -> None:
+        await self._send_output_port_status(port_key, OutputStatus.UPLOAD_STARTED)
+
+    async def send_output_port_upload_was_aborted(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_output_port_status(port_key, OutputStatus.UPLOAD_WAS_ABORTED)
+
+    async def send_output_port_upload_finished_successfully(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_output_port_status(
+            port_key, OutputStatus.UPLOAD_FINISHED_SUCCESSFULLY
+        )
+
+    async def send_output_port_upload_finished_with_error(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_output_port_status(
+            port_key, OutputStatus.UPLOAD_FINISHED_WITH_ERROR
+        )
+
+    async def send_input_port_download_started(self, port_key: ServicePortKey) -> None:
+        await self._send_input_port_status(port_key, InputStatus.DOWNLOAD_STARTED)
+
+    async def send_input_port_download_was_aborted(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_input_port_status(port_key, InputStatus.DOWNLOAD_WAS_ABORTED)
+
+    async def send_input_port_download_finished_successfully(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_input_port_status(
+            port_key, InputStatus.DOWNLOAD_FINISHED_SUCCESSFULLY
+        )
+
+    async def send_input_port_download_finished_with_error(
+        self, port_key: ServicePortKey
+    ) -> None:
+        await self._send_input_port_status(
+            port_key, InputStatus.DOWNLOAD_FINISHED_WITH_ERROR
+        )
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py
new file mode 100644
index 00000000000..840c47d729e
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py
@@ -0,0 +1,17 @@
+from pathlib import Path
+
+from fastapi import FastAPI
+from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
+from models_library.projects_nodes_io import NodeID
+from models_library.users import UserID
+
+from ._notifier import Notifier
+
+
+async def publish_disk_usage(
+    app: FastAPI, *, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
+) -> None:
+    notifier: Notifier = Notifier.get_from_app_state(app)
+    await notifier.notify_service_disk_usage(
+        user_id=user_id, node_id=node_id, usage=usage
+    )
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py
similarity index 52%
rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py
rename to services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py
index 9f97a889bac..0d61e1b388b 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py
@@ -4,15 +4,25 @@
 import socketio  # type: ignore[import-untyped]
 from fastapi import FastAPI
 from fastapi.encoders import jsonable_encoder
+from models_library.api_schemas_dynamic_sidecar.ports import (
+    InputPortStatus,
+    InputStatus,
+    OutputPortStatus,
+    OutputStatus,
+)
 from models_library.api_schemas_dynamic_sidecar.socketio import (
     SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
+    SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+    SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
 )
 from models_library.api_schemas_dynamic_sidecar.telemetry import (
     DiskUsage,
     ServiceDiskUsage,
 )
 from models_library.api_schemas_webserver.socketio import SocketIORoomStr
+from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
 from models_library.users import UserID
 from servicelib.fastapi.app_state import SingletonInAppStateMixin
 
@@ -32,14 +42,47 @@ async def notify_service_disk_usage(
             room=SocketIORoomStr.from_user_id(user_id),
         )
 
+    async def notify_output_port_status(
+        self,
+        user_id: UserID,
+        project_id: ProjectID,
+        node_id: NodeID,
+        port_key: ServicePortKey,
+        output_status: OutputStatus,
+    ) -> None:
+        await self._sio_manager.emit(
+            SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
+            data=jsonable_encoder(
+                OutputPortStatus(
+                    project_id=project_id,
+                    node_id=node_id,
+                    port_key=port_key,
+                    status=output_status,
+                )
+            ),
+            room=SocketIORoomStr.from_user_id(user_id),
+        )
 
-async def publish_disk_usage(
-    app: FastAPI, *, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
-) -> None:
-    notifier: Notifier = Notifier.get_from_app_state(app)
-    await notifier.notify_service_disk_usage(
-        user_id=user_id, node_id=node_id, usage=usage
-    )
+    async def notify_input_port_status(
+        self,
+        user_id: UserID,
+        project_id: ProjectID,
+        node_id: NodeID,
+        port_key: ServicePortKey,
+        input_status: InputStatus,
+    ) -> None:
+        await self._sio_manager.emit(
+            SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+            data=jsonable_encoder(
+                InputPortStatus(
+                    project_id=project_id,
+                    node_id=node_id,
+                    port_key=port_key,
+                    status=input_status,
+                )
+            ),
+            room=SocketIORoomStr.from_user_id(user_id),
+        )
 
 
 def setup_notifier(app: FastAPI):
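The two events added above ("stateOutputPorts" / "stateInputPorts") are emitted into the user's socket.io room. To make the consumer side concrete, here is a minimal sketch of a client subscribing to them with python-socketio, the same client library the unit tests below use. The endpoint URL is a hypothetical placeholder, the handler bodies are illustrative only, and it is assumed the events are relayed unchanged to the connected client, as the tests emulate:

    import asyncio

    import socketio  # python-socketio

    SIO_URL = "http://localhost:8080"  # hypothetical endpoint, deployment-specific


    async def main() -> None:
        client = socketio.AsyncClient()

        @client.on("stateInputPorts")
        async def on_input_port_state(data: dict) -> None:
            # payload is the jsonable-encoded InputPortStatus:
            # project_id, node_id, port_key, status
            print(f"input {data['port_key']} -> {data['status']}")

        @client.on("stateOutputPorts")
        async def on_output_port_state(data: dict) -> None:
            # payload is the jsonable-encoded OutputPortStatus, same shape
            print(f"output {data['port_key']} -> {data['status']}")

        await client.connect(SIO_URL)
        await client.wait()


    asyncio.run(main())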
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py new file mode 100644 index 00000000000..6de0fae307f --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py @@ -0,0 +1,15 @@ +import logging + +from fastapi import FastAPI +from servicelib.logging_utils import log_context + +from ..notifications._notifier import setup_notifier +from ..notifications._socketio import setup_socketio + +_logger = logging.getLogger(__name__) + + +def setup_notifications(app: FastAPI) -> None: + with log_context(_logger, logging.INFO, "setup notifications"): + setup_socketio(app) + setup_notifier(app) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_socketio.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_socketio.py similarity index 100% rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_socketio.py rename to services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_socketio.py diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py index 307f8b3d933..d4a8ac8d07a 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py @@ -18,6 +18,7 @@ from ...core.rabbitmq import post_log_message, post_progress_message from ...core.settings import ApplicationSettings +from ...modules.notifications._notifications_ports import PortNotifier from ..nodeports import upload_outputs from ._context import OutputsContext @@ -100,6 +101,7 @@ class OutputsManager: # pylint: disable=too-many-instance-attributes def __init__( self, outputs_context: OutputsContext, + port_notifier: PortNotifier, io_log_redirect_cb: LogRedirectCB | None, progress_cb: progress_bar.AsyncReportCB | None, *, @@ -108,6 +110,7 @@ def __init__( task_monitor_interval_s: PositiveFloat = 1.0, ): self.outputs_context = outputs_context + self.port_notifier = port_notifier self.io_log_redirect_cb = io_log_redirect_cb self.upload_upon_api_request = upload_upon_api_request self.task_cancellation_timeout_s = task_cancellation_timeout_s @@ -138,6 +141,7 @@ async def _upload_ports() -> None: port_keys=port_keys, io_log_redirect_cb=self.io_log_redirect_cb, progress_bar=root_progress, + port_notifier=self.port_notifier, ) task_name = f"outputs_manager_port_keys-{'_'.join(port_keys)}" @@ -271,6 +275,12 @@ async def on_startup() -> None: progress_cb=partial( post_progress_message, app, ProgressType.SERVICE_OUTPUTS_PUSHING ), + port_notifier=PortNotifier( + app, + settings.DY_SIDECAR_USER_ID, + settings.DY_SIDECAR_PROJECT_ID, + settings.DY_SIDECAR_NODE_ID, + ), ) await outputs_manager.start() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py index 1ecc04fdaea..90b06450e6f 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py 
@@ -15,7 +15,7 @@
 from ...core.settings import ApplicationSettings
 from ..mounted_fs import MountedVolumes
-from ._notifier import publish_disk_usage
+from ..notifications import publish_disk_usage
 
 _logger = logging.getLogger(__name__)
 
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
index e460f7a9ee3..aa0d36a72b9 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
@@ -5,8 +5,6 @@
 
 from ...core.settings import SystemMonitorSettings
 from ._disk_usage import setup_disk_usage
-from ._notifier import setup_notifier
-from ._socketio import setup_socketio
 
 _logger = logging.getLogger(__name__)
 
@@ -19,6 +17,4 @@ def setup_system_monitor(app: FastAPI) -> None:
         _logger.warning("system monitor disabled")
         return
 
-    setup_socketio(app)  # required by notifier
-    setup_notifier(app)
     setup_disk_usage(app)
diff --git a/services/dynamic-sidecar/tests/unit/conftest.py b/services/dynamic-sidecar/tests/unit/conftest.py
index b6e590f71eb..ee2c106bb69 100644
--- a/services/dynamic-sidecar/tests/unit/conftest.py
+++ b/services/dynamic-sidecar/tests/unit/conftest.py
@@ -17,6 +17,10 @@
     docker_compose_down,
 )
 from simcore_service_dynamic_sidecar.core.docker_utils import docker_client
+from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+    PortNotifier,
+)
 from tenacity import retry
 from tenacity.after import after_log
 from tenacity.stop import stop_after_delay
@@ -142,3 +146,14 @@ def mock_rabbitmq_envs(
         },
     )
     return mock_environment
+
+
+@pytest.fixture
+def port_notifier(app: FastAPI) -> PortNotifier:
+    settings: ApplicationSettings = app.state.settings
+    return PortNotifier(
+        app,
+        settings.DY_SIDECAR_USER_ID,
+        settings.DY_SIDECAR_PROJECT_ID,
+        settings.DY_SIDECAR_NODE_ID,
+    )
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
new file mode 100644
index 00000000000..654d2bb1619
--- /dev/null
+++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
@@ -0,0 +1,400 @@
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+from collections.abc import AsyncIterable, Callable
+from contextlib import AsyncExitStack, _AsyncGeneratorContextManager
+from pathlib import Path
+from typing import Final
+from unittest.mock import AsyncMock
+
+import pytest
+import socketio
+from asgi_lifespan import LifespanManager
+from fastapi import FastAPI
+from fastapi.encoders import jsonable_encoder
+from models_library.api_schemas_dynamic_sidecar.ports import (
+    InputPortStatus,
+    InputStatus,
+    OutputPortStatus,
+    OutputStatus,
+)
+from models_library.api_schemas_dynamic_sidecar.socketio import (
+    SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
+    SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+    SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
+)
+from models_library.api_schemas_dynamic_sidecar.telemetry import (
+    DiskUsage,
+    ServiceDiskUsage,
+)
+from models_library.api_schemas_webserver.socketio import SocketIORoomStr
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.users import UserID
+from pydantic import
ByteSize, NonNegativeInt, parse_obj_as +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from servicelib.utils import logged_gather +from settings_library.rabbit import RabbitSettings +from simcore_service_dynamic_sidecar.core.application import create_app +from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings +from simcore_service_dynamic_sidecar.modules.notifications import ( + PortNotifier, + publish_disk_usage, +) +from simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage import ( + DiskUsageMonitor, +) +from socketio import AsyncServer +from tenacity import AsyncRetrying +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed + +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_NUMBER_OF_CLIENTS: Final[NonNegativeInt] = 10 + + +@pytest.fixture +def mock_environment( + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, + mock_environment: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": "true", + "RABBIT_HOST": rabbit_service.RABBIT_HOST, + "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(), + "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}", + "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}", + "RABBIT_USER": rabbit_service.RABBIT_USER, + }, + ) + + +@pytest.fixture +async def app( + mock_environment: EnvVarsDict, + mock_registry_service: AsyncMock, + mock_storage_check: None, + mock_postgres_check: None, + mocker: MockerFixture, +) -> AsyncIterable[FastAPI]: + mocker.patch( + "simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage._get_monitored_paths", + return_value=[], + ) + + app: FastAPI = create_app() + async with LifespanManager(app): + yield app + + +@pytest.fixture +async def disk_usage_monitor(app: FastAPI) -> DiskUsageMonitor: + return app.state.disk_usage_monitor + + +@pytest.fixture +async def socketio_server( + app: FastAPI, + socketio_server_factory: Callable[ + [RabbitSettings], _AsyncGeneratorContextManager[AsyncServer] + ], +) -> AsyncIterable[AsyncServer]: + # Same configuration as simcore_service_webserver/socketio/server.py + settings: ApplicationSettings = app.state.settings + assert settings.RABBIT_SETTINGS + + async with socketio_server_factory(settings.RABBIT_SETTINGS) as server: + yield server + + +@pytest.fixture +def room_name(user_id: UserID) -> SocketIORoomStr: + return SocketIORoomStr.from_user_id(user_id) + + +async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None: + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True + ): + with attempt: + assert mock.call_count == call_count + + +def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage: + return DiskUsage( + total=ByteSize(0), + used=ByteSize(0), + free=ByteSize.validate(byte_size_str), + used_percent=0, + ) + + +def _get_on_service_disk_usage_spy( + socketio_client: socketio.AsyncClient, +) -> AsyncMock: + # emulates front-end receiving message + + async def on_service_status(data): + assert parse_obj_as(ServiceDiskUsage, data) is not None + + on_event_spy = AsyncMock(wraps=on_service_status) + socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy) + + return on_event_spy + + +@pytest.mark.parametrize( + "usage", + [ + pytest.param({}, id="empty"), + pytest.param({Path("/"): _get_mocked_disk_usage("1kb")}, id="one_entry"), + pytest.param( + { + 
Path("/"): _get_mocked_disk_usage("1kb"),
+                Path("/tmp"): _get_mocked_disk_usage("2kb"),  # noqa: S108
+            },
+            id="two_entries",
+        ),
+    ],
+)
+async def test_notifier_publish_disk_usage(
+    disk_usage_monitor: DiskUsageMonitor,
+    socketio_server_events: dict[str, AsyncMock],
+    app: FastAPI,
+    user_id: UserID,
+    usage: dict[Path, DiskUsage],
+    node_id: NodeID,
+    socketio_client_factory: Callable[
+        [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+    ],
+):
+    # web server spy events
+    server_connect = socketio_server_events["connect"]
+    server_disconnect = socketio_server_events["disconnect"]
+    server_on_check = socketio_server_events["on_check"]
+
+    async with AsyncExitStack() as socketio_frontend_clients:
+        frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+            *[
+                socketio_frontend_clients.enter_async_context(socketio_client_factory())
+                for _ in range(_NUMBER_OF_CLIENTS)
+            ]
+        )
+        await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # client emits and check it was received
+        await logged_gather(
+            *[
+                frontend_client.emit("check", data="an_event")
+                for frontend_client in frontend_clients
+            ]
+        )
+        await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+        # attach spy to client
+        on_service_disk_usage_events: list[AsyncMock] = [
+            _get_on_service_disk_usage_spy(c) for c in frontend_clients
+        ]
+
+        # server publishes a message
+        await publish_disk_usage(app, user_id=user_id, node_id=node_id, usage=usage)
+
+        # check that all clients received it
+        for on_service_disk_usage_event in on_service_disk_usage_events:
+            await _assert_call_count(on_service_disk_usage_event, call_count=1)
+            on_service_disk_usage_event.assert_awaited_once_with(
+                jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage))
+            )
+
+    await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS)
+
+
+@pytest.fixture
+def port_key() -> ServicePortKey:
+    return ServicePortKey("test_port")
+
+
+def _get_on_input_port_spy(
+    socketio_client: socketio.AsyncClient,
+) -> AsyncMock:
+    # emulates front-end receiving message
+
+    async def on_service_status(data):
+        assert parse_obj_as(InputPortStatus, data) is not None
+
+    on_event_spy = AsyncMock(wraps=on_service_status)
+    socketio_client.on(SOCKET_IO_STATE_INPUT_PORTS_EVENT, on_event_spy)
+
+    return on_event_spy
+
+
+@pytest.mark.parametrize("input_status", InputStatus)
+async def test_notifier_send_input_port_status(
+    socketio_server_events: dict[str, AsyncMock],
+    app: FastAPI,
+    user_id: UserID,
+    project_id: ProjectID,
+    node_id: NodeID,
+    port_key: ServicePortKey,
+    socketio_client_factory: Callable[
+        [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+    ],
+    input_status: InputStatus,
+):
+    # web server spy events
+    server_connect = socketio_server_events["connect"]
+    server_disconnect = socketio_server_events["disconnect"]
+    server_on_check = socketio_server_events["on_check"]
+
+    async with AsyncExitStack() as socketio_frontend_clients:
+        frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+            *[
+                socketio_frontend_clients.enter_async_context(socketio_client_factory())
+                for _ in range(_NUMBER_OF_CLIENTS)
+            ]
+        )
+        await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # client emits and check it was received
+        await logged_gather(
+            *[
+                frontend_client.emit("check", data="an_event")
+                for frontend_client in frontend_clients
+            ]
+        )
+        await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+        # attach spy to client
+        on_input_port_events: list[AsyncMock] = [
+            _get_on_input_port_spy(c) for c in frontend_clients
+        ]
+
+        port_notifier = PortNotifier(app, user_id, project_id, node_id)
+
+        # server publishes a message
+        match input_status:
+            case InputStatus.DOWNLOAD_STARTED:
+                await port_notifier.send_input_port_download_started(port_key)
+            case InputStatus.DOWNLOAD_WAS_ABORTED:
+                await port_notifier.send_input_port_download_was_aborted(port_key)
+            case InputStatus.DOWNLOAD_FINISHED_SUCCESSFULLY:
+                await port_notifier.send_input_port_download_finished_successfully(
+                    port_key
+                )
+            case InputStatus.DOWNLOAD_FINISHED_WITH_ERROR:
+                await port_notifier.send_input_port_download_finished_with_error(
+                    port_key
+                )
+
+        # check that all clients received it
+        for on_input_port_event in on_input_port_events:
+            await _assert_call_count(on_input_port_event, call_count=1)
+            on_input_port_event.assert_awaited_once_with(
+                jsonable_encoder(
+                    InputPortStatus(
+                        project_id=project_id,
+                        node_id=node_id,
+                        port_key=port_key,
+                        status=input_status,
+                    )
+                )
+            )
+
+    await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS)
+
+
+def _get_on_output_port_spy(
+    socketio_client: socketio.AsyncClient,
+) -> AsyncMock:
+    # emulates front-end receiving message
+
+    async def on_service_status(data):
+        assert parse_obj_as(OutputPortStatus, data) is not None
+
+    on_event_spy = AsyncMock(wraps=on_service_status)
+    socketio_client.on(SOCKET_IO_STATE_OUTPUT_PORTS_EVENT, on_event_spy)
+
+    return on_event_spy
+
+
+@pytest.mark.parametrize("output_status", OutputStatus)
+async def test_notifier_send_output_port_status(
+    socketio_server_events: dict[str, AsyncMock],
+    app: FastAPI,
+    user_id: UserID,
+    project_id: ProjectID,
+    node_id: NodeID,
+    port_key: ServicePortKey,
+    socketio_client_factory: Callable[
+        [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+    ],
+    output_status: OutputStatus,
+):
+    # web server spy events
+    server_connect = socketio_server_events["connect"]
+    server_disconnect = socketio_server_events["disconnect"]
+    server_on_check = socketio_server_events["on_check"]
+
+    async with AsyncExitStack() as socketio_frontend_clients:
+        frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+            *[
+                socketio_frontend_clients.enter_async_context(socketio_client_factory())
+                for _ in range(_NUMBER_OF_CLIENTS)
+            ]
+        )
+        await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # client emits and check it was received
+        await logged_gather(
+            *[
+                frontend_client.emit("check", data="an_event")
+                for frontend_client in frontend_clients
+            ]
+        )
+        await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+        # attach spy to client
+        on_output_port_events: list[AsyncMock] = [
+            _get_on_output_port_spy(c) for c in frontend_clients
+        ]
+
+        port_notifier = PortNotifier(app, user_id, project_id, node_id)
+
+        # server publishes a message
+        match output_status:
+            case OutputStatus.UPLOAD_STARTED:
+                await port_notifier.send_output_port_upload_started(port_key)
+            case OutputStatus.UPLOAD_WAS_ABORTED:
+                await port_notifier.send_output_port_upload_was_aborted(port_key)
+            case OutputStatus.UPLOAD_FINISHED_SUCCESSFULLY:
+                await port_notifier.send_output_port_upload_finished_successfully(
+                    port_key
+                )
+            case OutputStatus.UPLOAD_FINISHED_WITH_ERROR:
+                await port_notifier.send_output_port_upload_finished_with_error(
+                    port_key
+                )
+
+        # check that all clients received it
+        for on_output_port_event in on_output_port_events:
+            await _assert_call_count(on_output_port_event,
call_count=1) + on_output_port_event.assert_awaited_once_with( + jsonable_encoder( + OutputPortStatus( + project_id=project_id, + node_id=node_id, + port_key=port_key, + status=output_status, + ) + ) + ) + + await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS) diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py index 024d966e424..38b217bab8f 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py @@ -9,6 +9,9 @@ import pytest from pydantic import ByteSize, NonNegativeFloat, NonNegativeInt, parse_obj_as from pytest_mock.plugin import MockerFixture +from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import ( + PortNotifier, +) from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext from simcore_service_dynamic_sidecar.modules.outputs._event_filter import ( BaseDelayPolicy, @@ -56,10 +59,13 @@ async def outputs_context(outputs_path: Path, port_keys: list[str]) -> OutputsCo @pytest.fixture async def outputs_manager( - outputs_context: OutputsContext, + outputs_context: OutputsContext, port_notifier: PortNotifier ) -> AsyncIterator[OutputsManager]: outputs_manager = OutputsManager( - outputs_context=outputs_context, io_log_redirect_cb=None, progress_cb=None + outputs_context=outputs_context, + port_notifier=port_notifier, + io_log_redirect_cb=None, + progress_cb=None, ) await outputs_manager.start() yield outputs_manager diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py index 5f02a500a4d..35ccc7d72df 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py @@ -10,6 +10,9 @@ import pytest from aioprocessing.queues import AioQueue from pydantic import PositiveFloat +from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import ( + PortNotifier, +) from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext from simcore_service_dynamic_sidecar.modules.outputs._event_handler import ( EventHandlerObserver, @@ -39,10 +42,13 @@ async def outputs_context( @pytest.fixture async def outputs_manager( - outputs_context: OutputsContext, + outputs_context: OutputsContext, port_notifier: PortNotifier ) -> AsyncIterable[OutputsManager]: outputs_manager = OutputsManager( - outputs_context, io_log_redirect_cb=None, progress_cb=None + outputs_context, + port_notifier=port_notifier, + io_log_redirect_cb=None, + progress_cb=None, ) await outputs_manager.start() diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py index 40a3db6d3f9..3bf17d09f92 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py @@ -22,6 +22,9 @@ from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes +from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import ( + PortNotifier, +) from 
simcore_service_dynamic_sidecar.modules.outputs._context import ( OutputsContext, setup_outputs_context, @@ -165,10 +168,11 @@ async def outputs_context( @pytest.fixture async def outputs_manager( - outputs_context: OutputsContext, + outputs_context: OutputsContext, port_notifier: PortNotifier ) -> AsyncIterator[OutputsManager]: outputs_manager = OutputsManager( outputs_context=outputs_context, + port_notifier=port_notifier, io_log_redirect_cb=None, task_monitor_interval_s=0.01, progress_cb=None, diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py index f209e4877a7..7f9b81587c2 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py @@ -26,6 +26,9 @@ ) from pytest_mock import MockerFixture from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes +from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import ( + PortNotifier, +) from simcore_service_dynamic_sidecar.modules.outputs import ( _watcher as outputs_watcher_core, ) @@ -90,10 +93,11 @@ async def outputs_context( @pytest.fixture async def outputs_manager( - outputs_context: OutputsContext, + outputs_context: OutputsContext, port_notifier: PortNotifier ) -> AsyncIterable[OutputsManager]: outputs_manager = OutputsManager( outputs_context=outputs_context, + port_notifier=port_notifier, io_log_redirect_cb=None, task_monitor_interval_s=TICK_INTERVAL, progress_cb=None, diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py deleted file mode 100644 index 73184a1b3cb..00000000000 --- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py +++ /dev/null @@ -1,204 +0,0 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - -from collections.abc import AsyncIterable, Callable -from contextlib import AsyncExitStack, _AsyncGeneratorContextManager -from pathlib import Path -from unittest.mock import AsyncMock - -import pytest -import socketio -from asgi_lifespan import LifespanManager -from fastapi import FastAPI -from fastapi.encoders import jsonable_encoder -from models_library.api_schemas_dynamic_sidecar.socketio import ( - SOCKET_IO_SERVICE_DISK_USAGE_EVENT, -) -from models_library.api_schemas_dynamic_sidecar.telemetry import ( - DiskUsage, - ServiceDiskUsage, -) -from models_library.api_schemas_webserver.socketio import SocketIORoomStr -from models_library.projects_nodes_io import NodeID -from models_library.users import UserID -from pydantic import ByteSize, NonNegativeInt, parse_obj_as -from pytest_mock import MockerFixture -from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from servicelib.utils import logged_gather -from settings_library.rabbit import RabbitSettings -from simcore_service_dynamic_sidecar.core.application import create_app -from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings -from simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage import ( - DiskUsageMonitor, -) -from simcore_service_dynamic_sidecar.modules.system_monitor._notifier import ( - publish_disk_usage, -) -from socketio import AsyncServer -from tenacity import AsyncRetrying -from tenacity.stop import stop_after_delay -from tenacity.wait import wait_fixed - 
-pytest_simcore_core_services_selection = [ - "rabbit", -] - - -@pytest.fixture -def mock_environment( - monkeypatch: pytest.MonkeyPatch, - rabbit_service: RabbitSettings, - mock_environment: EnvVarsDict, -) -> EnvVarsDict: - return setenvs_from_dict( - monkeypatch, - { - "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": "true", - "RABBIT_HOST": rabbit_service.RABBIT_HOST, - "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(), - "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}", - "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}", - "RABBIT_USER": rabbit_service.RABBIT_USER, - }, - ) - - -@pytest.fixture -async def app( - mock_environment: EnvVarsDict, - mock_registry_service: AsyncMock, - mock_storage_check: None, - mock_postgres_check: None, - mocker: MockerFixture, -) -> AsyncIterable[FastAPI]: - mocker.patch( - "simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage._get_monitored_paths", - return_value=[], - ) - - app: FastAPI = create_app() - async with LifespanManager(app): - yield app - - -@pytest.fixture -async def disk_usage_monitor(app: FastAPI) -> DiskUsageMonitor: - return app.state.disk_usage_monitor - - -@pytest.fixture -async def socketio_server( - app: FastAPI, - socketio_server_factory: Callable[ - [RabbitSettings], _AsyncGeneratorContextManager[AsyncServer] - ], -) -> AsyncIterable[AsyncServer]: - # Same configuration as simcore_service_webserver/socketio/server.py - settings: ApplicationSettings = app.state.settings - assert settings.RABBIT_SETTINGS - - async with socketio_server_factory(settings.RABBIT_SETTINGS) as server: - yield server - - -@pytest.fixture -def room_name(user_id: UserID) -> SocketIORoomStr: - return SocketIORoomStr.from_user_id(user_id) - - -def _get_on_service_disk_usage_event( - socketio_client: socketio.AsyncClient, -) -> AsyncMock: - # emulates front-end receiving message - - async def on_service_status(data): - assert parse_obj_as(ServiceDiskUsage, data) is not None - - on_event_spy = AsyncMock(wraps=on_service_status) - socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy) - - return on_event_spy - - -async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None: - async for attempt in AsyncRetrying( - wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True - ): - with attempt: - assert mock.call_count == call_count - - -def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage: - return DiskUsage( - total=ByteSize(0), - used=ByteSize(0), - free=ByteSize.validate(byte_size_str), - used_percent=0, - ) - - -@pytest.mark.parametrize( - "usage", - [ - pytest.param({}, id="empty"), - pytest.param({Path("/"): _get_mocked_disk_usage("1kb")}, id="one_entry"), - pytest.param( - { - Path("/"): _get_mocked_disk_usage("1kb"), - Path("/tmp"): _get_mocked_disk_usage("2kb"), # noqa: S108 - }, - id="two_entries", - ), - ], -) -async def test_notifier_publish_message( - disk_usage_monitor: DiskUsageMonitor, - socketio_server_events: dict[str, AsyncMock], - app: FastAPI, - user_id: UserID, - usage: dict[Path, DiskUsage], - node_id: NodeID, - socketio_client_factory: Callable[ - [], _AsyncGeneratorContextManager[socketio.AsyncClient] - ], -): - # web server spy events - server_connect = socketio_server_events["connect"] - server_disconnect = socketio_server_events["disconnect"] - server_on_check = socketio_server_events["on_check"] - - number_of_clients: NonNegativeInt = 10 - async with AsyncExitStack() as socketio_frontend_clients: - frontend_clients: list[socketio.AsyncClient] = await logged_gather( - *[ 
- socketio_frontend_clients.enter_async_context(socketio_client_factory()) - for _ in range(number_of_clients) - ] - ) - await _assert_call_count(server_connect, call_count=number_of_clients) - - # client emits and check it was received - await logged_gather( - *[ - frontend_client.emit("check", data="an_event") - for frontend_client in frontend_clients - ] - ) - await _assert_call_count(server_on_check, call_count=number_of_clients) - - # attach spy to client - on_service_disk_usage_events: list[AsyncMock] = [ - _get_on_service_disk_usage_event(c) for c in frontend_clients - ] - - # server publishes a message - await publish_disk_usage(app, user_id=user_id, node_id=node_id, usage=usage) - - # check that all clients received it - for on_service_disk_usage_event in on_service_disk_usage_events: - await _assert_call_count(on_service_disk_usage_event, call_count=1) - on_service_disk_usage_event.assert_awaited_once_with( - jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage)) - ) - - await _assert_call_count(server_disconnect, call_count=number_of_clients) From 8a8319f82bac8032c32adf7937d280f44a993f8b Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Wed, 25 Sep 2024 08:36:10 +0200 Subject: [PATCH 026/104] =?UTF-8?q?=F0=9F=94=92=EF=B8=8F=20Strip=20credent?= =?UTF-8?q?ials=20form=20image=20VCS=20(#6433)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../service-integration/requirements/_base.in | 1 + .../requirements/_base.txt | 5 ++++ .../service_integration/cli/_compose_spec.py | 12 ++++++-- .../tests/test_cli__compose_spec.py | 28 +++++++++++++++++++ 4 files changed, 44 insertions(+), 2 deletions(-) create mode 100644 packages/service-integration/tests/test_cli__compose_spec.py diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index fee8aa856e2..6e288d49e0b 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -13,3 +13,4 @@ jsonschema # pytest-plugin pytest # pytest-plugin pyyaml typer[all] +yarl diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 131e231b537..b745227bd5f 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -35,6 +35,7 @@ idna==3.7 # via # email-validator # requests + # yarl iniconfig==2.0.0 # via pytest jinja2==3.1.4 @@ -57,6 +58,8 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py +multidict==6.1.0 + # via yarl orjson==3.10.7 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -121,3 +124,5 @@ urllib3==2.2.2 # -c requirements/../../../requirements/constraints.txt # docker # requests +yarl==1.12.1 + # via -r requirements/_base.in diff --git a/packages/service-integration/src/service_integration/cli/_compose_spec.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py index 117a4afa5ef..a42936c3695 100644 --- a/packages/service-integration/src/service_integration/cli/_compose_spec.py +++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py @@ -8,6 +8,7 @@ import yaml from models_library.utils.labels_annotations import to_labels from rich.console import Console +from yarl import URL from ..compose_spec_model import ComposeSpecification from ..errors 
import UndefinedOciImageSpecError @@ -34,6 +35,13 @@ def _run_git(*args) -> str: ).stdout.strip() +def _strip_credentials(url: str) -> str: + if (yarl_url := URL(url)) and yarl_url.is_absolute(): + stripped_url = URL(url).with_user(None).with_password(None) + return f"{stripped_url}" + return url + + def _run_git_or_empty_string(*args) -> str: try: return _run_git(*args) @@ -118,8 +126,8 @@ def create_docker_compose_image_spec( extra_labels[f"{LS_LABEL_PREFIX}.vcs-ref"] = _run_git_or_empty_string( "rev-parse", "HEAD" ) - extra_labels[f"{LS_LABEL_PREFIX}.vcs-url"] = _run_git_or_empty_string( - "config", "--get", "remote.origin.url" + extra_labels[f"{LS_LABEL_PREFIX}.vcs-url"] = _strip_credentials( + _run_git_or_empty_string("config", "--get", "remote.origin.url") ) return create_image_spec( diff --git a/packages/service-integration/tests/test_cli__compose_spec.py b/packages/service-integration/tests/test_cli__compose_spec.py new file mode 100644 index 00000000000..5fe98689a14 --- /dev/null +++ b/packages/service-integration/tests/test_cli__compose_spec.py @@ -0,0 +1,28 @@ +import pytest +from service_integration.cli._compose_spec import _strip_credentials + + +@pytest.mark.parametrize( + "url, expected_url", + [ + ( + "schema.veshttps://user:password@example.com/some/repo.git", + "schema.veshttps://example.com/some/repo.git", + ), + ( + "https://user:password@example.com/some/repo.git", + "https://example.com/some/repo.git", + ), + ( + "ssh://user:password@example.com/some/repo.git", + "ssh://example.com/some/repo.git", + ), + ( + "git@git.speag.com:some/repo.git", + "git@git.speag.com:some/repo.git", + ), + ("any_str", "any_str"), + ], +) +def test__strip_credentials(url: str, expected_url: str): + assert _strip_credentials(url) == expected_url From 699e05b489680c5577c96b09bb34dd8cd0176537 Mon Sep 17 00:00:00 2001 From: Dustin Kaiser Date: Wed, 2 Oct 2024 08:31:06 +0200 Subject: [PATCH 027/104] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Sep=20tests=20and?= =?UTF-8?q?=20tooling=20upgrade=20(#6431)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .pylintrc | 2 + api/tests/conftest.py | 3 +- api/tests/requirements.txt | 18 ++--- packages/aws-library/requirements/_base.txt | 77 +++++++++---------- packages/aws-library/requirements/_test.txt | 50 ++++++------ packages/aws-library/requirements/_tools.txt | 20 ++--- packages/aws-library/tests/test_ec2_client.py | 2 +- .../requirements/_base.txt | 24 +++--- .../requirements/_test.txt | 4 +- .../requirements/_tools.txt | 20 ++--- .../models-library/requirements/_base.txt | 6 +- .../models-library/requirements/_test.txt | 12 +-- .../models-library/requirements/_tools.txt | 24 +++--- .../requirements/_base.txt | 20 ++--- .../requirements/_test.txt | 16 ++-- .../requirements/_tools.txt | 20 ++--- .../postgres-database/requirements/_base.txt | 14 ++-- .../requirements/_migration.txt | 12 +-- .../postgres-database/requirements/_test.txt | 16 ++-- .../postgres-database/requirements/_tools.txt | 20 ++--- .../requirements/_base.txt | 16 ++-- .../requirements/_test.txt | 10 +-- .../requirements/_tools.txt | 20 ++--- .../service-library/requirements/_aiohttp.txt | 44 +++++------ .../service-library/requirements/_base.txt | 66 ++++++++-------- .../service-library/requirements/_fastapi.txt | 30 ++++---- .../service-library/requirements/_test.txt | 24 +++--- .../service-library/requirements/_tools.txt | 20 ++--- .../src/servicelib/aiohttp/tracing.py | 26 +++++-- 
From 699e05b489680c5577c96b09bb34dd8cd0176537 Mon Sep 17 00:00:00 2001 From: Dustin Kaiser Date: Wed, 2 Oct 2024 08:31:06 +0200 Subject: [PATCH 027/104] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Sep=20tests=20and?= =?UTF-8?q?=20tooling=20upgrade=20(#6431)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .pylintrc | 2 + api/tests/conftest.py | 3 +- api/tests/requirements.txt | 18 ++--- packages/aws-library/requirements/_base.txt | 77 +++++++++---------- packages/aws-library/requirements/_test.txt | 50 ++++++------ packages/aws-library/requirements/_tools.txt | 20 ++--- packages/aws-library/tests/test_ec2_client.py | 2 +- .../requirements/_base.txt | 24 +++--- .../requirements/_test.txt | 4 +- .../requirements/_tools.txt | 20 ++--- .../models-library/requirements/_base.txt | 6 +- .../models-library/requirements/_test.txt | 12 +-- .../models-library/requirements/_tools.txt | 24 +++--- .../requirements/_base.txt | 20 ++--- .../requirements/_test.txt | 16 ++-- .../requirements/_tools.txt | 20 ++--- .../postgres-database/requirements/_base.txt | 14 ++-- .../requirements/_migration.txt | 12 +-- .../postgres-database/requirements/_test.txt | 16 ++-- .../postgres-database/requirements/_tools.txt | 20 ++--- .../requirements/_base.txt | 16 ++-- .../requirements/_test.txt | 10 +-- .../requirements/_tools.txt | 20 ++--- .../service-library/requirements/_aiohttp.txt | 44 +++++------ .../service-library/requirements/_base.txt | 66 ++++++++-------- .../service-library/requirements/_fastapi.txt | 30 ++++---- .../service-library/requirements/_test.txt | 24 +++--- .../service-library/requirements/_tools.txt | 20 ++--- .../src/servicelib/aiohttp/tracing.py | 26 +++++-- .../settings-library/requirements/_base.txt | 6 +- .../settings-library/requirements/_test.txt | 4 +- .../settings-library/requirements/_tools.txt | 20 ++--- packages/simcore-sdk/requirements/_base.txt | 69 ++++++++--------- packages/simcore-sdk/requirements/_test.txt | 62 +++++++-------- packages/simcore-sdk/requirements/_tools.txt | 20 ++--- .../test_node_ports_v2_nodeports2.py | 3 +- .../test_node_ports_common_file_io_utils.py | 3 +- .../tests/unit/test_node_ports_v2_port.py | 7 +- services/agent/requirements/_test.txt | 30 ++++---- services/agent/requirements/_tools.txt | 18 ++--- services/api-server/requirements/_test.txt | 34 ++++---- services/api-server/requirements/_tools.txt | 20 ++--- .../exceptions/backend_errors.py | 4 +- services/autoscaling/requirements/_test.txt | 36 ++++----- services/autoscaling/requirements/_tools.txt | 20 ++--- ...test_modules_auto_scaling_computational.py | 5 +- .../unit/test_modules_auto_scaling_dynamic.py | 5 +- .../unit/test_modules_buffer_machine_core.py | 3 +- .../tests/unit/test_utils_rabbitmq.py | 5 +- services/catalog/requirements/_test.txt | 8 +- services/catalog/requirements/_tools.txt | 20 ++--- .../catalog/tests/unit/with_dbs/conftest.py | 11 +-- .../test_api_rest_services_specifications.py | 3 +- .../tests/unit/with_dbs/test_api_rpc.py | 5 +- .../clusters-keeper/requirements/_test.txt | 34 ++++---- .../clusters-keeper/requirements/_tools.txt | 20 ++--- services/dask-sidecar/requirements/_test.txt | 28 +++---- services/dask-sidecar/requirements/_tools.txt | 20 ++--- .../datcore-adapter/requirements/_test.txt | 12 +-- .../datcore-adapter/requirements/_tools.txt | 20 ++--- services/director-v2/requirements/_test.txt | 28 +++---- services/director-v2/requirements/_tools.txt | 20 ++--- .../api/routes/computations.py | 2 +- .../modules/comp_scheduler/base_scheduler.py | 5 +- ...t_dynamic_sidecar_nodeports_integration.py | 3 +- ...ixed_dynamic_sidecar_and_legacy_project.py | 3 +- .../with_dbs/test_api_route_computations.py | 1 + .../dynamic-scheduler/requirements/_test.txt | 4 +- .../dynamic-scheduler/requirements/_tools.txt | 18 ++--- .../test_services_status_monitor__monitor.py | 1 + .../dynamic-sidecar/requirements/_test.txt | 14 ++-- .../dynamic-sidecar/requirements/_tools.txt | 18 ++--- .../attribute_monitor/_watchdog_extensions.py | 4 +- .../modules/outputs/_watchdog_extensions.py | 4 +- services/dynamic-sidecar/tests/conftest.py | 1 + services/efs-guardian/requirements/_test.txt | 32 ++++---- services/efs-guardian/requirements/_tools.txt | 20 ++--- services/invitations/requirements/_test.txt | 6 +- services/invitations/requirements/_tools.txt | 20 ++--- services/migration/requirements/_test.txt | 14 ++-- services/migration/requirements/_tools.txt | 22 +++--- .../requirements/_test.txt | 8 +- .../requirements/_tools.txt | 22 +++--- .../tests/system/requirements/_test.txt | 14 ++-- .../tests/system/requirements/_tools.txt | 20 ++--- services/payments/requirements/_test.txt | 10 +-- services/payments/requirements/_tools.txt | 18 ++--- .../api/test__one_time_payment_workflows.py | 3 +- .../api/test__payment_method_workflows.py | 3 +- .../tests/unit/test_db_payments_users_repo.py | 6 +- .../tests/unit/test_rpc_payments_methods.py | 1 + .../tests/unit/test_services_payments.py | 1 + .../requirements/_test.txt | 30 ++++---- .../requirements/_tools.txt | 20 ++--- services/storage/requirements/_test.txt | 38 ++++----- services/storage/requirements/_tools.txt | 20 ++--- .../storage/tests/unit/test_dsm_dsmcleaner.py | 9 ++-
.../storage/tests/unit/test_handlers_files.py | 9 ++- services/web/server/requirements/_test.txt | 10 +-- services/web/server/requirements/_tools.txt | 18 ++--- .../catalog/_models.py | 2 +- .../isolated/test_garbage_collector_core.py | 3 +- .../02/test_projects_crud_handlers__delete.py | 3 +- .../02/test_projects_states_handlers.py | 1 + .../test_resource_manager.py | 3 +- tests/e2e-playwright/requirements/_test.txt | 22 +++--- tests/e2e-playwright/requirements/_tools.txt | 20 ++--- tests/e2e/requirements/requirements.txt | 6 +- .../requirements/requirements.txt | 4 +- tests/public-api/requirements/_base.txt | 16 ++-- tests/public-api/requirements/_test.txt | 20 ++--- tests/public-api/requirements/_tools.txt | 20 ++--- tests/swarm-deploy/requirements/_test.txt | 71 +++++++++-------- tests/swarm-deploy/requirements/_tools.txt | 22 +++--- 114 files changed, 974 insertions(+), 930 deletions(-) diff --git a/.pylintrc b/.pylintrc index 9454261215a..9f0e88f06ef 100644 --- a/.pylintrc +++ b/.pylintrc @@ -466,6 +466,8 @@ max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 +# Maximum number of positional arguments for function / method definitions (see R0917). +max-positional-arguments=12 [EXCEPTIONS] diff --git a/api/tests/conftest.py b/api/tests/conftest.py index 1f0319393b7..8be5481a3c5 100644 --- a/api/tests/conftest.py +++ b/api/tests/conftest.py @@ -1,5 +1,6 @@ -# pylint: disable=unused-argument # pylint: disable=redefined-outer-name +# pylint: disable=too-many-positional-arguments +# pylint: disable=unused-argument # pylint: disable=unused-variable import logging diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index ec2da14450d..04cf811ff2a 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -11,7 +11,7 @@ attrs==24.2.0 # via # aiohttp # jsonschema # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c ../../requirements/constraints.txt # requests @@ -25,7 +25,7 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -idna==3.7 +idna==3.10 # via # requests # yarl @@ -50,13 +50,13 @@ lazy-object-proxy==1.10.0 # via openapi-spec-validator markupsafe==2.1.5 # via werkzeug -more-itertools==10.4.0 +more-itertools==10.5.0 # via openapi-core -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -openapi-core==0.19.3 +openapi-core==0.19.4 # via -r requirements.in openapi-schema-validator==0.6.2 # via @@ -74,7 +74,7 @@ pathable==0.4.3 # via jsonschema-path pluggy==1.5.0 # via pytest -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements.in # pytest-asyncio @@ -114,11 +114,11 @@ six==1.16.0 # via # rfc3339-validator termcolor==2.4.0 # via pytest-sugar -urllib3==2.2.2 +urllib3==2.2.3 # via # -c ../../requirements/constraints.txt # requests -werkzeug==3.0.3 +werkzeug==3.0.4 # via openapi-core -yarl==1.9.4 +yarl==1.12.1 # via aiohttp diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 1c4e64828a3..79052f3c4d9 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiocache==0.12.2 # -r requirements/_base.in aiodebug==2.3.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.22.2 +aiodocker==0.23.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via @@ -28,13 +28,13 @@ aiohttp==3.10.5 # -c requirements/../../../requirements/constraints.txt # aiobotocore # aiodocker -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore
-aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika aiosignal==1.3.1 # via aiohttp -anyio==4.4.0 +anyio==4.6.0 # via # fast-depends # faststream @@ -56,9 +56,9 @@ botocore==1.34.131 # aiobotocore # boto3 # s3transfer -botocore-stubs==1.35.2 +botocore-stubs==1.35.25 # via types-aiobotocore -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -81,9 +81,9 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -fast-depends==2.4.8 +fast-depends==2.4.11 # via faststream -faststream==0.5.18 +faststream==0.5.23 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via @@ -93,15 +93,15 @@ googleapis-common-protos==1.65.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.66.0 +grpcio==1.66.1 # via opentelemetry-exporter-otlp-proto-grpc -idna==3.7 +idna==3.10 # via # anyio # email-validator # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api jmespath==1.0.1 # via @@ -117,11 +117,11 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -130,35 +130,35 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.27.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.27.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via opentelemetry-instrumentation-requests -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.27.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via opentelemetry-instrumentation-requests orjson==3.10.7 # via @@ -172,13 +172,13 @@ orjson==3.10.7 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -protobuf==4.25.4 +protobuf==4.25.5 # via # 
googleapis-common-protos # opentelemetry-proto psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -195,7 +195,7 @@ pydantic==1.10.17 # fast-depends pygments==2.18.0 # via rich -pyinstrument==4.7.2 +pyinstrument==4.7.3 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via @@ -226,7 +226,7 @@ referencing==0.29.3 # jsonschema-specifications requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -237,7 +237,7 @@ rpds-py==0.20.0 # referencing s3transfer==0.10.2 # via boto3 -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation sh==2.0.7 # via -r requirements/_base.in @@ -253,22 +253,21 @@ toolz==0.12.1 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.66.5 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.4 +typer==0.12.5 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in - # faststream -types-aiobotocore==2.13.2 +types-aiobotocore==2.15.1 # via -r requirements/_base.in -types-aiobotocore-ec2==2.13.2 +types-aiobotocore-ec2==2.15.1 # via types-aiobotocore -types-aiobotocore-s3==2.13.2 +types-aiobotocore-s3==2.15.1 # via types-aiobotocore -types-aiobotocore-ssm==2.13.2 +types-aiobotocore-ssm==2.15.1 # via types-aiobotocore -types-awscrt==0.21.2 +types-awscrt==0.21.5 # via botocore-stubs -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via @@ -281,7 +280,7 @@ typing-extensions==4.12.2 # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -296,10 +295,10 @@ wrapt==1.16.0 # aiobotocore # deprecated # opentelemetry-instrumentation -yarl==1.9.4 +yarl==1.12.1 # via # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index 53608f2e480..68df09cd6f4 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -7,7 +7,7 @@ attrs==24.2.0 # -c requirements/_base.txt # jsonschema # referencing -aws-sam-translator==1.89.0 +aws-sam-translator==1.91.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -18,7 +18,7 @@ boto3==1.34.131 # -c requirements/_base.txt # aws-sam-translator # moto -boto3-stubs==1.35.2 +boto3-stubs==1.35.25 # via types-boto3 botocore==1.34.131 # via @@ -27,20 +27,20 @@ botocore==1.34.131 # boto3 # moto # s3transfer -botocore-stubs==1.35.2 +botocore-stubs==1.35.25 # via 
# -c requirements/_base.txt # boto3-stubs # types-aioboto3 # types-aiobotocore -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography -cfn-lint==1.10.3 +cfn-lint==1.15.0 # via moto charset-normalizer==3.3.2 # via @@ -54,30 +54,30 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto docker==7.1.0 # via moto -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto flexcache==0.3 # via pint flexparser==0.3.1 # via pint -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # requests @@ -97,7 +97,7 @@ jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -124,7 +124,7 @@ markupsafe==2.1.5 # via # jinja2 # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy @@ -150,18 +150,18 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -213,7 +213,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -235,7 +235,7 @@ s3transfer==0.10.2 # via # -c requirements/_base.txt # boto3 -setuptools==74.0.0 +setuptools==75.1.0 # via # -c requirements/_base.txt # moto @@ -244,23 +244,23 @@ six==1.16.0 # -c requirements/_base.txt # python-dateutil # rfc3339-validator -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint termcolor==2.4.0 # via pytest-sugar types-aioboto3==13.1.1 # via -r requirements/_test.in -types-aiobotocore==2.13.2 +types-aiobotocore==2.15.1 # via # -c requirements/_base.txt # types-aioboto3 -types-awscrt==0.21.2 +types-awscrt==0.21.5 # via # -c requirements/_base.txt # botocore-stubs types-boto3==1.0.2 # via -r requirements/_test.in -types-s3transfer==0.10.1 +types-s3transfer==0.10.2 # via # boto3-stubs # types-aioboto3 @@ -276,7 +276,7 @@ typing-extensions==4.12.2 # pydantic # types-aioboto3 # types-aiobotocore -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -284,7 +284,7 @@ urllib3==2.2.2 # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git a/packages/aws-library/requirements/_tools.txt b/packages/aws-library/requirements/_tools.txt index 36623b0c709..861338d5b7f 100644 --- a/packages/aws-library/requirements/_tools.txt +++ b/packages/aws-library/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via 
virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==75.1.0 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -80,7 +80,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/aws-library/tests/test_ec2_client.py b/packages/aws-library/tests/test_ec2_client.py index 2b1d8fca376..625555e9f5d 100644 --- a/packages/aws-library/tests/test_ec2_client.py +++ b/packages/aws-library/tests/test_ec2_client.py @@ -130,7 +130,7 @@ async def test_get_ec2_instance_capabilities_empty_list_returns_all_options( instance_types = await simcore_ec2_api.get_ec2_instance_capabilities(set()) assert instance_types # NOTE: this might need adaptation when moto is updated - assert 700 < len(instance_types) < 807 + assert 700 < len(instance_types) < 828 async def test_get_ec2_instance_capabilities_with_invalid_type_raises( diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index 327a04c5678..ff32942482a 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -13,21 +13,21 @@ cloudpickle==3.0.0 # via # dask # distributed -dask==2024.8.1 +dask==2024.9.0 # via # -r requirements/_base.in # distributed -distributed==2024.8.1 +distributed==2024.9.0 # via dask dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -fsspec==2024.6.1 +fsspec==2024.9.0 # via dask -idna==3.7 +idna==3.10 # via email-validator -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via dask jinja2==3.1.4 # via @@ -49,7 +49,7 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py -msgpack==1.0.8 +msgpack==1.1.0 # via distributed orjson==3.10.7 # via @@ -65,7 +65,7 @@ partd==1.4.2 # via dask psutil==6.0.0 # via distributed -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -88,7 +88,7 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -111,15 +111,15 @@ toolz==0.12.1 # partd tornado==6.4.1 # via distributed -typer==0.12.4 +typer==0.12.5 # via -r requirements/../../../packages/settings-library/requirements/_base.in -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via # pydantic # typer -urllib3==2.2.2 
+urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -127,5 +127,5 @@ urllib3==2.2.2 # distributed zict==3.0.0 # via distributed -zipp==3.20.0 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt index 521d13265d9..b0593212939 100644 --- a/packages/dask-task-models-library/requirements/_test.txt +++ b/packages/dask-task-models-library/requirements/_test.txt @@ -4,7 +4,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flexcache==0.3 # via pint @@ -25,7 +25,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt index b9ee0a3c96d..779105b3894 100644 --- a/packages/dask-task-models-library/requirements/_tools.txt +++ b/packages/dask-task-models-library/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -77,7 +77,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index f900e7715f0..d21b94e9cb7 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -8,7 +8,7 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -idna==3.7 +idna==3.10 # via email-validator jsonschema==4.23.0 # via -r requirements/_base.in @@ -18,7 +18,7 @@ orjson==3.10.7 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in @@ -34,7 +34,7 @@ rpds-py==0.20.0 # referencing six==1.16.0 # via python-dateutil 
-types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via pydantic diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt index 1cdf223883d..b0e97313b9f 100644 --- a/packages/models-library/requirements/_test.txt +++ b/packages/models-library/requirements/_test.txt @@ -8,7 +8,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flexcache==0.3 # via pint @@ -16,13 +16,13 @@ flexparser==0.3.1 # via pint icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # yarl iniconfig==2.0.0 # via pytest -multidict==6.0.5 +multidict==6.1.0 # via yarl packaging==24.1 # via @@ -36,7 +36,7 @@ pprintpp==0.4.0 # via pytest-icdiff psutil==6.0.0 # via -r requirements/_test.in -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -87,7 +87,7 @@ termcolor==2.4.0 # via pytest-sugar types-jsonschema==4.23.0.20240813 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.12.2 # via @@ -95,5 +95,5 @@ typing-extensions==4.12.2 # flexcache # flexparser # pint -yarl==1.9.4 +yarl==1.12.1 # via -r requirements/_test.in diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt index 0efdb113971..c6baceac354 100644 --- a/packages/models-library/requirements/_tools.txt +++ b/packages/models-library/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -31,7 +31,7 @@ mccabe==0.7.0 # via pylint mdurl==0.1.2 # via markdown-it-py -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -50,7 +50,7 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint @@ -59,7 +59,7 @@ pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt pygments==2.18.0 # via rich -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -70,17 +70,17 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -rich==13.7.1 +rich==13.8.1 # via typer -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools shellingham==1.5.4 # via typer tomlkit==0.13.2 # via pylint -typer==0.12.4 +typer==0.12.5 # via -r requirements/_tools.in typing-extensions==4.12.2 # via @@ -88,7 +88,7 @@ typing-extensions==4.12.2 # -c requirements/_test.txt # mypy # typer -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index b7e4320e9e0..abc242615c5 100644 --- a/packages/notifications-library/requirements/_base.txt +++ 
b/packages/notifications-library/requirements/_base.txt @@ -2,7 +2,7 @@ aiofiles==24.1.0 # via -r requirements/_base.in aiosmtplib==3.0.2 # via -r requirements/_base.in -alembic==1.13.2 +alembic==1.13.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in @@ -20,9 +20,9 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -idna==3.7 +idna==3.10 # via # email-validator # yarl @@ -52,7 +52,7 @@ markupsafe==2.1.5 # mako mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via yarl orjson==3.10.7 # via @@ -63,7 +63,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -80,7 +80,7 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -92,7 +92,7 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -100,14 +100,14 @@ sqlalchemy==1.4.53 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -typer==0.12.4 +typer==0.12.5 # via -r requirements/../../../packages/settings-library/requirements/_base.in -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via # alembic # pydantic # typer -yarl==1.9.4 +yarl==1.12.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 25211dd50f2..15f7a507550 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -1,4 +1,4 @@ -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -10,21 +10,21 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # requests iniconfig==2.0.0 # via pytest -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -36,7 +36,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -77,7 +77,7 @@ six==1.16.0 # via # -c requirements/_base.txt # python-dateutil -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -95,7 +95,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # mypy # sqlalchemy2-stubs -urllib3==2.2.2 
+urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 8204f34a33c..fa8bee59633 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -49,14 +49,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -78,7 +78,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index aaf19732f53..bded83bb4f9 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -1,12 +1,12 @@ -alembic==1.13.2 +alembic==1.13.3 # via -r requirements/_base.in async-timeout==4.0.3 # via asyncpg asyncpg==0.29.0 # via sqlalchemy -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -idna==3.7 +idna==3.10 # via yarl mako==1.3.5 # via @@ -14,15 +14,15 @@ mako==1.3.5 # alembic markupsafe==2.1.5 # via mako -multidict==6.0.5 +multidict==6.1.0 # via yarl psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in @@ -31,5 +31,5 @@ typing-extensions==4.12.2 # via # alembic # pydantic -yarl==1.9.4 +yarl==1.12.1 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt index 914d0820310..a0dd4d6577f 100644 --- a/packages/postgres-database/requirements/_migration.txt +++ b/packages/postgres-database/requirements/_migration.txt @@ -1,8 +1,8 @@ -alembic==1.13.2 +alembic==1.13.3 # via # -c requirements/_base.txt # -r requirements/_migration.in -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -12,11 +12,11 @@ click==8.1.7 # via -r requirements/_migration.in docker==7.1.0 # via -r 
requirements/_migration.in -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # requests @@ -31,7 +31,7 @@ markupsafe==2.1.5 # mako requests==2.32.3 # via docker -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -42,7 +42,7 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # alembic -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_migration.in diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 8bd80b78b95..91705ca63c5 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -10,16 +10,16 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # -c requirements/_migration.txt # sqlalchemy iniconfig==2.0.0 # via pytest -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -32,7 +32,7 @@ psycopg2-binary==2.9.9 # -c requirements/_base.txt # aiopg # sqlalchemy -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -59,7 +59,7 @@ pyyaml==6.0.2 # -r requirements/_test.in six==1.16.0 # via python-dateutil -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -68,11 +68,11 @@ sqlalchemy==1.4.53 # aiopg sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -types-docker==7.1.0.20240821 +types-docker==7.1.0.20240827 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 # via -r requirements/_test.in -types-requests==2.32.0.20240712 +types-requests==2.32.0.20240914 # via types-docker typing-extensions==4.12.2 # via @@ -80,7 +80,7 @@ typing-extensions==4.12.2 # -c requirements/_migration.txt # mypy # sqlalchemy2-stubs -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_migration.txt diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index 9247bc4b1a9..6d01f81c8e1 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -16,9 +16,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -48,14 +48,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c 
requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -77,7 +77,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index b745227bd5f..b91836c5138 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -9,7 +9,7 @@ attrs==24.2.0 # referencing binaryornot==0.4.4 # via cookiecutter -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -31,7 +31,7 @@ docker==7.1.0 # via -r requirements/_base.in email-validator==2.2.0 # via pydantic -idna==3.7 +idna==3.10 # via # email-validator # requests @@ -69,14 +69,14 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in pygments==2.18.0 # via rich -pytest==8.3.2 +pytest==8.3.3 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -96,7 +96,7 @@ requests==2.32.3 # via # cookiecutter # docker -rich==13.7.1 +rich==13.8.1 # via # cookiecutter # typer @@ -110,15 +110,15 @@ six==1.16.0 # via python-dateutil text-unidecode==1.3 # via python-slugify -typer==0.12.4 +typer==0.12.5 # via -r requirements/_base.in -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via # pydantic # typer -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index fa704698091..dad76c6a0cd 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -19,7 +19,7 @@ pluggy==1.5.0 # via # -c requirements/_base.txt # pytest -pytest==8.3.2 +pytest==8.3.3 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -44,15 +44,15 @@ rpds-py==0.20.0 # referencing termcolor==2.4.0 # via pytest-sugar -types-docker==7.1.0.20240821 +types-docker==7.1.0.20240827 # via -r requirements/_test.in types-jsonschema==4.23.0.20240813 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in -types-requests==2.32.0.20240712 +types-requests==2.32.0.20240914 # via types-docker -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index 354746f70e4..6cfab1a3f28 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -1,8 +1,8 @@ 
-astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -75,7 +75,7 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index 990f33561e3..1536a562252 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -18,7 +18,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -34,13 +34,13 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -idna==3.7 +idna==3.10 # via # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api isodate==0.6.1 # via openapi-core @@ -62,13 +62,13 @@ lazy-object-proxy==1.10.0 # via openapi-spec-validator markupsafe==2.1.5 # via werkzeug -more-itertools==10.4.0 +more-itertools==10.5.0 # via openapi-core -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -openapi-core==0.19.3 +openapi-core==0.19.4 # via -r requirements/_aiohttp.in openapi-schema-validator==0.6.2 # via @@ -76,7 +76,7 @@ openapi-schema-validator==0.6.2 # openapi-spec-validator openapi-spec-validator==0.7.1 # via openapi-core -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client @@ -84,26 +84,26 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi -opentelemetry-instrumentation-aiohttp-client==0.47b0 +opentelemetry-instrumentation-aiohttp-client==0.48b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiohttp-server==0.47b0 
+opentelemetry-instrumentation-aiohttp-server==0.48b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiopg==0.47b0 +opentelemetry-instrumentation-aiopg==0.48b0 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-dbapi==0.47b0 +opentelemetry-instrumentation-dbapi==0.48b0 # via opentelemetry-instrumentation-aiopg -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-dbapi -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server @@ -111,7 +111,7 @@ parse==1.20.2 # via openapi-core pathable==0.4.3 # via jsonschema-path -prometheus-client==0.20.0 +prometheus-client==0.21.0 # via -r requirements/_aiohttp.in psycopg2-binary==2.9.9 # via @@ -137,25 +137,25 @@ rpds-py==0.20.0 # via # jsonschema # referencing -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation six==1.16.0 # via # isodate # rfc3339-validator -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiopg -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -werkzeug==3.0.3 +werkzeug==3.0.4 # via # -r requirements/_aiohttp.in # openapi-core @@ -167,7 +167,7 @@ wrapt==1.16.0 # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi -yarl==1.9.4 +yarl==1.12.1 # via aiohttp -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 4216f1f186d..64b33447725 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -4,7 +4,7 @@ aiocache==0.12.2 # via -r requirements/_base.in aiodebug==2.3.0 # via -r requirements/_base.in -aiodocker==0.22.2 +aiodocker==0.23.0 # via -r requirements/_base.in aiofiles==24.1.0 # via -r requirements/_base.in @@ -16,11 +16,11 @@ aiohttp==3.10.5 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiodocker -aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika aiosignal==1.3.1 # via aiohttp -anyio==4.4.0 +anyio==4.6.0 # via # fast-depends # faststream @@ -33,7 +33,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -53,9 +53,9 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -fast-depends==2.4.8 +fast-depends==2.4.11 # via faststream -faststream==0.5.18 +faststream==0.5.23 # via -r requirements/_base.in frozenlist==1.4.1 # via @@ -65,15 +65,15 @@ 
googleapis-common-protos==1.65.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.66.0 +grpcio==1.66.1 # via opentelemetry-exporter-otlp-proto-grpc -idna==3.7 +idna==3.10 # via # anyio # email-validator # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api jsonschema==4.23.0 # via -r requirements/../../../packages/models-library/requirements/_base.in @@ -83,11 +83,11 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -96,35 +96,35 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.27.0 # via -r requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.27.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via opentelemetry-instrumentation-requests -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.48b0 # via -r requirements/_base.in -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.27.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.27.0 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via opentelemetry-instrumentation-requests orjson==3.10.7 # via @@ -134,13 +134,13 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -protobuf==4.25.4 +protobuf==4.25.5 # via # googleapis-common-protos # opentelemetry-proto psutil==6.0.0 # via -r requirements/_base.in -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -151,7 +151,7 @@ pydantic==1.10.17 # fast-depends pygments==2.18.0 # via rich -pyinstrument==4.7.2 +pyinstrument==4.7.3 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -174,7 +174,7 @@ referencing==0.29.3 # jsonschema-specifications requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -182,7 +182,7 @@ rpds-py==0.20.0 # via # jsonschema # referencing -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation shellingham==1.5.4 # via typer @@ -196,11 +196,9 @@ toolz==0.12.1 # via -r requirements/_base.in tqdm==4.66.5 # via -r 
requirements/_base.in -typer==0.12.4 - # via - # -r requirements/../../../packages/settings-library/requirements/_base.in - # faststream -types-python-dateutil==2.9.0.20240821 +typer==0.12.5 + # via -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via @@ -209,7 +207,7 @@ typing-extensions==4.12.2 # opentelemetry-sdk # pydantic # typer -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -219,10 +217,10 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation -yarl==1.9.4 +yarl==1.12.1 # via # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 3fceb62b1f6..8a3aed37600 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -1,10 +1,10 @@ -anyio==4.4.0 +anyio==4.6.0 # via # httpx # starlette asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -30,54 +30,54 @@ h11==0.14.0 # uvicorn httpcore==1.0.5 # via httpx -httpx==0.27.0 +httpx==0.27.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_fastapi.in -idna==3.7 +idna==3.10 # via # anyio # httpx -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asgi==0.47b0 +opentelemetry-instrumentation-asgi==0.48b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.47b0 +opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/_fastapi.in -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi -prometheus-client==0.20.0 +prometheus-client==0.21.0 # via # -r requirements/_fastapi.in # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/_fastapi.in -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/_base.in # -c 
requirements/../../../requirements/constraints.txt # fastapi -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation sniffio==1.3.1 # via @@ -99,5 +99,5 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 42d1479b5d2..a6a72dcd500 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -14,7 +14,7 @@ aiosignal==1.3.1 # -c requirements/_aiohttp.txt # -c requirements/_base.txt # aiohttp -anyio==4.4.0 +anyio==4.6.0 # via # -c requirements/_base.txt # -c requirements/_fastapi.txt @@ -29,7 +29,7 @@ attrs==24.2.0 # jsonschema # pytest-docker # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_aiohttp.txt @@ -51,7 +51,7 @@ docker==7.1.0 # via -r requirements/_test.in execnet==2.1.1 # via pytest-xdist -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -61,7 +61,7 @@ frozenlist==1.4.1 # -c requirements/_base.txt # aiohttp # aiosignal -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_aiohttp.txt # sqlalchemy @@ -73,14 +73,14 @@ httpcore==1.0.5 # via # -c requirements/_fastapi.txt # httpx -httpx==0.27.0 +httpx==0.27.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_fastapi.txt # respx icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt @@ -111,13 +111,13 @@ lazy-object-proxy==1.10.0 # via # -c requirements/_aiohttp.txt # openapi-spec-validator -multidict==6.0.5 +multidict==6.1.0 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -143,7 +143,7 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-aiohttp @@ -231,7 +231,7 @@ sniffio==1.3.1 # anyio # asgi-lifespan # httpx -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_aiohttp.txt @@ -254,14 +254,14 @@ typing-extensions==4.12.2 # -c requirements/_fastapi.txt # mypy # sqlalchemy2-stubs -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_aiohttp.txt # -c requirements/_base.txt # docker # requests -yarl==1.9.4 +yarl==1.12.1 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index 31b97e4b2d7..dff0e09f6bb 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt 
     #   -r requirements/../../../requirements/devenv.txt
@@ -49,14 +49,14 @@ pip==24.2
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
     # via
     #   black
     #   pylint
     #   virtualenv
 pre-commit==3.8.0
     # via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
     # via -r requirements/../../../requirements/devenv.txt
 pyproject-hooks==1.1.0
     # via
@@ -68,9 +68,9 @@ pyyaml==6.0.2
     #   -c requirements/_base.txt
     #   -c requirements/_test.txt
     #   pre-commit
-ruff==0.6.1
+ruff==0.6.7
     # via -r requirements/../../../requirements/devenv.txt
-setuptools==74.0.0
+setuptools==75.1.0
     # via
     #   -c requirements/_base.txt
     #   pip-tools
@@ -81,7 +81,7 @@ typing-extensions==4.12.2
     #   -c requirements/_base.txt
     #   -c requirements/_test.txt
     #   mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
     # via pre-commit
 wheel==0.44.0
     # via pip-tools
diff --git a/packages/service-library/src/servicelib/aiohttp/tracing.py b/packages/service-library/src/servicelib/aiohttp/tracing.py
index b8394618be2..80407332812 100644
--- a/packages/service-library/src/servicelib/aiohttp/tracing.py
+++ b/packages/service-library/src/servicelib/aiohttp/tracing.py
@@ -1,6 +1,7 @@
 """
 Adds aiohttp middleware for tracing using opentelemetry instrumentation.
 """
+
 import logging
 
 from aiohttp import web
@@ -8,19 +9,23 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
     OTLPSpanExporter as OTLPSpanExporterHTTP,
 )
-from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor
-from opentelemetry.instrumentation.aiohttp_server import (
+from opentelemetry.instrumentation.aiohttp_client import (  # pylint:disable=no-name-in-module
+    AioHttpClientInstrumentor,
+)
+from opentelemetry.instrumentation.aiohttp_server import (  # pylint:disable=no-name-in-module
     AioHttpServerInstrumentor,
     middleware,
 )
-from opentelemetry.instrumentation.aiopg import AiopgInstrumentor
+from opentelemetry.instrumentation.aiopg import (  # pylint:disable=no-name-in-module
+    AiopgInstrumentor,
+)
 from opentelemetry.instrumentation.requests import RequestsInstrumentor
 from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
 from settings_library.tracing import TracingSettings
 
-log = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
 
 
 def setup_tracing(
@@ -32,15 +37,22 @@ def setup_tracing(
     """
     Sets up this service for a distributed tracing system (opentelemetry)
     """
+    _ = app
     opentelemetry_collector_endpoint = (
         tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT
     )
     opentelemetry_collector_port = tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT
     if not opentelemetry_collector_endpoint and not opentelemetry_collector_port:
-        log.warning("Skipping opentelemetry tracing setup")
+        _logger.warning("Skipping opentelemetry tracing setup")
         return
     if not opentelemetry_collector_endpoint or not opentelemetry_collector_port:
-        msg = f"Variable opentelemetry_collector_endpoint [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] or opentelemetry_collector_port [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] unset. Tracing options incomplete."
+        msg = (
+            "Variable opentelemetry_collector_endpoint "
+            f"[{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] "
+            "or opentelemetry_collector_port "
+            f"[{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] "
+            "unset. Provide both or remove both."
+        )
         raise RuntimeError(msg)
     resource = Resource(attributes={"service.name": service_name})
     trace.set_tracer_provider(TracerProvider(resource=resource))
     tracer_provider: trace.TracerProvider = trace.get_tracer_provider()
@@ -49,7 +61,7 @@ def setup_tracing(
         f"{opentelemetry_collector_endpoint}:{opentelemetry_collector_port}/v1/traces"
     )
 
-    log.info(
+    _logger.info(
         "Trying to connect service %s to tracing collector at %s.",
         service_name,
         tracing_destination,
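Note: with the tracing.py changes above, enabling tracing in an aiohttp-based service stays a one-call setup. A minimal sketch of the intended usage (names as in this diff; assumes a TracingSettings instance whose collector endpoint and port are both set, e.g. via TRACING_* environment variables):

    from aiohttp import web
    from servicelib.aiohttp.tracing import setup_tracing
    from settings_library.tracing import TracingSettings

    app = web.Application()
    # TracingSettings is a pydantic settings class; assumed here to pick up
    # TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT / _PORT from the environment
    tracing_settings = TracingSettings()
    setup_tracing(app, tracing_settings=tracing_settings, service_name="my-service")
    web.run_app(app)

setup_tracing() raises RuntimeError when only one of the two collector values is provided, and silently skips instrumentation when both are unset.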
diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt
index 900c4fea2aa..a97700778c3 100644
--- a/packages/settings-library/requirements/_base.txt
+++ b/packages/settings-library/requirements/_base.txt
@@ -4,19 +4,19 @@ markdown-it-py==3.0.0
     # via rich
 mdurl==0.1.2
     # via markdown-it-py
-pydantic==1.10.17
+pydantic==1.10.18
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/_base.in
 pygments==2.18.0
     # via rich
-rich==13.7.1
+rich==13.8.1
     # via
     #   -r requirements/_base.in
     #   typer
 shellingham==1.5.4
     # via typer
-typer==0.12.4
+typer==0.12.5
     # via -r requirements/_base.in
 typing-extensions==4.12.2
     # via
diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt
index 9a1422f4bc4..d4aa9b9224c 100644
--- a/packages/settings-library/requirements/_test.txt
+++ b/packages/settings-library/requirements/_test.txt
@@ -2,7 +2,7 @@ coverage==7.6.1
     # via
     #   -r requirements/_test.in
     #   pytest-cov
-faker==27.0.0
+faker==29.0.0
     # via -r requirements/_test.in
 iniconfig==2.0.0
     # via pytest
@@ -12,7 +12,7 @@ packaging==24.1
     #   pytest
     #   pytest-sugar
 pluggy==1.5.0
     # via pytest
-pytest==8.3.2
+pytest==8.3.3
     # via
     #   -r requirements/_test.in
     #   pytest-cov
diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt
index d14257822b0..643018f4428 100644
--- a/packages/settings-library/requirements/_tools.txt
+++ b/packages/settings-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
     # via pylint
 black==24.8.0
     # via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
     # via pip-tools
 bump2version==1.0.1
     # via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
     # via pylint
 distlib==0.3.8
     # via virtualenv
-filelock==3.15.4
+filelock==3.16.1
     # via virtualenv
-identify==2.6.0
+identify==2.6.1
     # via pre-commit
 isort==5.13.2
     # via
@@ -27,7 +27,7 @@ isort==5.13.2
     #   pylint
 mccabe==0.7.0
     # via pylint
-mypy==1.11.1
+mypy==1.11.2
     # via -r requirements/../../../requirements/devenv.txt
 mypy-extensions==1.0.0
     # via
@@ -46,14 +46,14 @@ pip==24.2
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
     # via
     #   black
     #   pylint
     #   virtualenv
 pre-commit==3.8.0
     # via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
     # via -r requirements/../../../requirements/devenv.txt
 pyproject-hooks==1.1.0
     # via
@@ -63,9 +63,9 @@ pyyaml==6.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   pre-commit
-ruff==0.6.1
+ruff==0.6.7
     # via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
     # via pip-tools
 tomlkit==0.13.2
     # via pylint
@@ -73,7 +73,7 @@ typing-extensions==4.12.2
     # via
     #   -c requirements/_base.txt
     #   mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
     # via pre-commit
 wheel==0.44.0
     # via pip-tools
diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt
index 65fcbaa084c..a2d040670d0 100644
--- a/packages/simcore-sdk/requirements/_base.txt
+++ b/packages/simcore-sdk/requirements/_base.txt
@@ -6,7 +6,7 @@ aiocache==0.12.2
     #   -r requirements/_base.in
 aiodebug==2.3.0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
-aiodocker==0.22.2
+aiodocker==0.23.0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 aiofiles==24.1.0
     # via
@@ -27,13 +27,13 @@ aiohttp==3.10.5
     #   aiodocker
 aiopg==1.4.0
     # via -r requirements/_base.in
-aiormq==6.8.0
+aiormq==6.8.1
     # via aio-pika
 aiosignal==1.3.1
     # via aiohttp
-alembic==1.13.2
+alembic==1.13.3
     # via -r requirements/../../../packages/postgres-database/requirements/_base.in
-anyio==4.4.0
+anyio==4.6.0
     # via
     #   fast-depends
     #   faststream
@@ -55,7 +55,7 @@ attrs==24.2.0
     #   aiohttp
     #   jsonschema
     #   referencing
-certifi==2024.7.4
+certifi==2024.8.30
     # via
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -79,9 +79,9 @@ dnspython==2.6.1
     # via email-validator
 email-validator==2.2.0
     # via pydantic
-fast-depends==2.4.8
+fast-depends==2.4.11
     # via faststream
-faststream==0.5.18
+faststream==0.5.23
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 flexcache==0.3
     # via pint
@@ -95,17 +95,17 @@ googleapis-common-protos==1.65.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-greenlet==3.0.3
+greenlet==3.1.1
     # via sqlalchemy
-grpcio==1.66.0
+grpcio==1.66.1
     # via opentelemetry-exporter-otlp-proto-grpc
-idna==3.7
+idna==3.10
     # via
     #   anyio
     #   email-validator
     #   requests
     #   yarl
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
     # via opentelemetry-api
 jsonschema==4.23.0
     # via
@@ -129,11 +129,11 @@ markupsafe==2.1.5
     # via mako
 mdurl==0.1.2
     # via markdown-it-py
-multidict==6.0.5
+multidict==6.1.0
     # via
     #   aiohttp
     #   yarl
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
     # via
     #   -r requirements/../../../packages/service-library/requirements/_base.in
     #   opentelemetry-exporter-otlp-proto-grpc
@@ -142,35 +142,35 @@ opentelemetry-api==1.26.0
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
     #   opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.26.0
+opentelemetry-exporter-otlp==1.27.0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-exporter-otlp-proto-common==1.26.0
+opentelemetry-exporter-otlp-proto-common==1.27.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.26.0
+opentelemetry-exporter-otlp-proto-grpc==1.27.0
     # via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.26.0
+opentelemetry-exporter-otlp-proto-http==1.27.0
     # via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
     # via opentelemetry-instrumentation-requests
-opentelemetry-instrumentation-requests==0.47b0
+opentelemetry-instrumentation-requests==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-proto==1.26.0
+opentelemetry-proto==1.27.0
     # via
     #   opentelemetry-exporter-otlp-proto-common
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.26.0
+opentelemetry-sdk==1.27.0
     # via
     #   -r requirements/../../../packages/service-library/requirements/_base.in
     #   opentelemetry-exporter-otlp-proto-grpc
     #
opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via opentelemetry-instrumentation-requests orjson==3.10.7 # via @@ -189,7 +189,7 @@ pamqp==3.3.0 # via aiormq pint==0.24.3 # via -r requirements/_base.in -protobuf==4.25.4 +protobuf==4.25.5 # via # googleapis-common-protos # opentelemetry-proto @@ -199,7 +199,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -218,7 +218,7 @@ pydantic==1.10.17 # fast-depends pygments==2.18.0 # via rich -pyinstrument==4.7.2 +pyinstrument==4.7.3 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -249,7 +249,7 @@ referencing==0.29.3 # jsonschema-specifications requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -258,7 +258,7 @@ rpds-py==0.20.0 # via # jsonschema # referencing -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation shellingham==1.5.4 # via typer @@ -266,7 +266,7 @@ six==1.16.0 # via python-dateutil sniffio==1.3.1 # via anyio -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -288,12 +288,11 @@ tqdm==4.66.5 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -typer==0.12.4 +typer==0.12.5 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in - # faststream -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via @@ -306,7 +305,7 @@ typing-extensions==4.12.2 # pint # pydantic # typer -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -320,11 +319,11 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation -yarl==1.9.4 +yarl==1.12.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 16e63f1e6be..783f629f778 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -16,7 +16,7 @@ aiohttp==3.10.5 # -c requirements/_base.txt # aiobotocore # aioresponses -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aioresponses==0.7.6 # via -r requirements/_test.in @@ -24,7 +24,7 @@ aiosignal==1.3.1 # via # -c 
requirements/_base.txt # aiohttp -alembic==1.13.2 +alembic==1.13.3 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -36,7 +36,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -aws-sam-translator==1.89.0 +aws-sam-translator==1.91.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto @@ -54,16 +54,16 @@ botocore==1.34.131 # boto3 # moto # s3transfer -botocore-stubs==1.35.2 +botocore-stubs==1.35.25 # via types-aiobotocore -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography -cfn-lint==1.10.3 +cfn-lint==1.15.0 # via moto charset-normalizer==3.3.2 # via @@ -78,7 +78,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -89,7 +89,7 @@ docker==7.1.0 # moto execnet==2.1.1 # via pytest-xdist -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -97,22 +97,22 @@ flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.4.1 # via # -c requirements/_base.txt # aiohttp # aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto -greenlet==3.0.3 +greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # -c requirements/_base.txt # requests @@ -132,7 +132,7 @@ jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -166,16 +166,16 @@ markupsafe==2.1.5 # jinja2 # mako # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy -multidict==6.0.5 +multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -198,18 +198,18 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -260,7 +260,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -281,7 +281,7 @@ rpds-py==0.20.0 # referencing s3transfer==0.10.2 # via boto3 -setuptools==74.0.0 +setuptools==75.1.0 # via # -c requirements/_base.txt # moto @@ -290,7 +290,7 @@ six==1.16.0 # -c requirements/_base.txt # python-dateutil # rfc3339-validator -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -298,17 +298,17 @@ sqlalchemy==1.4.53 # alembic sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -types-aiobotocore==2.13.2 +types-aiobotocore==2.15.1 # via -r requirements/_test.in -types-aiobotocore-s3==2.13.2 +types-aiobotocore-s3==2.15.1 # via types-aiobotocore types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in -types-awscrt==0.21.2 +types-awscrt==0.21.5 # via botocore-stubs types-tqdm==4.66.0.20240417 # via -r requirements/_test.in @@ -323,7 +323,7 @@ 
typing-extensions==4.12.2 # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -331,7 +331,7 @@ urllib3==2.2.2 # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto @@ -342,7 +342,7 @@ wrapt==1.16.0 # aws-xray-sdk xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.12.1 # via # -c requirements/_base.txt # aiohttp diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index f6ad9dfbd42..d1b323ae5dd 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -70,9 +70,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==75.1.0 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -84,7 +84,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index 2da7011e9b0..73fb423d101 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -1,9 +1,10 @@ # pylint: disable=pointless-statement +# pylint: disable=protected-access # pylint: disable=redefined-outer-name # pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=protected-access import filecmp diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index 2d32d345ffa..c32c055afe4 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -18,6 +18,7 @@ FileUploadSchema, UploadedPart, ) +from models_library.basic_types import IDStr from moto.server import ThreadedMotoServer from pydantic import AnyUrl, ByteSize, parse_obj_as from pytest_mock import MockerFixture @@ -271,7 +272,7 @@ async def 
test_upload_file_to_presigned_links( assert effective_chunk_size <= used_chunk_size upload_links = await create_upload_links(num_links, used_chunk_size) assert len(upload_links.urls) == num_links - async with ProgressBarData(num_steps=1) as progress_bar: + async with ProgressBarData(num_steps=1, description=IDStr("")) as progress_bar: uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links( session=client_session, file_upload_links=upload_links, diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 8485e19b74b..49fa694742e 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -1,9 +1,10 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name # pylint:disable=no-member # pylint:disable=protected-access +# pylint:disable=redefined-outer-name # pylint:disable=too-many-arguments +# pylint:disable=too-many-positional-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable import os diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt index 9a8a524999d..fbcf8d85409 100644 --- a/services/agent/requirements/_test.txt +++ b/services/agent/requirements/_test.txt @@ -7,7 +7,7 @@ aiohttp==3.8.5 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.2.0 # via @@ -51,9 +51,9 @@ certifi==2023.11.17 # httpcore # httpx # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography -cfn-lint==1.10.3 +cfn-lint==1.15.0 # via moto charset-normalizer==2.1.1 # via @@ -68,7 +68,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # moto @@ -81,20 +81,20 @@ ecdsa==0.19.0 # moto # python-jose # sshpubkeys -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.3.1 # via # -c requirements/_base.txt # aiohttp # aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto h11==0.14.0 # via @@ -129,7 +129,7 @@ jmespath==1.0.1 # via # boto3 # botocore -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -170,7 +170,7 @@ pluggy==1.5.0 # via pytest py-partiql-parser==0.4.0 # via moto -pyasn1==0.6.0 +pyasn1==0.6.1 # via # python-jose # rsa @@ -181,13 +181,13 @@ pydantic==1.10.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto pyrsistent==0.19.2 # via # -c requirements/_base.txt # jsonschema -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -219,7 +219,7 @@ pyyaml==6.0.2 # moto # openapi-spec-validator # responses -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -253,7 +253,7 @@ sniffio==1.3.0 # httpx sshpubkeys==3.3.1 # via moto -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint typing-extensions==4.4.0 # via @@ -261,14 +261,14 @@ typing-extensions==4.4.0 # aws-sam-translator # cfn-lint # pydantic -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # botocore # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git 
a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt index 84228c21c49..6937ce6b8b5 100644 --- a/services/agent/requirements/_tools.txt +++ b/services/agent/requirements/_tools.txt @@ -1,10 +1,10 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via # -r requirements/../../../requirements/devenv.txt # -r requirements/_tools.in -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -20,9 +20,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -70,7 +70,7 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==69.2.0 # via @@ -84,9 +84,9 @@ typing-extensions==4.4.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index d171a96e8f0..f4201ab9d4d 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -33,19 +33,19 @@ aws-sam-translator==1.55.0 # cfn-lint aws-xray-sdk==2.14.0 # via moto -boto3==1.35.2 +boto3==1.35.25 # via # aws-sam-translator # moto -boto3-stubs==1.35.2 +boto3-stubs==1.35.25 # via types-boto3 -botocore==1.35.2 +botocore==1.35.25 # via # aws-xray-sdk # boto3 # moto # s3transfer -botocore-stubs==1.35.2 +botocore-stubs==1.35.25 # via boto3-stubs certifi==2024.2.2 # via @@ -89,20 +89,20 @@ ecdsa==0.19.0 # moto # python-jose # sshpubkeys -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flask==2.1.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.4.1 # via # -c requirements/_base.txt # aiohttp # aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto greenlet==3.0.3 # via @@ -147,11 +147,11 @@ jmespath==1.0.1 # botocore jschema-to-python==1.2.3 # via cfn-lint -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint -jsonpickle==3.2.2 +jsonpickle==3.3.0 # via jschema-to-python jsonpointer==3.0.0 # via jsonpatch @@ -187,7 +187,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -203,13 +203,13 @@ packaging==24.0 # via # -c requirements/_base.txt # pytest -pbr==6.0.0 +pbr==6.1.0 # via # jschema-to-python # sarif-om pluggy==1.5.0 # via pytest -pyasn1==0.6.0 +pyasn1==0.6.1 # via # python-jose # rsa @@ -221,13 +221,13 @@ pyinstrument==4.6.2 # via # -c requirements/_base.txt # -r requirements/_test.in -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto pyrsistent==0.20.0 # via # -c requirements/_base.txt # jsonschema -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -254,7 +254,7 @@ python-dateutil==2.9.0.post0 # moto python-jose==3.3.0 # via moto 
-pytz==2024.1
+pytz==2024.2
     # via moto
 pyyaml==6.0.1
     # via
@@ -314,11 +314,11 @@ sshpubkeys==3.3.1
     # via moto
 types-aiofiles==24.1.0.20240626
     # via -r requirements/_test.in
-types-awscrt==0.21.2
+types-awscrt==0.21.5
     # via botocore-stubs
 types-boto3==1.0.2
     # via -r requirements/_test.in
-types-s3transfer==0.10.1
+types-s3transfer==0.10.2
     # via boto3-stubs
 typing-extensions==4.10.0
     # via
diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt
index a741d4f592a..f7033c1523f 100644
--- a/services/api-server/requirements/_tools.txt
+++ b/services/api-server/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
     # via pylint
 black==24.8.0
     # via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
     # via pip-tools
 bump2version==1.0.1
     # via -r requirements/../../../requirements/devenv.txt
@@ -20,9 +20,9 @@ dill==0.3.8
     # via pylint
 distlib==0.3.8
     # via virtualenv
-filelock==3.15.4
+filelock==3.16.1
     # via virtualenv
-identify==2.6.0
+identify==2.6.1
     # via pre-commit
 isort==5.13.2
     # via
@@ -41,7 +41,7 @@ markupsafe==2.1.5
     #   jinja2
 mccabe==0.7.0
     # via pylint
-mypy==1.11.1
+mypy==1.11.2
     # via
     #   -c requirements/_test.txt
     #   -r requirements/../../../requirements/devenv.txt
@@ -64,14 +64,14 @@ pip==24.2
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
     # via
     #   black
     #   pylint
     #   virtualenv
 pre-commit==3.8.0
     # via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
     # via -r requirements/../../../requirements/devenv.txt
 pyproject-hooks==1.1.0
     # via
@@ -84,7 +84,7 @@ pyyaml==6.0.1
     #   -c requirements/_test.txt
     #   pre-commit
     #   watchdog
-ruff==0.6.1
+ruff==0.6.7
     # via -r requirements/../../../requirements/devenv.txt
 setuptools==69.2.0
     # via
@@ -98,9 +98,9 @@ typing-extensions==4.10.0
     #   -c requirements/_base.txt
     #   -c requirements/_test.txt
     #   mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
     # via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
     # via -r requirements/_tools.in
 wheel==0.44.0
     # via pip-tools
diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
index ffaa1fd5618..0a23d0400f7 100644
--- a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
+++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
@@ -12,7 +12,9 @@ class BaseBackEndError(ApiServerBaseError):
 
     @classmethod
     def named_fields(cls) -> set[str]:
-        return set(parse.compile(cls.msg_template).named_fields)
+        return set(
+            parse.compile(cls.msg_template).named_fields  # pylint: disable=no-member
+        )
 
 
 class ListSolversOrStudiesError(BaseBackEndError):
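For context on the backend_errors.py change: the parse library exposes the placeholder names of a compiled format template through the Parser.named_fields attribute, which is exactly what named_fields() collects above. A small illustration (hypothetical template, not one of the service's real error messages):

    import parse

    # named_fields lists the {placeholder} names appearing in the template
    parser = parse.compile("job {job_id} of user {user_id} not found")
    assert set(parser.named_fields) == {"job_id", "user_id"}

The pylint disable is needed only because pylint cannot infer this attribute on the object returned by parse.compile().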
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 8f2bda059cd..47379c4d69f 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -40,7 +40,7 @@ certifi==2024.2.2
     #   httpcore
     #   httpx
     #   requests
-cffi==1.17.0
+cffi==1.17.1
     # via cryptography
 cfn-lint==1.10.3
     # via moto
@@ -56,28 +56,28 @@ coverage==7.6.1
     # via
     #   -r requirements/_test.in
     #   pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   joserfc
     #   moto
-deepdiff==7.0.1
+deepdiff==8.0.1
     # via -r requirements/_test.in
 docker==7.1.0
     # via
     #   -r requirements/_test.in
     #   moto
-faker==27.0.0
+faker==29.0.0
     # via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
     # via -r requirements/_test.in
 flask==3.0.3
     # via
     #   flask-cors
     #   moto
-flask-cors==4.0.1
+flask-cors==5.0.0
     # via moto
-graphql-core==3.2.3
+graphql-core==3.2.4
     # via moto
 h11==0.14.0
     # via
@@ -118,7 +118,7 @@ jmespath==1.0.1
     #   botocore
 joserfc==1.0.0
     # via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
     # via moto
 jsonpatch==1.33
     # via cfn-lint
@@ -148,7 +148,7 @@ markupsafe==2.1.5
     #   -c requirements/_base.txt
     #   jinja2
     #   werkzeug
-moto==5.0.13
+moto==5.0.15
     # via -r requirements/_test.in
 mpmath==1.3.0
     # via sympy
@@ -158,7 +158,7 @@ openapi-schema-validator==0.6.2
     # via openapi-spec-validator
 openapi-spec-validator==0.7.1
     # via moto
-ordered-set==4.1.0
+orderly-set==5.2.2
     # via deepdiff
 packaging==24.0
     # via
@@ -177,7 +177,7 @@ psutil==6.0.0
     # via
     #   -c requirements/_base.txt
     #   -r requirements/_test.in
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
     # via moto
 pycparser==2.22
     # via cffi
@@ -186,9 +186,9 @@ pydantic==1.10.15
     #   -c requirements/../../../requirements/constraints.txt
     #   -c requirements/_base.txt
     #   aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
     # via moto
-pytest==8.3.2
+pytest==8.3.3
     # via
     #   -r requirements/_test.in
     #   pytest-asyncio
@@ -238,7 +238,7 @@ referencing==0.29.3
     #   jsonschema
     #   jsonschema-path
     #   jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
     # via cfn-lint
 requests==2.32.3
     # via
@@ -281,7 +281,7 @@ sortedcontainers==2.4.0
     # via
     #   -c requirements/_base.txt
     #   fakeredis
-sympy==1.13.2
+sympy==1.13.3
     # via cfn-lint
 termcolor==2.4.0
     # via pytest-sugar
@@ -293,7 +293,7 @@ types-aiobotocore-ec2==2.13.0
     # via
     #   -c requirements/_base.txt
     #   types-aiobotocore
-types-aiobotocore-iam==2.13.2
+types-aiobotocore-iam==2.13.3
     # via types-aiobotocore
 types-aiobotocore-s3==2.13.0
     # via
@@ -307,7 +307,7 @@ types-awscrt==0.20.9
     # via
     #   -c requirements/_base.txt
     #   botocore-stubs
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
     # via -r requirements/_test.in
 typing-extensions==4.11.0
     # via
@@ -328,7 +328,7 @@ urllib3==2.2.1
     #   docker
     #   requests
     #   responses
-werkzeug==3.0.3
+werkzeug==3.0.4
     # via
     #   flask
     #   moto
diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt
index 4ec61eba91a..97a49efc2eb 100644
--- a/services/autoscaling/requirements/_tools.txt
+++ b/services/autoscaling/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
     # via pylint
 black==24.8.0
     # via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
     # via pip-tools
 bump2version==1.0.1
     # via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
     # via pylint
 distlib==0.3.8
     # via virtualenv
-filelock==3.15.4
+filelock==3.16.1
     # via virtualenv
-identify==2.6.0
+identify==2.6.1
     # via pre-commit
 isort==5.13.2
     # via
@@ -28,7 +28,7 @@ isort==5.13.2
     #   pylint
 mccabe==0.7.0
     # via pylint
-mypy==1.11.1
+mypy==1.11.2
     # via -r requirements/../../../requirements/devenv.txt
 mypy-extensions==1.0.0
     # via
@@ -48,14 +48,14 @@ pip==24.2
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
     # via
     #   black
     #   pylint
     #   virtualenv
 pre-commit==3.8.0
     # via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
     # via -r requirements/../../../requirements/devenv.txt
 pyproject-hooks==1.1.0
     # via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
     #   -c requirements/_test.txt
     #   pre-commit
     #   watchdog
-ruff==0.6.1
+ruff==0.6.7
     # via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0 # via @@ -82,9 +82,9 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index 327b006802d..5811b43b2f0 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -1,9 +1,10 @@ # pylint: disable=no-value-for-parameter # pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable # pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py index 8f8c2ac3fe0..3a79a11c853 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py @@ -1,9 +1,10 @@ # pylint: disable=no-value-for-parameter # pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable # pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio import datetime diff --git a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py index 28d26b7dfe0..26ac271db29 100644 --- a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py +++ b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py @@ -1,8 +1,9 @@ # pylint: disable=no-value-for-parameter # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import datetime import json diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py index 6b6308399d0..1c5920f9dc7 100644 --- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py +++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py @@ -1,7 +1,8 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument +# pylint: disable=too-many-positional-arguments # pylint:disable=redefined-outer-name # pylint:disable=too-many-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable from collections.abc import Awaitable, Callable diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 8b278768891..6fdd398def3 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -43,7 +43,7 @@ coverage==7.6.1 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in frozenlist==1.4.1 # via @@ -98,7 +98,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy 
mypy-extensions==1.0.0 # via mypy @@ -112,7 +112,7 @@ ptvsd==4.3.2 # via -r requirements/_test.in py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-aiohttp @@ -177,7 +177,7 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy types-psycopg2==2.9.21.20240819 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.10.0 # via diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index 3ee1fe551b5..c0a526c1310 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -69,7 +69,7 @@ pyyaml==6.0.1 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -82,9 +82,9 @@ typing-extensions==4.10.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 1681622314f..e31913ab9bb 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -1,6 +1,7 @@ # pylint: disable=not-context-manager # pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable @@ -109,7 +110,7 @@ async def product( """ # NOTE: this fixture ignores products' group-id but it is fine for this test context assert product["group_id"] is None - async with insert_and_get_row_lifespan( + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup sqlalchemy_async_engine, table=products, values=product, @@ -149,7 +150,7 @@ async def user( injects a user in db """ assert user_id == user["id"] - async with insert_and_get_row_lifespan( + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup sqlalchemy_async_engine, table=users, values=user, @@ -442,9 +443,9 @@ def _fake_factory( @pytest.fixture -def create_director_list_services_from() -> Callable[ - [list[dict[str, Any]], list], list[dict[str, Any]] -]: +def create_director_list_services_from() -> ( + 
Callable[[list[dict[str, Any]], list], list[dict[str, Any]]] +): """Convenience function to merge outputs of - `create_fake_service_data` callable with those of - `expected_director_list_services` fixture diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py index f8515b57298..394ea9123ad 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import asyncio diff --git a/services/catalog/tests/unit/with_dbs/test_api_rpc.py b/services/catalog/tests/unit/with_dbs/test_api_rpc.py index dfbf9c4adc8..3aeaaf4ef73 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rpc.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rpc.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments from collections.abc import AsyncIterator, Callable @@ -245,7 +246,7 @@ async def other_user( ) -> AsyncIterator[dict[str, Any]]: _user = random_user(fake=faker, id=user_id + 1) - async with insert_and_get_row_lifespan( + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup sqlalchemy_async_engine, table=users, values=_user, diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 4dea10f742d..00a7437644c 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -50,7 +50,7 @@ certifi==2024.2.2 # httpcore # httpx # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography cfn-lint==1.10.3 # via moto @@ -66,35 +66,35 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto debugpy==1.8.5 # via -r requirements/_test.in -deepdiff==7.0.1 +deepdiff==8.0.1 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in -fakeredis==2.23.5 +fakeredis==2.24.1 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.4.1 # via # -c requirements/_base.txt # aiohttp # aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto h11==0.14.0 # via @@ -134,7 +134,7 @@ jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -164,7 +164,7 @@ markupsafe==2.1.5 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy @@ -179,7 +179,7 @@ openapi-schema-validator==0.6.2 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -ordered-set==4.1.0 +orderly-set==5.2.2 # via deepdiff packaging==24.0 # via @@ -197,7 +197,7 @@ psutil==6.0.0 # via # -c requirements/_base.txt # -r requirements/_test.in -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto 
pycparser==2.22 # via cffi @@ -206,9 +206,9 @@ pydantic==1.10.15 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -252,7 +252,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -295,9 +295,9 @@ sortedcontainers==2.4.0 # via # -c requirements/_base.txt # fakeredis -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.11.0 # via @@ -314,7 +314,7 @@ urllib3==2.2.1 # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt index 4ec61eba91a..97a49efc2eb 100644 --- a/services/clusters-keeper/requirements/_tools.txt +++ b/services/clusters-keeper/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -48,14 +48,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -82,9 +82,9 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index 1f379c87c05..7f13a97ad89 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -27,7 +27,7 @@ certifi==2024.7.4 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography cfn-lint==1.10.3 # via moto @@ -43,7 +43,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -53,15 +53,15 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto icdiff==2.0.7 # via pytest-icdiff @@ -86,7 +86,7 @@ 
jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -114,7 +114,7 @@ markupsafe==2.1.5 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy @@ -137,7 +137,7 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi @@ -146,13 +146,13 @@ pydantic==1.10.15 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyftpdlib==1.5.10 +pyftpdlib==2.0.0 # via pytest-localftpserver pyopenssl==24.2.1 # via pytest-localftpserver -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -203,7 +203,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -232,7 +232,7 @@ six==1.16.0 # -c requirements/_base.txt # python-dateutil # rfc3339-validator -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint termcolor==2.4.0 # via pytest-sugar @@ -252,7 +252,7 @@ urllib3==2.2.1 # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt index 4ec61eba91a..97a49efc2eb 100644 --- a/services/dask-sidecar/requirements/_tools.txt +++ b/services/dask-sidecar/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -48,14 +48,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -82,9 +82,9 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index e54eea880ec..b09942fe970 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -4,9 +4,9 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -boto3-stubs==1.35.2 +boto3-stubs==1.35.25 # via types-boto3 -botocore-stubs==1.35.2 +botocore-stubs==1.35.25 # via # boto3-stubs # types-botocore @@ -27,7 +27,7 @@ 
coverage==7.6.1 # pytest-cov execnet==2.1.1 # via pytest-xdist -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in h11==0.14.0 # via @@ -60,7 +60,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -110,13 +110,13 @@ sniffio==1.3.1 # httpx termcolor==2.4.0 # via pytest-sugar -types-awscrt==0.21.2 +types-awscrt==0.21.5 # via botocore-stubs types-boto3==1.0.2 # via -r requirements/_test.in types-botocore==1.0.2 # via -r requirements/_test.in -types-s3transfer==0.10.1 +types-s3transfer==0.10.2 # via boto3-stubs typing-extensions==4.10.0 # via diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt index c155d41e265..508da70431f 100644 --- a/services/datcore-adapter/requirements/_tools.txt +++ b/services/datcore-adapter/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -46,14 +46,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -65,7 +65,7 @@ pyyaml==6.0.1 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -78,9 +78,9 @@ typing-extensions==4.10.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index a344bc67885..22d12c69c20 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -16,7 +16,7 @@ aiohttp==3.9.5 # -c requirements/_base.txt # aiobotocore # dask-gateway-server -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiormq==6.8.0 # via @@ -43,7 +43,7 @@ attrs==23.2.0 # -c requirements/_base.txt # aiohttp # pytest-docker -bokeh==3.5.1 +bokeh==3.5.2 # via dask boto3==1.34.131 # via aiobotocore @@ -59,7 +59,7 @@ certifi==2024.2.2 # httpcore # httpx # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography charset-normalizer==3.3.2 # via @@ -77,11 +77,11 @@ cloudpickle==3.0.0 # distributed colorlog==6.8.2 # via dask-gateway-server -contourpy==1.2.1 +contourpy==1.3.0 # via bokeh coverage==7.6.1 # via pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # dask-gateway-server @@ -100,7 +100,7 @@ docker==7.1.0 # via -r requirements/_test.in execnet==2.1.1 # via pytest-xdist -faker==27.0.0 +faker==29.0.0 # via -r 
requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -181,7 +181,7 @@ multidict==6.0.5 # aiohttp # async-asgi-testclient # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -203,7 +203,7 @@ pamqp==3.3.0 # via # -c requirements/_base.txt # aiormq -pandas==2.2.2 +pandas==2.2.3 # via bokeh partd==1.4.2 # via @@ -221,7 +221,7 @@ psutil==6.0.0 # distributed pycparser==2.22 # via cffi -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -252,7 +252,7 @@ python-dateutil==2.9.0.post0 # botocore # faker # pandas -pytz==2024.1 +pytz==2024.2 # via pandas pyyaml==6.0.1 # via @@ -310,11 +310,11 @@ tornado==6.4 # distributed traitlets==5.14.3 # via dask-gateway-server -types-networkx==3.2.1.20240820 +types-networkx==3.2.1.20240918 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.11.0 # via @@ -322,7 +322,7 @@ typing-extensions==4.11.0 # alembic # mypy # sqlalchemy2-stubs -tzdata==2024.1 +tzdata==2024.2 # via pandas urllib3==2.2.1 # via @@ -336,7 +336,7 @@ wrapt==1.16.0 # via # -c requirements/_base.txt # aiobotocore -xyzservices==2024.6.0 +xyzservices==2024.9.0 # via bokeh yarl==1.9.4 # via diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index 311f84dcf2f..062a460207d 100644 --- a/services/director-v2/requirements/_tools.txt +++ b/services/director-v2/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -71,7 +71,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -84,9 +84,9 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index cea6e18770d..49fd757e886 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -286,7 +286,7 @@ async def _try_start_pipeline( ) # NOTE: in case of a burst of calls to that endpoint, we might end up 
in a weird state.
 @run_sequentially_in_context(target_args=["computation.project_id"])
-async def create_computation( # noqa: PLR0913
+async def create_computation( # noqa: PLR0913 # pylint:disable=too-many-positional-arguments
     computation: ComputationCreate,
     request: Request,
     project_repo: Annotated[
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
index 3ba703a78b7..08396686e43 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
@@ -27,6 +27,7 @@
 from models_library.projects_state import RunningState
 from models_library.services import ServiceKey, ServiceType, ServiceVersion
 from models_library.users import UserID
+from networkx.classes.reportviews import InDegreeView
 from pydantic import PositiveInt
 from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
 from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
@@ -734,8 +735,10 @@ async def _schedule_tasks_to_start( # noqa: C901
                 if t.state == RunningState.SUCCESS
             }
         )
+        dag_in_degree = dag.in_degree()
+        assert isinstance(dag_in_degree, InDegreeView) # nosec
         next_task_node_ids = [
-            node_id for node_id, degree in dag.in_degree() if degree == 0
+            node_id for node_id, degree in dag_in_degree if degree == 0
         ]

         # get the tasks to start
diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
index 39b7fea4e5d..cd6f8bc2223 100644
--- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
+++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
@@ -1,7 +1,8 @@
 # pylint: disable=protected-access
 # pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
 # pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint:disable=too-many-positional-arguments

 import asyncio
 import hashlib
diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
index d590985680d..4780c2f7a6f 100644
--- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
+++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
@@ -1,6 +1,7 @@
-# pylint:disable=unused-argument
 # pylint:disable=redefined-outer-name
 # pylint:disable=too-many-arguments
+# pylint:disable=too-many-positional-arguments
+# pylint:disable=unused-argument

 import asyncio
 import logging
diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
index 7fe67666267..81034fbaee5 100644
--- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
+++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
@@ -4,6 +4,7 @@
 # pylint: disable=too-many-arguments
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
+# pylint:disable=too-many-positional-arguments

 import datetime
 import json
diff --git a/services/dynamic-scheduler/requirements/_test.txt
b/services/dynamic-scheduler/requirements/_test.txt index 396d85a70e3..b48cff66d52 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -21,7 +21,7 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in h11==0.14.0 # via @@ -55,7 +55,7 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index e946c9129aa..3f27c470fe3 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -65,7 +65,7 @@ pyyaml==6.0.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -77,7 +77,7 @@ typing-extensions==4.10.0 # via # -c requirements/_base.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py index e3d6acffa39..2dd5270b627 100644 --- a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py +++ b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py @@ -1,4 +1,5 @@ # pylint:disable=redefined-outer-name +# pylint:disable=too-many-positional-arguments # pylint:disable=unused-argument import json diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index ad204462b21..3b248a300cd 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -11,7 +11,7 @@ aiohttp==3.9.3 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via @@ -45,7 +45,7 @@ coverage==7.6.1 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -75,7 +75,7 @@ multidict==6.0.5 # aiohttp # async-asgi-testclient 
# yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -85,7 +85,7 @@ packaging==24.0 # pytest pluggy==1.5.0 # via pytest -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -128,13 +128,13 @@ sqlalchemy==1.4.52 # -r requirements/_test.in sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -types-aiobotocore-s3==2.13.2 +types-aiobotocore-s3==2.15.1 # via -r requirements/_test.in types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in -types-psutil==6.0.0.20240621 +types-psutil==6.0.0.20240901 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.11.0 # via diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 088c9a9396f..4eed4827cf8 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -50,14 +50,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -81,7 +81,7 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py index 5925e7d7fe2..83389547c77 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py @@ -20,7 +20,9 @@ def __init__(self, path, recursive=False): # pylint:disable=super-init-not-call # overwrite the `InotifyBuffer.__init__` method BaseThread.__init__(self) # pylint:disable=non-parent-init-called self._queue = DelayedQueue(self.delay) - self._inotify = Inotify(path, recursive, InotifyConstants.IN_ATTRIB) + self._inotify = Inotify( # pylint:disable=too-many-function-args + path, recursive, InotifyConstants.IN_ATTRIB + ) self.start() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py 
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py index 6d6917d4e15..c95813e939f 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py @@ -36,7 +36,9 @@ def __init__(self, path, recursive=False): # pylint:disable=super-init-not-call # overwrite the `InotifyBuffer.__init__` method BaseThread.__init__(self) # pylint:disable=non-parent-init-called self._queue = DelayedQueue(self.delay) - self._inotify = Inotify(path, recursive, _EVENTS_TO_WATCH) + self._inotify = Inotify( # pylint:disable=too-many-function-args + path, recursive, _EVENTS_TO_WATCH + ) self.start() diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index 397666815fb..53b88ac1359 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -1,5 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index f35b36a175a..efd05c557f6 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -50,7 +50,7 @@ certifi==2024.2.2 # httpcore # httpx # requests -cffi==1.17.0 +cffi==1.17.1 # via cryptography cfn-lint==1.10.3 # via moto @@ -66,35 +66,35 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto debugpy==1.8.5 # via -r requirements/_test.in -deepdiff==7.0.1 +deepdiff==8.0.1 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in -fakeredis==2.23.5 +fakeredis==2.24.1 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.4.1 # via # -c requirements/_base.txt # aiohttp # aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto h11==0.14.0 # via @@ -133,7 +133,7 @@ jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -162,7 +162,7 @@ markupsafe==2.1.5 # via # jinja2 # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy @@ -177,7 +177,7 @@ openapi-schema-validator==0.6.2 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via moto -ordered-set==4.1.0 +orderly-set==5.2.2 # via deepdiff packaging==24.0 # via @@ -195,7 +195,7 @@ psutil==6.0.0 # via # -c requirements/_base.txt # -r requirements/_test.in -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi @@ -204,9 +204,9 @@ pydantic==1.10.15 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -250,7 +250,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.3 # via @@ -291,7 +291,7 @@ sniffio==1.3.1 # httpx sortedcontainers==2.4.0 # via 
fakeredis -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint typing-extensions==4.11.0 # via @@ -308,7 +308,7 @@ urllib3==2.2.1 # docker # requests # responses -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index 4ec61eba91a..97a49efc2eb 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -48,14 +48,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -82,9 +82,9 @@ typing-extensions==4.11.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt index 7b029ccb0de..6a73c31809d 100644 --- a/services/invitations/requirements/_test.txt +++ b/services/invitations/requirements/_test.txt @@ -16,7 +16,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in h11==0.14.0 # via @@ -31,7 +31,7 @@ httpx==0.27.0 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -hypothesis==6.111.1 +hypothesis==6.112.1 # via -r requirements/_test.in idna==3.6 # via @@ -47,7 +47,7 @@ packaging==24.0 # pytest-sugar pluggy==1.5.0 # via pytest -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt index df9e8e642d5..d6bba29eee2 100644 --- a/services/invitations/requirements/_tools.txt +++ b/services/invitations/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt 
mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -66,7 +66,7 @@ pyyaml==6.0.1 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -78,9 +78,9 @@ typing-extensions==4.10.0 # via # -c requirements/_base.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/migration/requirements/_test.txt b/services/migration/requirements/_test.txt index 5b2e5bb55dd..0c989c238a4 100644 --- a/services/migration/requirements/_test.txt +++ b/services/migration/requirements/_test.txt @@ -3,7 +3,7 @@ attrs==24.2.0 # jsonschema # pytest-docker # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -13,9 +13,9 @@ coverage==7.6.1 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -idna==3.7 +idna==3.10 # via requests iniconfig==2.0.0 # via pytest @@ -23,7 +23,7 @@ jsonschema==4.23.0 # via -r requirements/_test.in jsonschema-specifications==2023.12.1 # via jsonschema -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -31,7 +31,7 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -66,7 +66,7 @@ rpds-py==0.20.0 # via # jsonschema # referencing -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in @@ -78,7 +78,7 @@ typing-extensions==4.12.2 # via # mypy # sqlalchemy2-stubs -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/services/migration/requirements/_tools.txt b/services/migration/requirements/_tools.txt index 70e35d70acc..e775221e68b 100644 --- a/services/migration/requirements/_tools.txt +++ b/services/migration/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -16,9 +16,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -48,14 +48,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 
+ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -77,9 +77,9 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/osparc-gateway-server/requirements/_test.txt b/services/osparc-gateway-server/requirements/_test.txt index a092c888f38..908dca5582b 100644 --- a/services/osparc-gateway-server/requirements/_test.txt +++ b/services/osparc-gateway-server/requirements/_test.txt @@ -11,7 +11,7 @@ attrs==23.2.0 # via # -c requirements/_base.txt # aiohttp -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -47,7 +47,7 @@ distributed==2024.5.1 # dask-gateway docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in frozenlist==1.4.1 # via @@ -98,7 +98,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -121,7 +121,7 @@ psutil==6.0.0 # via # -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt # distributed -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio diff --git a/services/osparc-gateway-server/requirements/_tools.txt b/services/osparc-gateway-server/requirements/_tools.txt index a3428b0ccc3..985945c7b0e 100644 --- a/services/osparc-gateway-server/requirements/_tools.txt +++ b/services/osparc-gateway-server/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -49,14 +49,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,9 +68,9 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -79,9 +79,9 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/osparc-gateway-server/tests/system/requirements/_test.txt b/services/osparc-gateway-server/tests/system/requirements/_test.txt index f2a90715442..410339df3c6 100644 --- a/services/osparc-gateway-server/tests/system/requirements/_test.txt +++ b/services/osparc-gateway-server/tests/system/requirements/_test.txt 
@@ -1,4 +1,4 @@ -aiodocker==0.22.2 +aiodocker==0.23.0 # via -r requirements/_test.in aiohappyeyeballs==2.4.0 # via aiohttp @@ -11,7 +11,7 @@ aiosignal==1.3.1 # via aiohttp attrs==24.2.0 # via aiohttp -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../../../requirements/constraints.txt # requests @@ -43,7 +43,7 @@ distributed==2024.5.1 # dask-gateway docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in frozenlist==1.4.1 # via @@ -55,7 +55,7 @@ fsspec==2024.5.0 # dask icdiff==2.0.7 # via pytest-icdiff -idna==3.7 +idna==3.10 # via # requests # yarl @@ -87,7 +87,7 @@ msgpack==1.0.8 # via # -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt # distributed -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl @@ -114,7 +114,7 @@ psutil==6.0.0 # via # -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt # distributed -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -182,7 +182,7 @@ urllib3==2.2.1 # distributed # docker # requests -yarl==1.9.4 +yarl==1.12.1 # via aiohttp zict==3.0.0 # via diff --git a/services/osparc-gateway-server/tests/system/requirements/_tools.txt b/services/osparc-gateway-server/tests/system/requirements/_tools.txt index ba908e304bb..ce5d53160d5 100644 --- a/services/osparc-gateway-server/tests/system/requirements/_tools.txt +++ b/services/osparc-gateway-server/tests/system/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -46,14 +46,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -64,15 +64,15 @@ pyyaml==6.0.1 # -c requirements/../../../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index 30290658b18..4a42c0d9ccf 100644 --- a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -42,7 +42,7 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in frozenlist==1.4.1 # via @@ -85,7 +85,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -98,7 +98,7 
@@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -167,11 +167,11 @@ termcolor==2.4.0 # via pytest-sugar types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in -types-pyasn1==0.6.0.20240402 +types-pyasn1==0.6.0.20240913 # via types-python-jose types-python-jose==3.3.4.20240106 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.12.2 # via diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt index a3199931bff..5ac982ce79c 100644 --- a/services/payments/requirements/_tools.txt +++ b/services/payments/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -50,14 +50,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -68,7 +68,7 @@ pyyaml==6.0.1 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -81,7 +81,7 @@ typing-extensions==4.12.2 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/services/payments/tests/unit/api/test__one_time_payment_workflows.py b/services/payments/tests/unit/api/test__one_time_payment_workflows.py index 2052dee31f6..753432ac6d6 100644 --- a/services/payments/tests/unit/api/test__one_time_payment_workflows.py +++ b/services/payments/tests/unit/api/test__one_time_payment_workflows.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import httpx diff --git a/services/payments/tests/unit/api/test__payment_method_workflows.py b/services/payments/tests/unit/api/test__payment_method_workflows.py index 5b92bee8b17..76640384f7b 100644 --- a/services/payments/tests/unit/api/test__payment_method_workflows.py +++ b/services/payments/tests/unit/api/test__payment_method_workflows.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import httpx diff --git a/services/payments/tests/unit/test_db_payments_users_repo.py b/services/payments/tests/unit/test_db_payments_users_repo.py index 
a695af251c4..51d5f540c6b 100644
--- a/services/payments/tests/unit/test_db_payments_users_repo.py
+++ b/services/payments/tests/unit/test_db_payments_users_repo.py
@@ -59,7 +59,7 @@ async def user(
     injects a user in db
     """
     assert user_id == user["id"]
-    async with insert_and_get_row_lifespan(
+    async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
         get_engine(app),
         table=users,
         values=user,
@@ -84,7 +84,7 @@ async def product(
     """
     # NOTE: this fixture ignores products' group-id but it is fine for this test context
     assert product["group_id"] is None
-    async with insert_and_get_row_lifespan(
+    async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
         get_engine(app),
         table=products,
         values=product,
@@ -101,7 +101,7 @@ async def successful_transaction(
     """
     injects transaction in db
     """
-    async with insert_and_get_row_lifespan(
+    async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
         get_engine(app),
         table=payments_transactions,
         values=successful_transaction,
diff --git a/services/payments/tests/unit/test_rpc_payments_methods.py b/services/payments/tests/unit/test_rpc_payments_methods.py
index 9ecb10d9976..ef60bfa6c42 100644
--- a/services/payments/tests/unit/test_rpc_payments_methods.py
+++ b/services/payments/tests/unit/test_rpc_payments_methods.py
@@ -1,6 +1,7 @@
 # pylint: disable=protected-access
 # pylint: disable=redefined-outer-name
 # pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable

diff --git a/services/payments/tests/unit/test_services_payments.py b/services/payments/tests/unit/test_services_payments.py
index 94452d9c772..4cb484aafbb 100644
--- a/services/payments/tests/unit/test_services_payments.py
+++ b/services/payments/tests/unit/test_services_payments.py
@@ -1,6 +1,7 @@
 # pylint: disable=protected-access
 # pylint: disable=redefined-outer-name
 # pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable

diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt
index bfc96422668..d1c36a7f469 100644
--- a/services/resource-usage-tracker/requirements/_test.txt
+++ b/services/resource-usage-tracker/requirements/_test.txt
@@ -40,9 +40,9 @@ certifi==2024.2.2
     #   httpcore
     #   httpx
     #   requests
-cffi==1.17.0
+cffi==1.17.1
     # via cryptography
-cfn-lint==1.10.3
+cfn-lint==1.15.0
     # via moto
 charset-normalizer==3.3.2
     # via
@@ -56,7 +56,7 @@ coverage==7.6.1
     # via
     #   -r requirements/_test.in
     #   pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   joserfc
@@ -65,17 +65,17 @@ docker==7.1.0
     # via
     #   -r requirements/_test.in
     #   moto
-faker==27.0.0
+faker==29.0.0
     # via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
     # via -r requirements/_test.in
 flask==3.0.3
     # via
     #   flask-cors
     #   moto
-flask-cors==4.0.1
+flask-cors==5.0.0
     # via moto
-graphql-core==3.2.3
+graphql-core==3.2.4
     # via moto
 greenlet==3.0.3
     # via
@@ -117,7 +117,7 @@ jmespath==1.0.1
     #   botocore
 joserfc==1.0.0
     # via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
     # via moto
 jsonpatch==1.33
     # via cfn-lint
@@ -153,11 +153,11 @@ markupsafe==2.1.5
     #   jinja2
     #   mako
     #   werkzeug
-moto==5.0.13
+moto==5.0.15
     # via -r requirements/_test.in
 mpmath==1.3.0
     # via sympy
-mypy==1.11.1
+mypy==1.11.2
     # via sqlalchemy
mypy-extensions==1.0.0 # via mypy @@ -178,7 +178,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via jsonpath-ng -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi @@ -191,7 +191,7 @@ pyparsing==3.1.2 # via # -c requirements/_base.txt # moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -294,11 +294,11 @@ sqlalchemy==1.4.52 # alembic sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -types-requests==2.32.0.20240712 +types-requests==2.32.0.20240914 # via -r requirements/_test.in typing-extensions==4.10.0 # via @@ -318,7 +318,7 @@ urllib3==2.0.7 # requests # responses # types-requests -werkzeug==3.0.3 +werkzeug==3.0.4 # via # flask # moto diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt index 4be35ba3257..44759acdfd4 100644 --- a/services/resource-usage-tracker/requirements/_tools.txt +++ b/services/resource-usage-tracker/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -71,7 +71,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -85,9 +85,9 @@ typing-extensions==4.10.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index b243f8f4428..c1abbffd545 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -43,9 +43,9 @@ certifi==2024.2.2 # -c requirements/_base.txt # requests # simcore-service-storage-sdk -cffi==1.17.0 +cffi==1.17.1 # via cryptography -cfn-lint==1.10.3 +cfn-lint==1.15.0 # via moto charset-normalizer==3.3.2 # via @@ -59,7 +59,7 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # -c requirements/../../../requirements/constraints.txt # joserfc @@ -68,22 +68,22 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in -fakeredis==2.23.5 +fakeredis==2.24.1 # via -r requirements/_test.in flask==3.0.3 # via # flask-cors # moto -flask-cors==4.0.1 +flask-cors==5.0.0 # via moto frozenlist==1.4.1 # via # -c requirements/_base.txt # aiohttp # 
aiosignal -graphql-core==3.2.3 +graphql-core==3.2.4 # via moto greenlet==3.0.3 # via @@ -113,7 +113,7 @@ jmespath==1.0.1 # botocore joserfc==1.0.0 # via moto -jsondiff==2.2.0 +jsondiff==2.2.1 # via moto jsonpatch==1.33 # via cfn-lint @@ -147,7 +147,7 @@ markupsafe==2.1.5 # -c requirements/_base.txt # jinja2 # werkzeug -moto==5.0.13 +moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy @@ -156,13 +156,13 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy networkx==3.3 # via cfn-lint -numpy==2.1.0 +numpy==2.1.1 # via pandas openapi-schema-validator==0.6.2 # via @@ -177,7 +177,7 @@ packaging==24.0 # -c requirements/_base.txt # pytest # pytest-sugar -pandas==2.2.2 +pandas==2.2.3 # via -r requirements/_test.in pathable==0.4.3 # via @@ -189,7 +189,7 @@ ply==3.11 # via jsonpath-ng pprintpp==0.4.0 # via pytest-icdiff -py-partiql-parser==0.5.5 +py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi @@ -198,9 +198,9 @@ pydantic==1.10.14 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pyparsing==3.1.2 +pyparsing==3.1.4 # via moto -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-aiohttp @@ -239,7 +239,7 @@ python-dateutil==2.9.0.post0 # simcore-service-storage-sdk python-dotenv==1.0.1 # via -r requirements/_test.in -pytz==2024.1 +pytz==2024.2 # via pandas pyyaml==6.0.1 # via @@ -261,7 +261,7 @@ referencing==0.29.3 # jsonschema # jsonschema-path # jsonschema-specifications -regex==2024.7.24 +regex==2024.9.11 # via cfn-lint requests==2.32.2 # via @@ -306,7 +306,7 @@ sqlalchemy==1.4.52 # -r requirements/_test.in sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -sympy==1.13.2 +sympy==1.13.3 # via cfn-lint termcolor==2.4.0 # via pytest-sugar @@ -320,7 +320,7 @@ typing-extensions==4.10.0 # mypy # pydantic # sqlalchemy2-stubs -tzdata==2024.1 +tzdata==2024.2 # via pandas urllib3==2.0.7 # via diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index 4be35ba3257..44759acdfd4 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -51,14 +51,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -71,7 +71,7 @@ pyyaml==6.0.1 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt setuptools==74.0.0 # via @@ -85,9 +85,9 @@ typing-extensions==4.10.0 # -c requirements/_base.txt # -c requirements/_test.txt # mypy 
-virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index fd759ca1bf2..60f4f7f57a9 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -1,10 +1,11 @@ -# pylint: disable=unused-variable -# pylint: disable=unused-argument +# pylint: disable=no-member +# pylint: disable=no-name-in-module # pylint: disable=redefined-outer-name # pylint: disable=too-many-arguments -# pylint: disable=no-name-in-module -# pylint: disable=no-member # pylint: disable=too-many-branches +# pylint: disable=too-many-positional-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio import datetime diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 21e3bac8c3d..5623c7e67c2 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -1,9 +1,10 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments # pylint:disable=no-name-in-module # pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=too-many-positional-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable import asyncio import filecmp diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 046d1efecba..1b8cff63723 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -102,7 +102,7 @@ multidict==6.0.2 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -130,7 +130,7 @@ pyrsistent==0.18.1 # -c requirements/_base.txt # jsonschema # referencing -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-aiohttp @@ -220,11 +220,11 @@ types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-jsonschema==4.23.0.20240813 # via -r requirements/_test.in -types-openpyxl==3.1.5.20240819 +types-openpyxl==3.1.5.20240918 # via -r requirements/_test.in types-passlib==1.7.7.20240819 # via -r requirements/_test.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r requirements/_test.in typing-extensions==4.12.0 # via @@ -238,7 +238,7 @@ urllib3==1.26.11 # -c requirements/_base.txt # docker # requests -websockets==13.0 +websockets==13.1 # via -r requirements/_test.in yarl==1.9.4 # via diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index bad09bbc690..6aee7017f62 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../../requirements/devenv.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit inotify==0.2.10 # via -r requirements/_tools.in @@ -30,7 +30,7 @@ isort==5.13.2 
    #   pylint
 mccabe==0.7.0
     # via pylint
-mypy==1.11.1
+mypy==1.11.2
     # via
     #   -c requirements/_test.txt
     #   -r requirements/../../../../requirements/devenv.txt
@@ -55,14 +55,14 @@ pip==24.2
     # via pip-tools
 pip-tools==7.4.1
     # via -r requirements/../../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
     # via
     #   black
     #   pylint
     #   virtualenv
 pre-commit==3.8.0
     # via -r requirements/../../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
     # via -r requirements/../../../../requirements/devenv.txt
 pyproject-hooks==1.1.0
     # via
@@ -74,7 +74,7 @@ pyyaml==6.0.1
     #   -c requirements/_base.txt
     #   -c requirements/_test.txt
     #   pre-commit
-ruff==0.6.1
+ruff==0.6.7
     # via -r requirements/../../../../requirements/devenv.txt
 setuptools==69.1.1
     # via
@@ -90,7 +90,7 @@ typing-extensions==4.12.0
     #   -c requirements/_base.txt
     #   -c requirements/_test.txt
     #   mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
     # via pre-commit
 wheel==0.44.0
     # via pip-tools
diff --git a/services/web/server/src/simcore_service_webserver/catalog/_models.py b/services/web/server/src/simcore_service_webserver/catalog/_models.py
index a3803b04d08..4e2f4147e48 100644
--- a/services/web/server/src/simcore_service_webserver/catalog/_models.py
+++ b/services/web/server/src/simcore_service_webserver/catalog/_models.py
@@ -12,7 +12,7 @@
 )
 from models_library.services import BaseServiceIOModel
 from pint import PintError, UnitRegistry
-from pint.quantity import Quantity
+from pint.quantity import Quantity # pylint:disable=no-name-in-module

 _logger = logging.getLogger(__name__)

diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py
index 7f686a44292..0d84fbb534c 100644
--- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py
+++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py
@@ -1,6 +1,7 @@
 # pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
 # pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
+# pylint: disable=unused-argument

 from collections.abc import Callable
 from typing import Final
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
index b177bcc16e0..956e0d415e2 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py
@@ -1,7 +1,8 @@
 # pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
-# pylint: disable=too-many-arguments

 from collections.abc import Callable, Iterator

diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index d34adace8ae..910d94f06c0 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -1,5 +1,6 @@
 # pylint: disable=redefined-outer-name
 # pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
 # pylint: disable=too-many-statements
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
diff --git 
a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py index 0f736a758b7..d1d6bcc73ee 100644 --- a/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/03/garbage_collector/test_resource_manager.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import asyncio diff --git a/tests/e2e-playwright/requirements/_test.txt b/tests/e2e-playwright/requirements/_test.txt index 0f53acd4cf2..2934b76a3a8 100644 --- a/tests/e2e-playwright/requirements/_test.txt +++ b/tests/e2e-playwright/requirements/_test.txt @@ -2,7 +2,7 @@ annotated-types==0.7.0 # via pydantic arrow==1.3.0 # via -r requirements/_test.in -certifi==2024.7.4 +certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests @@ -12,11 +12,11 @@ docker==7.1.0 # via -r requirements/_test.in email-validator==2.2.0 # via pydantic -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in greenlet==3.0.3 # via playwright -idna==3.7 +idna==3.10 # via # email-validator # requests @@ -30,17 +30,17 @@ packaging==24.1 # via # pytest # pytest-sugar -playwright==1.46.0 +playwright==1.47.0 # via pytest-playwright pluggy==1.5.0 # via pytest -pydantic==2.8.2 +pydantic==2.9.2 # via -r requirements/_test.in -pydantic-core==2.20.1 +pydantic-core==2.23.4 # via pydantic -pyee==11.1.0 +pyee==12.0.0 # via playwright -pytest==8.3.2 +pytest==8.3.3 # via # pytest-base-url # pytest-html @@ -56,7 +56,7 @@ pytest-instafail==0.5.0 # via -r requirements/_test.in pytest-metadata==3.1.1 # via pytest-html -pytest-playwright==0.5.1 +pytest-playwright==0.5.2 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -82,14 +82,14 @@ termcolor==2.4.0 # via pytest-sugar text-unidecode==1.3 # via python-slugify -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via # pydantic # pydantic-core # pyee -urllib3==2.2.2 +urllib3==2.2.3 # via # docker # requests diff --git a/tests/e2e-playwright/requirements/_tools.txt b/tests/e2e-playwright/requirements/_tools.txt index d41516afc10..cd07b666b72 100644 --- a/tests/e2e-playwright/requirements/_tools.txt +++ b/tests/e2e-playwright/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -16,9 +16,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -45,14 +45,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt 
pyproject-hooks==1.1.0 # via @@ -63,9 +63,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -73,7 +73,7 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/tests/e2e/requirements/requirements.txt b/tests/e2e/requirements/requirements.txt index 4ab33ad3c77..9a3d2156703 100644 --- a/tests/e2e/requirements/requirements.txt +++ b/tests/e2e/requirements/requirements.txt @@ -1,4 +1,4 @@ -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # requests @@ -6,7 +6,7 @@ charset-normalizer==3.3.2 # via requests docker==7.1.0 # via -r requirements/requirements.in -idna==3.7 +idna==3.10 # via requests pyyaml==6.0.2 # via @@ -16,7 +16,7 @@ requests==2.32.3 # via docker tenacity==9.0.0 # via -r requirements/requirements.in -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index 2fb5a2fc83e..bc2672270a7 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -6,7 +6,7 @@ packaging==24.1 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -16,7 +16,7 @@ pydantic==1.10.17 # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/requirements.in # pytest-asyncio diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index ed5d6c5a1ca..ef23afb0200 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -1,6 +1,6 @@ -anyio==4.4.0 +anyio==4.6.0 # via httpx -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # httpcore @@ -12,11 +12,11 @@ h11==0.14.0 # via httpcore httpcore==1.0.5 # via httpx -httpx==0.27.0 +httpx==0.27.2 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc -idna==3.7 +idna==3.10 # via # anyio # httpx @@ -32,7 +32,7 @@ osparc-client==0.6.6 # via osparc packaging==24.1 # via osparc -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -40,7 +40,7 @@ pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via osparc-client -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -58,13 +58,13 @@ tenacity==9.0.0 # via osparc tqdm==4.66.5 # via osparc -typer==0.12.4 +typer==0.12.5 # via -r 
requirements/../../../packages/settings-library/requirements/_base.in typing-extensions==4.12.2 # via # pydantic # typer -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc-client diff --git a/tests/public-api/requirements/_test.txt b/tests/public-api/requirements/_test.txt index d0498fe06e2..67ded32267c 100644 --- a/tests/public-api/requirements/_test.txt +++ b/tests/public-api/requirements/_test.txt @@ -1,4 +1,4 @@ -aiodocker==0.22.2 +aiodocker==0.23.0 # via -r requirements/_test.in aiohappyeyeballs==2.4.0 # via aiohttp @@ -9,14 +9,14 @@ aiohttp==3.10.5 # aiodocker aiosignal==1.3.1 # via aiohttp -anyio==4.4.0 +anyio==4.6.0 # via httpx attrs==24.2.0 # via # aiohttp # jsonschema # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # httpcore @@ -26,7 +26,7 @@ charset-normalizer==3.3.2 # via requests docker==7.1.0 # via -r requirements/_test.in -faker==27.0.0 +faker==29.0.0 # via -r requirements/_test.in frozenlist==1.4.1 # via @@ -36,11 +36,11 @@ h11==0.14.0 # via httpcore httpcore==1.0.5 # via httpx -httpx==0.27.0 +httpx==0.27.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in -idna==3.7 +idna==3.10 # via # anyio # httpx @@ -52,7 +52,7 @@ jsonschema==4.23.0 # via -r requirements/_test.in jsonschema-specifications==2023.12.1 # via jsonschema -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl @@ -60,7 +60,7 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -94,10 +94,10 @@ sniffio==1.3.1 # httpx tenacity==9.0.0 # via -r requirements/_test.in -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # docker # requests -yarl==1.9.4 +yarl==1.12.1 # via aiohttp diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index 5f175f88fa6..346aa34ba59 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -47,14 +47,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==73.0.1 +setuptools==75.1.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -75,7 +75,7 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # mypy 
-virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 22c0768cda4..432abdef719 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -12,7 +12,7 @@ aiodebug==2.3.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.22.2 +aiodocker==0.23.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -43,16 +43,16 @@ aiohttp==3.10.5 # aiodocker aiopg==1.4.0 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in -aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika aiosignal==1.3.1 # via aiohttp -alembic==1.13.2 +alembic==1.13.3 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/_test.in -anyio==4.4.0 +anyio==4.6.0 # via # fast-depends # faststream @@ -77,7 +77,7 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -certifi==2024.7.4 +certifi==2024.8.30 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -117,9 +117,9 @@ docker==7.1.0 # -r requirements/_test.in email-validator==2.2.0 # via pydantic -fast-depends==2.4.8 +fast-depends==2.4.11 # via faststream -faststream==0.5.18 +faststream==0.5.23 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -135,20 +135,20 @@ googleapis-common-protos==1.65.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -greenlet==3.0.3 +greenlet==3.1.1 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # sqlalchemy -grpcio==1.66.0 +grpcio==1.66.1 # via opentelemetry-exporter-otlp-proto-grpc -idna==3.7 +idna==3.10 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # anyio # email-validator # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api iniconfig==2.0.0 # via pytest @@ -186,11 +186,11 @@ markupsafe==2.1.5 # mako mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -200,40 +200,40 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in 
-opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.27.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via opentelemetry-instrumentation-requests -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.48b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.27.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via opentelemetry-instrumentation-requests orjson==3.10.7 # via @@ -265,7 +265,7 @@ pint==0.24.3 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in pluggy==1.5.0 # via pytest -protobuf==4.25.4 +protobuf==4.25.5 # via # googleapis-common-protos # opentelemetry-proto @@ -277,7 +277,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -307,11 +307,11 @@ pydantic==1.10.17 # fast-depends pygments==2.18.0 # via rich -pyinstrument==4.7.2 +pyinstrument==4.7.3 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/_test.in # pytest-asyncio @@ -392,7 +392,7 @@ requests==2.32.3 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # docker # opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.8.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -403,7 +403,7 @@ rpds-py==0.20.0 # via # jsonschema # referencing -setuptools==74.0.0 +setuptools==75.1.0 # via opentelemetry-instrumentation shellingham==1.5.4 # via typer @@ -411,7 +411,7 @@ six==1.16.0 # via python-dateutil sniffio==1.3.1 # via anyio -sqlalchemy==1.4.53 +sqlalchemy==1.4.54 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -448,14 +448,13 @@ tqdm==4.66.5 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.12.4 +typer==0.12.5 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in - # faststream -types-python-dateutil==2.9.0.20240821 +types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 # via @@ -469,7 +468,7 @@ typing-extensions==4.12.2 # pint # pydantic # typer -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -491,11 +490,11 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation -yarl==1.9.4 +yarl==1.12.1 # via # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 1e239766dbd..9de76ca7790 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.2.4 +astroid==3.3.4 # via pylint black==24.8.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.1 +build==1.2.2 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -17,9 +17,9 @@ dill==0.3.8 # via pylint distlib==0.3.8 # via virtualenv -filelock==3.15.4 +filelock==3.16.1 # via virtualenv -identify==2.6.0 +identify==2.6.1 # via pre-commit isort==5.13.2 # via @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.1 +mypy==1.11.2 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via @@ -46,14 +46,14 @@ pip==24.2 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # pylint # virtualenv pre-commit==3.8.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.2.6 +pylint==3.3.0 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.1.0 # via @@ -65,9 +65,9 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.6.1 +ruff==0.6.7 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==75.1.0 # via # -c requirements/_test.txt # pip-tools @@ -77,9 +77,9 @@ typing-extensions==4.12.2 # via # -c requirements/_test.txt # mypy -virtualenv==20.26.3 +virtualenv==20.26.5 # via pre-commit -watchdog==4.0.2 +watchdog==5.0.2 # via -r requirements/_tools.in wheel==0.44.0 # via pip-tools From 
aaee3b25e95d3e921044a739b6e81f470dc9dae4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:54:28 +0200 Subject: [PATCH 028/104] =?UTF-8?q?=E2=9C=85=20fixes=20check=20on=20full?= =?UTF-8?q?=20version=20(#6445)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/e2e-playwright/tests/tip/test_ti_plan.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index cac6bb5b56d..02db3952310 100644 --- a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -94,10 +94,15 @@ def test_classic_ti_plan( # noqa: PLR0915 create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]], ): with log_context(logging.INFO, "Checking 'Access TIP' teaser"): - page.get_by_test_id("userMenuBtn").click() - page.get_by_test_id("userMenuAccessTIPBtn").click() - assert page.get_by_test_id("tipTeaserWindow").is_visible() - page.get_by_test_id("tipTeaserWindowCloseBtn").click() + if is_product_lite: + page.get_by_test_id("userMenuBtn").click() + page.get_by_test_id("userMenuAccessTIPBtn").click() + assert page.get_by_test_id("tipTeaserWindow").is_visible() + page.get_by_test_id("tipTeaserWindowCloseBtn").click() + else: + assert ( + page.get_by_test_id("userMenuBtn").count() == 0 + ), "full version should NOT have a teaser" # press + button project_data = create_tip_plan_from_dashboard("newTIPlanButton") From aa87f1701d97d67984733e40de2f17a004307fd1 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:43:23 +0200 Subject: [PATCH 029/104] =?UTF-8?q?=F0=9F=90=9B=20Fixes=20flaky=20tests=20?= =?UTF-8?q?in=20service-library=20test=5Fdeferred=5Ftasks.py=20(#6439)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .../tests/deferred_tasks/test_deferred_tasks.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 0ea55a62eee..b14f72618ec 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -9,7 +9,7 @@ import random import sys from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable -from contextlib import AbstractAsyncContextManager, AsyncExitStack +from contextlib import AbstractAsyncContextManager, AsyncExitStack, suppress from pathlib import Path from typing import Any, Protocol @@ -60,10 +60,12 @@ async def stop(self, *, graceful: bool = False): assert self.process is not None assert self.pid is not None - parent = psutil.Process(self.pid) - children = parent.children(recursive=True) - for child_pid in [child.pid for child in children]: - psutil.Process(child_pid).kill() + with suppress(psutil.NoSuchProcess): + parent = psutil.Process(self.pid) + children = parent.children(recursive=True) + for child_pid in [child.pid for child in children]: + with suppress(psutil.NoSuchProcess): + psutil.Process(child_pid).kill() self.process = None self.pid = None From 97ccf19784aa2fd33b5894f08d78ad29b4537e4f Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 25 Sep 2024 
16:35:44 +0200 Subject: [PATCH 030/104] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20Announcemen?= =?UTF-8?q?ts:=20allow=20in=20ribbon=20only=20(#6440)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../announcement/AnnouncementUIFactory.js | 38 +++++++++---------- .../source/class/osparc/auth/ui/LoginView.js | 3 +- 2 files changed, 18 insertions(+), 23 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js b/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js index ebef5735983..5972ad1e28c 100644 --- a/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js +++ b/services/static-webserver/client/source/class/osparc/announcement/AnnouncementUIFactory.js @@ -30,7 +30,7 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { }, statics: { - createLoginAnnouncement: function(title, text) { + createLoginAnnouncement: function(title, description) { const loginAnnouncement = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ backgroundColor: "strong-main", alignX: "center", @@ -46,22 +46,20 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { const titleLabel = new qx.ui.basic.Label().set({ value: title, font: "text-16", - textColor: "white", alignX: "center", - rich: true, - wrap: true + textAlign: "center", + rich: true }); loginAnnouncement.add(titleLabel); } - if (text) { + if (description) { const descriptionLabel = new qx.ui.basic.Label().set({ - value: text, + value: description, font: "text-14", - textColor: "white", alignX: "center", - rich: true, - wrap: true + textAlign: "center", + rich: true }); loginAnnouncement.add(descriptionLabel); } @@ -75,16 +73,12 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { __isValid: function(widgetType) { const announcement = this.getAnnouncement(); - - const now = new Date(); - if ( - announcement && - announcement.getProducts().includes(osparc.product.Utils.getProductName()) && - announcement.getWidgets().includes(widgetType) && - now > announcement.getStart() && - now < announcement.getEnd() - ) { - return true; + if (announcement) { + const now = new Date(); + const validPeriod = now > announcement.getStart() && now < announcement.getEnd(); + const validProduct = announcement.getProducts().includes(osparc.product.Utils.getProductName()); + const validWidgetType = widgetType ? 
announcement.getWidgets().includes(widgetType) : true; + return validPeriod && validProduct && validWidgetType; } return false; }, @@ -124,8 +118,10 @@ qx.Class.define("osparc.announcement.AnnouncementUIFactory", { return; } - let text = announcement.getTitle() + ": "; - text += announcement.getDescription(); + let text = announcement.getTitle(); + if (announcement.getDescription()) { + text += ": " + announcement.getDescription(); + } const ribbonAnnouncement = this.__ribbonAnnouncement = new osparc.notification.RibbonNotification(text, "announcement", true); ribbonAnnouncement.announcementId = announcement.getId(); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 1f86b5877f6..c2aefdde877 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -45,8 +45,7 @@ qx.Class.define("osparc.auth.ui.LoginView", { this.addAt(announcementUIFactory.createLoginAnnouncement(), 0); } else { announcementUIFactory.addListenerOnce("changeAnnouncement", e => { - const announcement = e.getData(); - if (announcement) { + if (announcementUIFactory.hasLoginAnnouncement()) { this.addAt(announcementUIFactory.createLoginAnnouncement(), 0); } }); From 76e96c7a3a0722d14fdeaa392365c1c155e6fe3b Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com> Date: Wed, 25 Sep 2024 17:32:06 +0200 Subject: [PATCH 031/104] =?UTF-8?q?=F0=9F=8E=A8=20improve=20error=20handli?= =?UTF-8?q?ng=20on=20pricing=20plans=20(#6436)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../catalog/_handlers.py | 1 + .../catalog/_handlers_errors.py | 2 ++ .../projects/_nodes_handlers.py | 2 ++ .../projects/_states_handlers.py | 9 ++++++++- .../resource_usage/_client.py | 15 +++++++++++---- .../resource_usage/errors.py | 9 +++++++++ 6 files changed, 33 insertions(+), 5 deletions(-) create mode 100644 services/web/server/src/simcore_service_webserver/resource_usage/errors.py diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py index cfaafe30ab6..02e21f37e29 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py @@ -370,6 +370,7 @@ async def get_service_resources(request: Request): ) @login_required @permission_required("services.catalog.*") +@_handlers_errors.reraise_catalog_exceptions_as_http_errors async def get_service_pricing_plan(request: Request): ctx = CatalogRequestContext.create(request) path_params = parse_request_path_parameters_as(ServicePathParams, request) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py index 53f64c98b0c..4a278cc95dc 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers_errors.py @@ -3,6 +3,7 @@ from aiohttp import web from servicelib.aiohttp.typing_extension import Handler +from ..resource_usage.errors import DefaultPricingPlanNotFoundError from .exceptions import ( CatalogForbiddenError, CatalogItemNotFoundError, @@ -19,6 +20,7 @@ async def _wrapper(request: 
web.Request) -> web.StreamResponse: except ( CatalogItemNotFoundError, + DefaultPricingPlanNotFoundError, DefaultPricingUnitForServiceNotFoundError, ) as exc: raise web.HTTPNotFound(reason=f"{exc}") from exc diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index c7b1ad4629a..0302c8f7e32 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -68,6 +68,7 @@ from ..groups.exceptions import GroupNotFoundError from ..login.decorators import login_required from ..projects.api import has_user_project_access_rights +from ..resource_usage.errors import DefaultPricingPlanNotFoundError from ..security.decorators import permission_required from ..users.api import get_user_id_from_gid, get_user_role from ..users.exceptions import UserDefaultWalletNotFoundError @@ -101,6 +102,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: ProjectNotFoundError, NodeNotFoundError, UserDefaultWalletNotFoundError, + DefaultPricingPlanNotFoundError, DefaultPricingUnitNotFoundError, GroupNotFoundError, CatalogItemNotFoundError, diff --git a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py index cc0e5b7ef57..fe7c62960f0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py @@ -29,6 +29,7 @@ from ..login.decorators import login_required from ..notifications import project_logs from ..products.api import Product, get_current_product +from ..resource_usage.errors import DefaultPricingPlanNotFoundError from ..security.decorators import permission_required from ..users import api from ..users.exceptions import UserDefaultWalletNotFoundError @@ -37,6 +38,7 @@ from . 
import projects_api from ._common_models import ProjectPathParams, RequestContext from .exceptions import ( + DefaultPricingUnitNotFoundError, ProjectInvalidRightsError, ProjectNotFoundError, ProjectStartsTooManyDynamicNodesError, @@ -57,7 +59,12 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: try: return await handler(request) - except (ProjectNotFoundError, UserDefaultWalletNotFoundError) as exc: + except ( + ProjectNotFoundError, + UserDefaultWalletNotFoundError, + DefaultPricingPlanNotFoundError, + DefaultPricingUnitNotFoundError, + ) as exc: raise web.HTTPNotFound(reason=f"{exc}") from exc except ProjectInvalidRightsError as exc: diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_client.py b/services/web/server/src/simcore_service_webserver/resource_usage/_client.py index 63763aa145b..eb616b5d209 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_client.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_client.py @@ -23,11 +23,13 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import NonNegativeInt, parse_obj_as +from servicelib.aiohttp import status from servicelib.aiohttp.client_session import get_client_session from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings from yarl import URL from ._utils import handle_client_exceptions +from .errors import DefaultPricingPlanNotFoundError from .settings import get_plugin_settings _logger = logging.getLogger(__name__) @@ -95,10 +97,15 @@ async def get_default_service_pricing_plan( } ) with handle_client_exceptions(app) as session: - async with session.get(url) as response: - response.raise_for_status() - body: dict = await response.json() - return parse_obj_as(PricingPlanGet, body) + try: + async with session.get(url) as response: + response.raise_for_status() + body: dict = await response.json() + return parse_obj_as(PricingPlanGet, body) + except ClientResponseError as e: + if e.status == status.HTTP_404_NOT_FOUND: + raise DefaultPricingPlanNotFoundError from e + raise async def get_pricing_plan_unit( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/errors.py b/services/web/server/src/simcore_service_webserver/resource_usage/errors.py new file mode 100644 index 00000000000..ccaddaf1a70 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/resource_usage/errors.py @@ -0,0 +1,9 @@ +from ..errors import WebServerBaseError + + +class ResourceUsageValueError(WebServerBaseError, ValueError): + ... 
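# Aside (a minimal sketch, not part of the patch): the hunks above wire a 404 from the
# resource-usage tracker all the way to the REST layer. The client turns the transport-level
# 404 into the domain error DefaultPricingPlanNotFoundError, and the handler decorators turn
# that into an HTTP 404. The session/url wiring and the function names below are assumptions;
# only the error class and the status mapping come from the diffs.

from aiohttp import ClientResponseError, web
from servicelib.aiohttp import status  # import path as used in _client.py above


class DefaultPricingPlanNotFoundError(Exception):  # stand-in for the class defined in errors.py
    ...


async def fetch_default_pricing_plan(session, url):
    # transport error -> domain error (mirrors _client.py above)
    try:
        async with session.get(url) as response:
            response.raise_for_status()  # raises ClientResponseError on 4xx/5xx
            return await response.json()
    except ClientResponseError as e:
        if e.status == status.HTTP_404_NOT_FOUND:
            raise DefaultPricingPlanNotFoundError from e
        raise


async def wrapped_handler(handler, request: web.Request) -> web.StreamResponse:
    # domain error -> HTTP 404 (mirrors _handlers_errors.py above)
    try:
        return await handler(request)
    except DefaultPricingPlanNotFoundError as exc:
        raise web.HTTPNotFound(reason=f"{exc}") from exc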
+ + +class DefaultPricingPlanNotFoundError(ResourceUsageValueError): + msg_template = "Default pricing plan not found" From 02a7908b8f236fd69236ce700f72979ce4e11fd7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Wed, 25 Sep 2024 17:55:50 +0200 Subject: [PATCH 032/104] =?UTF-8?q?=F0=9F=90=9B[FrontEnd]=20Better=20error?= =?UTF-8?q?=20handling=20of=20invalid=20pre-registration=20data=20(#6438)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/po/PreRegistration.js | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js index 2234de28c19..e9fb2cdb434 100644 --- a/services/static-webserver/client/source/class/osparc/po/PreRegistration.js +++ b/services/static-webserver/client/source/class/osparc/po/PreRegistration.js @@ -29,7 +29,9 @@ qx.Class.define("osparc.po.PreRegistration", { break; case "finding-status": control = new qx.ui.basic.Label(); - this._add(control); + this._add(control, { + rich: true + }); break; case "pre-registration-container": control = new qx.ui.container.Scroll(); @@ -76,11 +78,27 @@ qx.Class.define("osparc.po.PreRegistration", { } if (form.validate()) { submitBtn.setFetching(true); + + const flashErrorMsg = this.tr("Pre-Registration Failed. See details below"); const findingStatus = this.getChildControl("finding-status"); findingStatus.setValue(this.tr("Searching Pre-Registered users...")); - const params = { - data: JSON.parse(requestAccountData.getValue()) - }; + + let params; + try { + params = { + data: JSON.parse(requestAccountData.getValue()) + }; + } catch (err) { + console.error(err); + + const detailErrorMsg = `Error parsing Request Form JSON. 
${err}`;
+        findingStatus.setValue(detailErrorMsg);
+
+        osparc.FlashMessenger.logAs(flashErrorMsg, "ERROR");
+        submitBtn.setFetching(false);
+        return;
+      }
+
       osparc.data.Resources.fetch("users", "preRegister", params)
         .then(data => {
           if (data.length) {
@@ -91,9 +109,10 @@
             this.__populatePreRegistrationLayout(data);
           })
           .catch(err => {
-            findingStatus.setValue(this.tr("Error searching Pre-Registered users"));
+            const detailErrorMsg = this.tr(`Error during Pre-Registration: ${err.message}`);
+            findingStatus.setValue(detailErrorMsg);
             console.error(err);
-            osparc.FlashMessenger.logAs(err.message, "ERROR");
+            osparc.FlashMessenger.logAs(flashErrorMsg, "ERROR");
           })
           .finally(() => submitBtn.setFetching(false));
       }

From 6b7f9f8a0b52db8d631bae8b9d82bcab90d224b4 Mon Sep 17 00:00:00 2001
From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com>
Date: Wed, 25 Sep 2024 20:42:48 +0200
Subject: [PATCH 033/104] e2e: fixes wrong sequence in classic tip (#6450)

---
 tests/e2e-playwright/tests/tip/test_ti_plan.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py
index 02db3952310..f6defd9d6e4 100644
--- a/tests/e2e-playwright/tests/tip/test_ti_plan.py
+++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py
@@ -94,15 +94,19 @@ def test_classic_ti_plan(  # noqa: PLR0915
     create_tip_plan_from_dashboard: Callable[[str], dict[str, Any]],
 ):
     with log_context(logging.INFO, "Checking 'Access TIP' teaser"):
+        # click to open and expand
+        page.get_by_test_id("userMenuBtn").click()
+
         if is_product_lite:
-            page.get_by_test_id("userMenuBtn").click()
             page.get_by_test_id("userMenuAccessTIPBtn").click()
             assert page.get_by_test_id("tipTeaserWindow").is_visible()
             page.get_by_test_id("tipTeaserWindowCloseBtn").click()
         else:
             assert (
-                page.get_by_test_id("userMenuBtn").count() == 0
+                page.get_by_test_id("userMenuAccessTIPBtn").count() == 0
             ), "full version should NOT have a teaser"
+            # click to close
+            page.get_by_test_id("userMenuBtn").click()

     # press + button
     project_data = create_tip_plan_from_dashboard("newTIPlanButton")

From e2a6bdcb91183259e16ee217f87276ea451dca41 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Wed, 25 Sep 2024 20:53:08 +0200
Subject: [PATCH 034/104] =?UTF-8?q?=F0=9F=8E=A8=20improve=20tip=20e2e=20te?=
 =?UTF-8?q?st=20(#6448)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../src/pytest_simcore/helpers/playwright.py  | 27 ++++++++++++++++---
 1 file changed, 23 insertions(+), 4 deletions(-)

diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
index 0225642cc4f..38d016db73f 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
@@ -236,6 +236,14 @@ def __call__(self, message: str) -> bool:

         return False

+    def is_progress_successfully_finished(self) -> bool:
+        return all(
+            round(progress, 1) == 1.0 for progress in self._current_progress.values()
+        )
+
+    def get_current_progress(self):
+        return self._current_progress.values()
+

 def wait_for_pipeline_state(
     current_state: RunningState,
@@ -327,10 +335,21 @@ def expected_service_running(
     with log_context(logging.INFO, msg="Waiting for node to run") as ctx:
         waiter = SocketIONodeProgressCompleteWaiter(node_id=node_id, logger=ctx.logger)
         service_running = ServiceRunning(iframe_locator=None)
-        with websocket.expect_event("framereceived", waiter, timeout=timeout):
-            if press_start_button:
-                _trigger_service_start(page, node_id)
-        yield service_running
+
+        try:
+            with websocket.expect_event("framereceived", waiter, timeout=timeout):
+                if press_start_button:
+                    _trigger_service_start(page, node_id)
+        except TimeoutError:
+            if waiter.is_progress_successfully_finished() is False:
+                ctx.logger.warning(
+                    "⚠️ Progress bar didn't receive 100 percent: %s ⚠️",  # https://github.com/ITISFoundation/osparc-simcore/issues/6449
+                    waiter.get_current_progress(),
+                )
+            else:
+                raise
+
+        yield service_running

     service_running.iframe_locator = page.frame_locator(
         f'[osparc-test-id="iframe_{node_id}"]'

From 32bedc7dba9882bcc82faf7827e44519da86f855 Mon Sep 17 00:00:00 2001
From: Andrei Neagu <5694077+GitHK@users.noreply.github.com>
Date: Thu, 26 Sep 2024 09:03:32 +0200
Subject: [PATCH 035/104] =?UTF-8?q?=F0=9F=90=9B=20Fixes=20issue=20with=20p?=
 =?UTF-8?q?arsing=20logs=20polluting=20sidecar=20logs=20(#6442)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Andrei Neagu
---
 .../node_ports_common/r_clone_utils.py        |  1 +
 .../test_node_ports_common_r_clone_utils.py   | 75 +++++++++++++++++++
 2 files changed, 76 insertions(+)
 create mode 100644 packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
index f539e451026..fa34b0426a0 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
@@ -44,6 +44,7 @@ class _RCloneSyncTransferringMessage(_RCloneSyncMessageBase):
     _RCloneSyncTransferCompletedMessage,
     _RCloneSyncUpdatedMessage,
     _RCloneSyncTransferringMessage,
+    _RCloneSyncMessageBase,
 ]


diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py
new file mode 100644
index 00000000000..13d2bed6042
--- /dev/null
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py
@@ -0,0 +1,75 @@
+import json
+from unittest.mock import AsyncMock
+
+import pytest
+from pydantic import parse_raw_as
+from simcore_sdk.node_ports_common.r_clone_utils import (
+    SyncProgressLogParser,
+    _RCloneSyncMessageBase,
+    _RCloneSyncMessages,
+    _RCloneSyncTransferCompletedMessage,
+    _RCloneSyncTransferringMessage,
+    _RCloneSyncUpdatedMessage,
+)
+
+
+@pytest.mark.parametrize(
+    "log_message,expected",
+    [
+        (
+            '{"level":"info","msg":"There was nothing to transfer","source":"sync/sync.go:954","time":"2024-09-25T10:18:04.904537+00:00"}',
+            _RCloneSyncMessageBase,
+        ),
+        (
+            '{"level":"info","msg":"","object":".hidden_do_not_remove","objectType":"*s3.Object","source":"operations/operations.go:277","time":"2024-09-24T07:11:22.147117+00:00"}',
+            _RCloneSyncUpdatedMessage,
+        ),
+        (
+            '{"level":"info","msg":"Copied (new)","object":"README.ipynb","objectType":"*s3.Object","size":5123,"source":"operations/copy.go:360","time":"2024-04-23T14:05:10.408277+00:00"}',
+            _RCloneSyncTransferCompletedMessage,
+        ),
+        (
+            json.dumps(
+                {
+                    "level": "",
+                    "msg": "",
+                    "source": "",
+                    "time": "2024-09-24T07:11:22.147117+00:00",
+                    "object": "str",
+                }
+            ),
+            _RCloneSyncUpdatedMessage,
+        ),
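# Aside (a self-contained sketch, not part of the patch): the one-line fix above relies on
# pydantic v1 trying Union members in declaration order and returning the first that
# validates. The permissive _RCloneSyncMessageBase therefore has to stay LAST in
# _RCloneSyncMessages; placed earlier it would match every log line and shadow the more
# specific message types. The names below are illustrative, not from the repo:

from typing import Union

from pydantic import BaseModel, parse_raw_as


class Base(BaseModel):
    level: str


class Completed(Base):
    size: int


# specific type first, permissive fallback last, mirroring _RCloneSyncMessages above
SyncMessage = Union[Completed, Base]

assert isinstance(parse_raw_as(SyncMessage, '{"level": "info", "size": 5}'), Completed)
assert type(parse_raw_as(SyncMessage, '{"level": "info"}')) is Base
# with the reversed order, Union[Base, Completed], Base would match first and "size" would
# be silently dropped, which is exactly the shadowing the ordering above avoids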
+ ( + json.dumps( + { + "level": "", + "msg": "", + "source": "", + "time": "2024-09-24T07:11:22.147117+00:00", + "object": "str", + "size": 1, + } + ), + _RCloneSyncTransferCompletedMessage, + ), + ( + json.dumps( + { + "level": "", + "msg": "", + "source": "", + "time": "2024-09-24T07:11:22.147117+00:00", + "stats": {"bytes": 1, "totalBytes": 1}, + } + ), + _RCloneSyncTransferringMessage, + ), + ], +) +async def test_rclone_stbc_message_parsing_regression(log_message: str, expected: type): + parsed_log = parse_raw_as(_RCloneSyncMessages, log_message) # type: ignore[arg-type] + assert isinstance(parsed_log, expected) + + progress_log_parser = SyncProgressLogParser(AsyncMock()) + await progress_log_parser(log_message) From bd45d6b9e0e3419dda77da59bf330356f8228276 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 26 Sep 2024 09:33:16 +0200 Subject: [PATCH 036/104] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20TIP:=20Rebr?= =?UTF-8?q?anding=20(#6446)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/static-webserver/client/compile.json | 4 ++-- .../static-webserver/client/scripts/post-compile.py | 2 +- .../static-webserver/client/source/boot/index.html | 8 ++++---- .../client/source/class/osparc/dashboard/NewStudies.js | 9 ++++++++- .../class/osparc/product/quickStart/tis/S4LPostPro.js | 3 ++- .../client/source/class/osparc/utils/Utils.js | 4 ++++ .../client/source/resource/osparc/new_studies.json | 10 +++++----- 7 files changed, 26 insertions(+), 14 deletions(-) diff --git a/services/static-webserver/client/compile.json b/services/static-webserver/client/compile.json index f6cce359eed..780532e3cf0 100644 --- a/services/static-webserver/client/compile.json +++ b/services/static-webserver/client/compile.json @@ -136,7 +136,7 @@ "class": "osparc.Application", "theme": "osparc.theme.products.tis.ThemeDark", "name": "tis", - "title": "TI Plan - IT'IS", + "title": "TIP V3.0 - IT'IS", "include": [ "iconfont.material.Load", "iconfont.fontawesome5.Load", @@ -152,7 +152,7 @@ "class": "osparc.Application", "theme": "osparc.theme.products.tis.ThemeDark", "name": "tiplite", - "title": "TI Plan lite - IT'IS", + "title": "TIP.lite - IT'IS", "include": [ "iconfont.material.Load", "iconfont.fontawesome5.Load", diff --git a/services/static-webserver/client/scripts/post-compile.py b/services/static-webserver/client/scripts/post-compile.py index 6c532c83cee..afc1cd5033a 100644 --- a/services/static-webserver/client/scripts/post-compile.py +++ b/services/static-webserver/client/scripts/post-compile.py @@ -31,7 +31,7 @@ def update_apps_metadata(): replacements = i.get("replacements") for key in replacements: replace_text = replacements[key] - data = data.replace(key, replace_text) + data = data.replace("${"+key+"}", replace_text) with open(filename, "w") as file: print(f"Updating app metadata: {filename}") file.write(data) diff --git a/services/static-webserver/client/source/boot/index.html b/services/static-webserver/client/source/boot/index.html index 48b35c48db9..aa9e202a085 100644 --- a/services/static-webserver/client/source/boot/index.html +++ b/services/static-webserver/client/source/boot/index.html @@ -41,9 +41,9 @@ - - - + + + ${appTitle} - +
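# Aside (a minimal sketch, not part of the patch): the post-compile.py change above switches
# the app-metadata substitution from bare keys to delimited "${key}" placeholders, matching
# the "${appTitle}" tokens in index.html. The title value below is taken from compile.json
# above; the HTML line is an invented example:

replacements = {"appTitle": "TIP V3.0 - IT'IS"}
data = "<title>${appTitle}</title>"
for key, value in replacements.items():
    # delimited tokens cannot accidentally match substrings of other identifiers,
    # which replacing the bare key risked before this patch
    data = data.replace("${" + key + "}", value)
assert data == "<title>TIP V3.0 - IT'IS</title>"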