diff --git a/.env-devel b/.env-devel
index ab569ad0502..16839a5d0b2 100644
--- a/.env-devel
+++ b/.env-devel
@@ -17,10 +17,12 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO
AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
+AGENT_TRACING={}
API_SERVER_DEV_FEATURES_ENABLED=0
API_SERVER_LOGLEVEL=INFO
API_SERVER_PROFILING=1
+API_SERVER_TRACING={}
TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25
AUTOSCALING_DASK=null
@@ -33,6 +35,7 @@ AUTOSCALING_LOGLEVEL=INFO
AUTOSCALING_NODES_MONITORING=null
AUTOSCALING_POLL_INTERVAL="00:00:10"
AUTOSCALING_SSM_ACCESS=null
+AUTOSCALING_TRACING={}
AWS_S3_CLI_S3=null
@@ -44,6 +47,7 @@ CATALOG_PORT=8000
CATALOG_PROFILING=1
CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}'
+CATALOG_TRACING={}
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
@@ -57,6 +61,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
CLUSTERS_KEEPER_TASK_INTERVAL=30
CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
+CLUSTERS_KEEPER_TRACING={}
DASK_SCHEDULER_HOST=dask-scheduler
DASK_SCHEDULER_PORT=8786
@@ -70,6 +75,7 @@ DIRECTOR_HOST=director
DIRECTOR_PORT=8080
DIRECTOR_REGISTRY_CACHING_TTL=900
DIRECTOR_REGISTRY_CACHING=True
+DIRECTOR_TRACING={}
EFS_USER_ID=8006
EFS_USER_NAME=efs
@@ -79,6 +85,10 @@ EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com
EFS_MOUNTED_PATH=/tmp/efs
EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data
EFS_ONLY_ENABLED_FOR_USERIDS=[]
+EFS_GUARDIAN_TRACING={}
+
+# DATCORE_ADAPTER
+DATCORE_ADAPTER_TRACING={}
# DIRECTOR_V2 ----
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
@@ -105,10 +115,13 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG
DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[]
DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={}
DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00
-# DIRECTOR_V2 ----
+DIRECTOR_V2_TRACING={}
+
+# DYNAMIC_SCHEDULER ----
DYNAMIC_SCHEDULER_LOGLEVEL=DEBUG
DYNAMIC_SCHEDULER_PROFILING=1
DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00
+DYNAMIC_SCHEDULER_TRACING={}
FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "unknown@osparc.io", "affiliation": "unknown"}}'
@@ -122,6 +135,7 @@ INVITATIONS_PORT=8000
INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key='
INVITATIONS_SWAGGER_API_DOC_ENABLED=1
INVITATIONS_USERNAME=admin
+INVITATIONS_TRACING={}
LOG_FORMAT_LOCAL_DEV_ENABLED=1
LOG_FILTER_MAPPING='{}'
@@ -146,6 +160,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
PAYMENTS_STRIPE_URL=https://api.stripe.com
PAYMENTS_SWAGGER_API_DOC_ENABLED=1
PAYMENTS_USERNAME=admin
+PAYMENTS_TRACING={}
POSTGRES_DB=simcoredb
POSTGRES_ENDPOINT=postgres:5432
@@ -185,6 +200,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
RESOURCE_USAGE_TRACKER_S3=null
+RESOURCE_USAGE_TRACKER_TRACING={}
# NOTE: 172.17.0.1 is the docker0 interface, which redirects from inside a container onto the host network interface.
R_CLONE_OPTION_BUFFER_SIZE=0M
@@ -218,6 +234,7 @@ STORAGE_HOST=storage
STORAGE_LOGLEVEL=INFO
STORAGE_PORT=8080
STORAGE_PROFILING=1
+STORAGE_TRACING={}
# STORAGE ----
SWARM_STACK_NAME=master-simcore
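
Note on the `*_TRACING={}` entries above: an empty JSON object opts that service into OpenTelemetry tracing by letting pydantic build the nested TracingSettings from its own environment defaults, while `null` disables tracing. A minimal sketch of the settings field behind this convention, mirroring the AGENT_TRACING field added later in this diff (the service name here is illustrative):

from pydantic import Field
from settings_library.base import BaseCustomSettings
from settings_library.tracing import TracingSettings


class ExampleServiceSettings(BaseCustomSettings):
    # EXAMPLE_TRACING={}   -> TracingSettings populated from its own env vars
    # EXAMPLE_TRACING=null -> tracing disabled (field stays None)
    EXAMPLE_TRACING: TracingSettings | None = Field(
        description="settings for opentelemetry tracing",
        json_schema_extra={"auto_default_from_env": True},
    )
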
diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py
index ee529da655c..ef5e29ac85d 100644
--- a/api/specs/web-server/_folders.py
+++ b/api/specs/web-server/_folders.py
@@ -21,7 +21,7 @@
from models_library.workspaces import WorkspaceID
from pydantic import Json
from simcore_service_webserver._meta import API_VTAG
-from simcore_service_webserver.folders._folders_handlers import FoldersPathParams
+from simcore_service_webserver.folders._models import FolderFilters, FoldersPathParams
router = APIRouter(
prefix=f"/{API_VTAG}",
@@ -30,8 +30,6 @@
],
)
-### Folders
-
@router.post(
"/folders",
@@ -57,6 +55,32 @@ async def list_folders(
example='{"field": "name", "direction": "desc"}',
),
] = '{"field": "modified_at", "direction": "desc"}',
+ filters: Annotated[
+ Json | None,
+ Query(description=FolderFilters.schema_json(indent=1)),
+ ] = None,
+):
+ ...
+
+
+@router.get(
+ "/folders:search",
+ response_model=Envelope[list[FolderGet]],
+)
+async def list_folders_full_search(
+ params: Annotated[PageQueryParameters, Depends()],
+ text: str | None = None,
+ order_by: Annotated[
+ Json,
+ Query(
+ description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
+ example='{"field": "name", "direction": "desc"}',
+ ),
+ ] = '{"field": "modified_at", "direction": "desc"}',
+ filters: Annotated[
+ Json | None,
+ Query(description=FolderFilters.schema_json(indent=1)),
+ ] = None,
):
...
diff --git a/api/specs/web-server/_projects_crud.py b/api/specs/web-server/_projects_crud.py
index aad8fa82760..4c560464eb8 100644
--- a/api/specs/web-server/_projects_crud.py
+++ b/api/specs/web-server/_projects_crud.py
@@ -32,6 +32,7 @@
from simcore_service_webserver.projects._common_models import ProjectPathParams
from simcore_service_webserver.projects._crud_handlers import ProjectCreateParams
from simcore_service_webserver.projects._crud_handlers_models import (
+ ProjectFilters,
ProjectListFullSearchParams,
ProjectListParams,
)
@@ -83,7 +84,10 @@ async def list_projects(
example='{"field": "last_change_date", "direction": "desc"}',
),
] = '{"field": "last_change_date", "direction": "desc"}',
- filters: Annotated[Json | None, Query()] = None,
+ filters: Annotated[
+ Json | None,
+ Query(description=ProjectFilters.schema_json(indent=1)),
+ ] = None,
):
...
diff --git a/api/specs/web-server/_trash.py b/api/specs/web-server/_trash.py
index cdde2b8c32f..cdd883f7cf3 100644
--- a/api/specs/web-server/_trash.py
+++ b/api/specs/web-server/_trash.py
@@ -9,10 +9,14 @@
from fastapi import APIRouter, Depends, status
from simcore_service_webserver._meta import API_VTAG
-from simcore_service_webserver.projects._trash_handlers import (
- ProjectPathParams,
+from simcore_service_webserver.folders._models import (
+ FoldersPathParams,
RemoveQueryParams,
)
+from simcore_service_webserver.projects._trash_handlers import ProjectPathParams
+from simcore_service_webserver.projects._trash_handlers import (
+ RemoveQueryParams as RemoveQueryParams_duplicated,
+)
router = APIRouter(
prefix=f"/{API_VTAG}",
@@ -59,3 +63,36 @@ def untrash_project(
_p: Annotated[ProjectPathParams, Depends()],
):
...
+
+
+_extra_tags = ["folders"]
+
+
+@router.post(
+ "/folders/{folder_id}:trash",
+ tags=_extra_tags,
+ status_code=status.HTTP_204_NO_CONTENT,
+ responses={
+        status.HTTP_404_NOT_FOUND: {"description": "No such folder"},
+ status.HTTP_409_CONFLICT: {
+ "description": "One or more projects is in use and cannot be trashed"
+ },
+ status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"},
+ },
+)
+def trash_folder(
+ _p: Annotated[FoldersPathParams, Depends()],
+ _q: Annotated[RemoveQueryParams_duplicated, Depends()],
+):
+ ...
+
+
+@router.post(
+ "/folders/{folder_id}:untrash",
+ tags=_extra_tags,
+ status_code=status.HTTP_204_NO_CONTENT,
+)
+def untrash_folder(
+ _p: Annotated[FoldersPathParams, Depends()],
+):
+ ...
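
For illustration, a client call against the new folder-trash endpoints on a running deployment (a sketch; the `v0` prefix is assumed from API_VTAG and the base URL is a placeholder):

import httpx

# Trash folder 123, then restore it; both endpoints return 204 on success.
base_url = "http://localhost:8080"  # placeholder
assert httpx.post(f"{base_url}/v0/folders/123:trash").status_code == 204
assert httpx.post(f"{base_url}/v0/folders/123:untrash").status_code == 204
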
diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt
index a47365541df..c6500144e42 100644
--- a/packages/aws-library/requirements/_base.txt
+++ b/packages/aws-library/requirements/_base.txt
@@ -52,8 +52,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/_base.in
# -r requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders.py b/packages/models-library/src/models_library/api_schemas_webserver/folders.py
index 48a2ae605e4..092a5cb94fe 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/folders.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/folders.py
@@ -18,6 +18,7 @@ class FolderGet(OutputSchema):
description: str
created_at: datetime
modified_at: datetime
+ trashed_at: datetime | None
owner: GroupID
my_access_rights: AccessRights
access_rights: dict[GroupID, AccessRights]
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
index 85f3604381a..cd574893d94 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
@@ -18,6 +18,7 @@ class FolderGet(OutputSchema):
name: str
created_at: datetime
modified_at: datetime
+ trashed_at: datetime | None
owner: GroupID
workspace_id: WorkspaceID | None
my_access_rights: AccessRights
diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py
index f514439d732..fad48dfb90a 100644
--- a/packages/models-library/src/models_library/folders.py
+++ b/packages/models-library/src/models_library/folders.py
@@ -1,14 +1,42 @@
from datetime import datetime
+from enum import auto
from typing import TypeAlias
-from pydantic import BaseModel, ConfigDict, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt, ValidationInfo, field_validator
+from .access_rights import AccessRights
from .users import GroupID, UserID
+from .utils.enums import StrAutoEnum
from .workspaces import WorkspaceID
FolderID: TypeAlias = PositiveInt
+class FolderScope(StrAutoEnum):
+ ROOT = auto()
+ SPECIFIC = auto()
+ ALL = auto()
+
+
+class FolderQuery(BaseModel):
+ folder_scope: FolderScope
+    folder_id: PositiveInt | None = Field(default=None, validate_default=True)
+
+ @field_validator("folder_id", mode="before")
+ @classmethod
+    def validate_folder_id(cls, value, info: ValidationInfo):
+        scope = info.data.get("folder_scope")
+ if scope == FolderScope.SPECIFIC and value is None:
+ raise ValueError(
+ "folder_id must be provided when folder_scope is SPECIFIC."
+ )
+ if scope != FolderScope.SPECIFIC and value is not None:
+ raise ValueError(
+ "folder_id should be None when folder_scope is not SPECIFIC."
+ )
+ return value
+
+
#
# DB
#
@@ -30,7 +58,17 @@ class FolderDB(BaseModel):
...,
description="Timestamp of last modification",
)
+    trashed_at: datetime | None = Field(
+        ...,
+        description="The date and time when the folder was marked as trashed. Null if the folder has not been trashed.",
+    )
+
user_id: UserID | None
workspace_id: WorkspaceID | None
model_config = ConfigDict(from_attributes=True)
+
+
+class UserFolderAccessRightsDB(FolderDB):
+ my_access_rights: AccessRights
+
+ model_config = ConfigDict(from_attributes=True)
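
The validator ties `folder_id` to exactly the SPECIFIC scope; the WorkspaceQuery model later in this diff follows the same pattern. Illustrative usage:

from models_library.folders import FolderQuery, FolderScope

FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=42)  # valid
FolderQuery(folder_scope=FolderScope.ROOT)  # valid, folder_id stays None
# FolderQuery(folder_scope=FolderScope.SPECIFIC)         -> ValidationError
# FolderQuery(folder_scope=FolderScope.ALL, folder_id=1) -> ValidationError
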
diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py
index 5a1d20d16f2..dcc15295a5f 100644
--- a/packages/models-library/src/models_library/projects.py
+++ b/packages/models-library/src/models_library/projects.py
@@ -182,5 +182,6 @@ class Project(BaseProjectModel):
default=None,
alias="trashedAt",
)
+ trashed_explicitly: bool = Field(default=False, alias="trashedExplicitly")
model_config = ConfigDict(title="osparc-simcore project", extra="forbid")
diff --git a/packages/models-library/src/models_library/workspaces.py b/packages/models-library/src/models_library/workspaces.py
index e1d0f8d17fd..24bae0e7d5d 100644
--- a/packages/models-library/src/models_library/workspaces.py
+++ b/packages/models-library/src/models_library/workspaces.py
@@ -1,13 +1,41 @@
from datetime import datetime
+from enum import auto
from typing import TypeAlias
-from models_library.access_rights import AccessRights
-from models_library.users import GroupID
-from pydantic import BaseModel, ConfigDict, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt, ValidationInfo, field_validator
+
+from .access_rights import AccessRights
+from .users import GroupID
+from .utils.enums import StrAutoEnum
WorkspaceID: TypeAlias = PositiveInt
+class WorkspaceScope(StrAutoEnum):
+ PRIVATE = auto()
+ SHARED = auto()
+ ALL = auto()
+
+
+class WorkspaceQuery(BaseModel):
+ workspace_scope: WorkspaceScope
+    workspace_id: PositiveInt | None = Field(default=None, validate_default=True)
+
+ @field_validator("workspace_id", mode="before")
+ @classmethod
+    def validate_workspace_id(cls, value, info: ValidationInfo):
+        scope = info.data.get("workspace_scope")
+ if scope == WorkspaceScope.SHARED and value is None:
+ raise ValueError(
+ "workspace_id must be provided when workspace_scope is SHARED."
+ )
+ if scope != WorkspaceScope.SHARED and value is not None:
+ raise ValueError(
+ "workspace_id should be None when workspace_scope is not SHARED."
+ )
+ return value
+
+
#
# DB
#
diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt
index f8088ba40d6..3fc598588d3 100644
--- a/packages/notifications-library/requirements/_base.txt
+++ b/packages/notifications-library/requirements/_base.txt
@@ -18,6 +18,10 @@ attrs==24.2.0
# referencing
click==8.1.7
# via typer
+deprecated==1.2.14
+ # via
+ # opentelemetry-api
+ # opentelemetry-semantic-conventions
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
@@ -28,6 +32,8 @@ idna==3.10
# via
# email-validator
# yarl
+importlib-metadata==8.5.0
+ # via opentelemetry-api
jinja2==3.1.4
# via
# -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
@@ -64,6 +70,19 @@ mdurl==0.1.2
# via markdown-it-py
multidict==6.1.0
# via yarl
+opentelemetry-api==1.28.1
+ # via
+ # opentelemetry-instrumentation
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-semantic-conventions
+opentelemetry-instrumentation==0.49b1
+ # via opentelemetry-instrumentation-asyncpg
+opentelemetry-instrumentation-asyncpg==0.49b1
+ # via -r requirements/../../../packages/postgres-database/requirements/_base.in
+opentelemetry-semantic-conventions==0.49b1
+ # via
+ # opentelemetry-instrumentation
+ # opentelemetry-instrumentation-asyncpg
orjson==3.10.7
# via
# -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
@@ -79,6 +98,8 @@ orjson==3.10.7
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
+packaging==24.2
+ # via opentelemetry-instrumentation
psycopg2-binary==2.9.9
# via sqlalchemy
pydantic==2.9.1
@@ -157,5 +178,11 @@ typing-extensions==4.12.2
# pydantic
# pydantic-core
# typer
+wrapt==1.16.0
+ # via
+ # deprecated
+ # opentelemetry-instrumentation
yarl==1.12.1
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
+zipp==3.21.0
+ # via importlib-metadata
diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt
index bd0edc5133e..074c7ae2d3d 100644
--- a/packages/notifications-library/requirements/_test.txt
+++ b/packages/notifications-library/requirements/_test.txt
@@ -28,8 +28,9 @@ mypy==1.12.0
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
-packaging==24.1
+packaging==24.2
# via
+ # -c requirements/_base.txt
# pytest
# pytest-sugar
pluggy==1.5.0
diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt
index 217752d687f..4a902da9cb2 100644
--- a/packages/notifications-library/requirements/_tools.txt
+++ b/packages/notifications-library/requirements/_tools.txt
@@ -38,8 +38,9 @@ mypy-extensions==1.0.0
# mypy
nodeenv==1.9.1
# via pre-commit
-packaging==24.1
+packaging==24.2
# via
+ # -c requirements/_base.txt
# -c requirements/_test.txt
# black
# build
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py
new file mode 100644
index 00000000000..2cd8adb00f0
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py
@@ -0,0 +1,73 @@
+"""project and folder trash columns
+
+Revision ID: 5ad02358751a
+Revises: fce5d231e16d
+Create Date: 2024-11-07 17:14:01.094583+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "5ad02358751a"
+down_revision = "fce5d231e16d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "folders_v2",
+ sa.Column(
+ "trashed_at",
+ sa.DateTime(timezone=True),
+ nullable=True,
+ comment="The date and time when the folder was marked as trashed.Null if the folder has not been trashed [default].",
+ ),
+ )
+ op.add_column(
+ "folders_v2",
+ sa.Column(
+ "trashed_explicitly",
+ sa.Boolean(),
+ server_default=sa.text("false"),
+ nullable=False,
+ comment="Indicates whether the folder was explicitly trashed by the user (true) or inherited its trashed status from a parent (false) [default].",
+ ),
+ )
+ op.add_column(
+ "projects",
+ sa.Column(
+ "trashed_explicitly",
+ sa.Boolean(),
+ server_default=sa.text("false"),
+ nullable=False,
+ comment="Indicates whether the project was explicitly trashed by the user (true) or inherited its trashed status from a parent (false) [default].",
+ ),
+ )
+ op.alter_column(
+ "projects",
+ "trashed_at",
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ comment="The date and time when the project was marked as trashed. Null if the project has not been trashed [default].",
+ existing_nullable=True,
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.alter_column(
+ "projects",
+ "trashed_at",
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ comment=None,
+ existing_comment="The date and time when the project was marked as trashed. Null if the project has not been trashed [default].",
+ existing_nullable=True,
+ )
+ op.drop_column("projects", "trashed_explicitly")
+ op.drop_column("folders_v2", "trashed_explicitly")
+ op.drop_column("folders_v2", "trashed_at")
+ # ### end Alembic commands ###
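
The new `trashed_explicitly` flag separates items the user trashed directly from items swept along with a trashed parent. A sketch of a query this enables — an assumption about intended use, not code from this PR, and it assumes the table object is named `folders_v2` with a `folder_id` primary key:

import sqlalchemy as sa
from simcore_postgres_database.models.folders_v2 import folders_v2

# Folders to show in a top-level "Trash" listing: trashed, and trashed
# directly rather than via a trashed ancestor.
explicitly_trashed_folders = sa.select(folders_v2.c.folder_id).where(
    folders_v2.c.trashed_at.is_not(None) & folders_v2.c.trashed_explicitly
)
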
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py
new file mode 100644
index 00000000000..ecbe20b40e8
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py
@@ -0,0 +1,29 @@
+"""add cancellation mark
+
+Revision ID: 8bfe65a5e294
+Revises: 5ad02358751a
+Create Date: 2024-11-08 14:40:59.266181+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "8bfe65a5e294"
+down_revision = "5ad02358751a"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "comp_runs", sa.Column("cancelled", sa.DateTime(timezone=True), nullable=True)
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("comp_runs", "cancelled")
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
index e402a171562..eb84cefaa76 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
@@ -99,6 +99,12 @@
nullable=True,
doc="When the run was finished",
),
+ sa.Column(
+ "cancelled",
+ sa.DateTime(timezone=True),
+ nullable=True,
+ doc="If filled, when cancellation was requested",
+ ),
sa.Column("metadata", JSONB, nullable=True, doc="the run optional metadata"),
sa.Column(
"use_on_demand_clusters",
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py b/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
index b1393bf5367..fcad0ada76c 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
@@ -1,4 +1,5 @@
import sqlalchemy as sa
+from sqlalchemy.sql import expression
from ._common import column_created_datetime, column_modified_datetime
from .base import metadata
@@ -74,4 +75,19 @@
),
column_created_datetime(timezone=True),
column_modified_datetime(timezone=True),
+ sa.Column(
+ "trashed_at",
+ sa.DateTime(timezone=True),
+ nullable=True,
+ comment="The date and time when the folder was marked as trashed."
+ "Null if the folder has not been trashed [default].",
+ ),
+ sa.Column(
+ "trashed_explicitly",
+ sa.Boolean,
+ nullable=False,
+ server_default=expression.false(),
+ comment="Indicates whether the folder was explicitly trashed by the user (true)"
+ " or inherited its trashed status from a parent (false) [default].",
+ ),
)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py
index 629113f06dc..778d2b80eb5 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/projects.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py
@@ -5,7 +5,7 @@
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
-from sqlalchemy.sql import func
+from sqlalchemy.sql import expression, func
from .base import metadata
@@ -145,7 +145,16 @@ class ProjectType(enum.Enum):
"trashed_at",
sa.DateTime(timezone=True),
nullable=True,
- doc="Timestamp indicating when the project was marked as trashed, or null otherwise.",
+ comment="The date and time when the project was marked as trashed. "
+ "Null if the project has not been trashed [default].",
+ ),
+ sa.Column(
+ "trashed_explicitly",
+ sa.Boolean,
+ nullable=False,
+ server_default=expression.false(),
+ comment="Indicates whether the project was explicitly trashed by the user (true)"
+ " or inherited its trashed status from a parent (false) [default].",
),
sa.Column(
"workspace_id",
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py
new file mode 100644
index 00000000000..e3d4e1438af
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py
@@ -0,0 +1,6 @@
+def assemble_array_groups(user_group_ids: list[int]) -> str:
+ return (
+ "array[]::text[]"
+ if len(user_group_ids) == 0
+ else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]"""
+ )
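
For reference, the helper renders a Postgres `text[]` literal for use in raw-SQL access-rights checks:

from simcore_postgres_database.utils_sql import assemble_array_groups

assert assemble_array_groups([]) == "array[]::text[]"
assert assemble_array_groups([3, 7]) == "array['3', '7']"
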
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py
new file mode 100644
index 00000000000..05b24d969bd
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py
@@ -0,0 +1,30 @@
+from simcore_postgres_database.models.groups import user_to_groups
+from simcore_postgres_database.models.workspaces_access_rights import (
+ workspaces_access_rights,
+)
+from sqlalchemy import func
+from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
+from sqlalchemy.sql import Subquery, select
+
+
+def create_my_workspace_access_rights_subquery(user_id: int) -> Subquery:
+ return (
+ select(
+ workspaces_access_rights.c.workspace_id,
+ func.json_build_object(
+ "read",
+ func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN),
+ "write",
+ func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN),
+ "delete",
+ func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN),
+ ).label("my_access_rights"),
+ )
+ .select_from(
+ workspaces_access_rights.join(
+ user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid
+ )
+ )
+ .where(user_to_groups.c.uid == user_id)
+ .group_by(workspaces_access_rights.c.workspace_id)
+ ).subquery("my_workspace_access_rights_subquery")
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py
index a190fa6900e..55065daaf76 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py
@@ -186,5 +186,5 @@ async def assert_get_same_project(
data, error = await assert_status(resp, expected)
if not error:
- assert data == project
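+        # response may be a projection of the project, so compare only the returned keys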
+ assert data == {k: project[k] for k in data}
return data
diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt
index bbc6673d016..4ed45283e9e 100644
--- a/packages/service-library/requirements/_base.txt
+++ b/packages/service-library/requirements/_base.txt
@@ -33,8 +33,6 @@ arrow==1.3.0
# via
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
diff --git a/packages/service-library/requirements/_fastapi.in b/packages/service-library/requirements/_fastapi.in
index 7b6a6bb2cf2..e11871af331 100644
--- a/packages/service-library/requirements/_fastapi.in
+++ b/packages/service-library/requirements/_fastapi.in
@@ -9,6 +9,7 @@
fastapi
httpx
opentelemetry-instrumentation-fastapi
+opentelemetry-instrumentation-httpx
prometheus-client
prometheus-fastapi-instrumentator
uvicorn
diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt
index 42dd6b7a550..c1aa437bdea 100644
--- a/packages/service-library/requirements/_fastapi.txt
+++ b/packages/service-library/requirements/_fastapi.txt
@@ -52,23 +52,29 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-semantic-conventions
opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
opentelemetry-instrumentation-asgi==0.48b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/_fastapi.in
opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
prometheus-client==0.21.0
# via
# -r requirements/_fastapi.in
diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
index c113321a488..14aae2dafdf 100644
--- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py
+++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
@@ -5,9 +5,11 @@
from collections.abc import Awaitable, Callable
from typing import Any
+from common_library.errors_classes import OsparcErrorMixin
from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response
from httpx._types import TimeoutTypes, URLTypes
-from common_library.errors_classes import OsparcErrorMixin
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from tenacity import RetryCallState
from tenacity.asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
@@ -201,6 +203,7 @@ def __init__(
base_url: URLTypes | None = None,
default_http_client_timeout: TimeoutTypes | None = None,
extra_allowed_method_names: set[str] | None = None,
+ tracing_settings: TracingSettings | None,
) -> None:
_assert_public_interface(self, extra_allowed_method_names)
@@ -220,7 +223,10 @@ def __init__(
if default_http_client_timeout:
client_args["timeout"] = default_http_client_timeout
- super().__init__(client=AsyncClient(**client_args))
+ client = AsyncClient(**client_args)
+ if tracing_settings:
+ setup_httpx_client_tracing(client)
+ super().__init__(client=client)
async def __aenter__(self):
await self.setup_client()
diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py b/packages/service-library/src/servicelib/fastapi/tracing.py
index b5179a8a5f6..36e9b06fa12 100644
--- a/packages/service-library/src/servicelib/fastapi/tracing.py
+++ b/packages/service-library/src/servicelib/fastapi/tracing.py
@@ -5,11 +5,13 @@
import logging
from fastapi import FastAPI
+from httpx import AsyncClient, Client
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
OTLPSpanExporter as OTLPSpanExporterHTTP,
)
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
+from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
@@ -121,3 +123,7 @@ def setup_tracing(
msg="Attempting to add requests opentelemetry autoinstrumentation...",
):
RequestsInstrumentor().instrument()
+
+
+def setup_httpx_client_tracing(client: AsyncClient | Client) -> None:
+ HTTPXClientInstrumentor.instrument_client(client)
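
Illustrative wiring, matching how http_client_thin.py above uses the new helper (the settings attribute and base URL are placeholders):

from httpx import AsyncClient
from servicelib.fastapi.tracing import setup_httpx_client_tracing

tracing_settings = None  # e.g. settings.MYSERVICE_TRACING when tracing is enabled
client = AsyncClient(base_url="http://catalog:8000")  # placeholder URL
if tracing_settings:
    setup_httpx_client_tracing(client)
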
diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py
index fce89d7790e..f4b0c75ac72 100644
--- a/packages/service-library/src/servicelib/redis.py
+++ b/packages/service-library/src/servicelib/redis.py
@@ -60,6 +60,7 @@ async def _cancel_or_warn(task: Task) -> None:
@dataclass
class RedisClientSDK:
redis_dsn: str
+ client_name: str
decode_responses: bool = _DEFAULT_DECODE_RESPONSES
health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL
@@ -86,7 +87,7 @@ def __post_init__(self):
socket_connect_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(),
encoding="utf-8",
decode_responses=self.decode_responses,
- auto_close_connection_pool=True,
+ client_name=self.client_name,
)
@retry(**RedisRetryPolicyUponInitialization(_logger).kwargs)
@@ -238,6 +239,7 @@ class RedisClientsManager:
databases_configs: set[RedisManagerDBConfig]
settings: RedisSettings
+ client_name: str
_client_sdks: dict[RedisDatabase, RedisClientSDK] = field(default_factory=dict)
@@ -247,6 +249,7 @@ async def setup(self) -> None:
redis_dsn=self.settings.build_redis_dsn(config.database),
decode_responses=config.decode_responses,
health_check_interval=config.health_check_interval,
+ client_name=f"{self.client_name}",
)
for client in self._client_sdks.values():
diff --git a/packages/service-library/src/servicelib/redis_utils.py b/packages/service-library/src/servicelib/redis_utils.py
index 10f32ae5944..559349cbb0d 100644
--- a/packages/service-library/src/servicelib/redis_utils.py
+++ b/packages/service-library/src/servicelib/redis_utils.py
@@ -3,7 +3,7 @@
import logging
from collections.abc import Awaitable, Callable
from datetime import timedelta
-from typing import Any
+from typing import Any, ParamSpec, TypeVar
import arrow
@@ -12,10 +12,16 @@
_logger = logging.getLogger(__file__)
+P = ParamSpec("P")
+R = TypeVar("R")
+
def exclusive(
- redis: RedisClientSDK, *, lock_key: str, lock_value: bytes | str | None = None
-):
+ redis: RedisClientSDK | Callable[..., RedisClientSDK],
+ *,
+ lock_key: str | Callable[..., str],
+ lock_value: bytes | str | None = None,
+) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]:
"""
Define a method to run exclusively across
processes by leveraging a Redis Lock.
@@ -24,12 +30,30 @@ def exclusive(
redis: the redis client SDK
lock_key: a string as the name of the lock (good practice: app_name:lock_name)
lock_value: some additional data that can be retrieved by another client
+
+ Raises:
+ - ValueError if used incorrectly
+ - CouldNotAcquireLockError if the lock could not be acquired
"""
- def decorator(func):
+ if not lock_key:
+ msg = "lock_key cannot be empty string!"
+ raise ValueError(msg)
+
+ def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
@functools.wraps(func)
- async def wrapper(*args, **kwargs):
- async with redis.lock_context(lock_key=lock_key, lock_value=lock_value):
+ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ redis_lock_key = (
+ lock_key(*args, **kwargs) if callable(lock_key) else lock_key
+ )
+ assert isinstance(redis_lock_key, str) # nosec
+
+ redis_client = redis(*args, **kwargs) if callable(redis) else redis
+ assert isinstance(redis_client, RedisClientSDK) # nosec
+
+ async with redis_client.lock_context(
+ lock_key=redis_lock_key, lock_value=lock_value
+ ):
return await func(*args, **kwargs)
return wrapper
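
With the extended signature, both the client and the lock key may be derived from the decorated call's arguments, e.g. to lock per user. A sketch; the app-state attribute and names below are hypothetical:

from servicelib.redis_utils import exclusive

@exclusive(
    lambda app, user_id: app.state.redis_client_sdk,  # client built from call args
    lock_key=lambda app, user_id: f"myapp:clean_user:{user_id}",  # one lock per user
)
async def clean_user(app, user_id: int) -> None:
    ...  # body runs under the lock; raises CouldNotAcquireLockError if already held
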
diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py
index 927ff75477f..7527ee67a14 100644
--- a/packages/service-library/tests/conftest.py
+++ b/packages/service-library/tests/conftest.py
@@ -80,9 +80,12 @@ async def _(
database: RedisDatabase, decode_response: bool = True # noqa: FBT002
) -> AsyncIterator[RedisClientSDK]:
redis_resources_dns = redis_service.build_redis_dsn(database)
- client = RedisClientSDK(redis_resources_dns, decode_responses=decode_response)
+ client = RedisClientSDK(
+ redis_resources_dns, decode_responses=decode_response, client_name="pytest"
+ )
assert client
assert client.redis_dsn == redis_resources_dns
+ assert client.client_name == "pytest"
await client.setup()
yield client
@@ -94,7 +97,9 @@ async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None:
await clients_manager.client(db).redis.flushall()
async with RedisClientsManager(
- {RedisManagerDBConfig(db) for db in RedisDatabase}, redis_service
+ {RedisManagerDBConfig(db) for db in RedisDatabase},
+ redis_service,
+ client_name="pytest",
) as clients_manager:
await _cleanup_redis_data(clients_manager)
yield _
diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py
index 8faea8d76ff..991aa2efe8e 100644
--- a/packages/service-library/tests/deferred_tasks/example_app.py
+++ b/packages/service-library/tests/deferred_tasks/example_app.py
@@ -60,6 +60,7 @@ def __init__(self, redis_settings: RedisSettings, port: int) -> None:
self.redis: Redis = RedisClientSDK(
redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=True,
+ client_name="example_app",
).redis
self.port = port
@@ -84,6 +85,7 @@ def __init__(
self._redis_client = RedisClientSDK(
redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=False,
+ client_name="example_app",
)
self._manager = DeferredManager(
rabbit_settings,
diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
index a5b45ed80d9..3aa5b53e7f5 100644
--- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
+++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
@@ -55,6 +55,7 @@ async def redis_client_sdk(
sdk = RedisClientSDK(
redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=False,
+ client_name="pytest",
)
await sdk.setup()
yield sdk
diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py
index 103584f3f77..daff1f8deaf 100644
--- a/packages/service-library/tests/fastapi/test_http_client_thin.py
+++ b/packages/service-library/tests/fastapi/test_http_client_thin.py
@@ -72,7 +72,9 @@ def request_timeout() -> int:
@pytest.fixture
async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]:
- async with FakeThickClient(total_retry_interval=request_timeout) as client:
+ async with FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ ) as client:
yield client
@@ -98,7 +100,9 @@ async def test_retry_on_errors(
test_url: str,
caplog_info_level: pytest.LogCaptureFixture,
) -> None:
- client = FakeThickClient(total_retry_interval=request_timeout)
+ client = FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
with pytest.raises(ClientHttpError):
await client.get_provided_url(test_url)
@@ -122,7 +126,7 @@ async def raises_request_error(self) -> Response:
request=Request(method="GET", url=test_url),
)
- client = ATestClient(total_retry_interval=request_timeout)
+ client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None)
with pytest.raises(ClientHttpError):
await client.raises_request_error()
@@ -148,7 +152,7 @@ async def raises_http_error(self) -> Response:
msg = "mock_http_error"
raise HTTPError(msg)
- client = ATestClient(total_retry_interval=request_timeout)
+ client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None)
with pytest.raises(ClientHttpError):
await client.raises_http_error()
@@ -162,21 +166,25 @@ async def public_method_ok(self) -> Response: # type: ignore
"""this method will be ok even if no code is used"""
# OK
- OKTestClient(total_retry_interval=request_timeout)
+ OKTestClient(total_retry_interval=request_timeout, tracing_settings=None)
class FailWrongAnnotationTestClient(BaseThinClient):
async def public_method_wrong_annotation(self) -> None:
"""this method will raise an error"""
with pytest.raises(AssertionError, match="should return an instance"):
- FailWrongAnnotationTestClient(total_retry_interval=request_timeout)
+ FailWrongAnnotationTestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
class FailNoAnnotationTestClient(BaseThinClient):
async def public_method_no_annotation(self):
"""this method will raise an error"""
with pytest.raises(AssertionError, match="should return an instance"):
- FailNoAnnotationTestClient(total_retry_interval=request_timeout)
+ FailNoAnnotationTestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
async def test_expect_state_decorator(
@@ -200,7 +208,9 @@ async def get_wrong_state(self) -> Response:
respx_mock.get(url_get_200_ok).mock(return_value=Response(codes.OK))
respx_mock.get(get_wrong_state).mock(return_value=Response(codes.OK))
- test_client = ATestClient(total_retry_interval=request_timeout)
+ test_client = ATestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
# OK
response = await test_client.get_200_ok()
@@ -221,7 +231,9 @@ async def test_retry_timeout_overwrite(
request_timeout: int,
caplog_info_level: pytest.LogCaptureFixture,
) -> None:
- client = FakeThickClient(total_retry_interval=request_timeout)
+ client = FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
caplog_info_level.clear()
start = arrow.utcnow()
diff --git a/packages/service-library/tests/test_pools.py b/packages/service-library/tests/test_pools.py
index 13c62ad0a3a..1604ba10147 100644
--- a/packages/service-library/tests/test_pools.py
+++ b/packages/service-library/tests/test_pools.py
@@ -1,4 +1,4 @@
-from asyncio import BaseEventLoop
+import asyncio
from concurrent.futures import ProcessPoolExecutor
from servicelib.pools import (
@@ -11,17 +11,25 @@ def return_int_one() -> int:
return 1
-async def test_default_thread_pool_executor(event_loop: BaseEventLoop) -> None:
- assert await event_loop.run_in_executor(None, return_int_one) == 1
+async def test_default_thread_pool_executor() -> None:
+ assert await asyncio.get_running_loop().run_in_executor(None, return_int_one) == 1
-async def test_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None:
- assert await event_loop.run_in_executor(ProcessPoolExecutor(), return_int_one) == 1
+async def test_blocking_process_pool_executor() -> None:
+ assert (
+ await asyncio.get_running_loop().run_in_executor(
+ ProcessPoolExecutor(), return_int_one
+ )
+ == 1
+ )
-async def test_non_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None:
+async def test_non_blocking_process_pool_executor() -> None:
with non_blocking_process_pool_executor() as executor:
- assert await event_loop.run_in_executor(executor, return_int_one) == 1
+ assert (
+ await asyncio.get_running_loop().run_in_executor(executor, return_int_one)
+ == 1
+ )
async def test_same_pool_instances() -> None:
@@ -36,9 +44,12 @@ async def test_different_pool_instances() -> None:
assert first != second
-async def test_non_blocking_thread_pool_executor(event_loop: BaseEventLoop) -> None:
+async def test_non_blocking_thread_pool_executor() -> None:
with non_blocking_thread_pool_executor() as executor:
- assert await event_loop.run_in_executor(executor, return_int_one) == 1
+ assert (
+ await asyncio.get_running_loop().run_in_executor(executor, return_int_one)
+ == 1
+ )
async def test_same_thread_pool_instances() -> None:
diff --git a/packages/service-library/tests/test_redis.py b/packages/service-library/tests/test_redis.py
index 7a3fa9b52d6..c120f85d344 100644
--- a/packages/service-library/tests/test_redis.py
+++ b/packages/service-library/tests/test_redis.py
@@ -277,7 +277,9 @@ async def test_redis_client_sdks_manager(
RedisManagerDBConfig(db) for db in RedisDatabase
}
manager = RedisClientsManager(
- databases_configs=all_redis_configs, settings=redis_service
+ databases_configs=all_redis_configs,
+ settings=redis_service,
+ client_name="pytest",
)
async with manager:
@@ -290,7 +292,7 @@ async def test_redis_client_sdk_setup_shutdown(
):
# setup
redis_resources_dns = redis_service.build_redis_dsn(RedisDatabase.RESOURCES)
- client = RedisClientSDK(redis_resources_dns)
+ client = RedisClientSDK(redis_resources_dns, client_name="pytest")
assert client
assert client.redis_dsn == redis_resources_dns
diff --git a/packages/service-library/tests/test_redis__recoonection.py b/packages/service-library/tests/test_redis__reconnection.py
similarity index 88%
rename from packages/service-library/tests/test_redis__recoonection.py
rename to packages/service-library/tests/test_redis__reconnection.py
index 89902a4b66e..8fe5a718527 100644
--- a/packages/service-library/tests/test_redis__recoonection.py
+++ b/packages/service-library/tests/test_redis__reconnection.py
@@ -21,9 +21,9 @@ async def test_redis_client_sdk_lost_connection(
docker_client: docker.client.DockerClient,
):
redis_client_sdk = RedisClientSDK(
- redis_service.build_redis_dsn(RedisDatabase.RESOURCES)
+ redis_service.build_redis_dsn(RedisDatabase.RESOURCES), client_name="pytest"
)
-
+ assert redis_client_sdk.client_name == "pytest"
await redis_client_sdk.setup()
assert await redis_client_sdk.ping() is True
@@ -41,3 +41,5 @@ async def test_redis_client_sdk_lost_connection(
):
with attempt:
assert await redis_client_sdk.ping() is False
+
+ await redis_client_sdk.shutdown()
diff --git a/packages/service-library/tests/test_redis_utils.py b/packages/service-library/tests/test_redis_utils.py
index f897fc7c399..26f749cd894 100644
--- a/packages/service-library/tests/test_redis_utils.py
+++ b/packages/service-library/tests/test_redis_utils.py
@@ -5,6 +5,7 @@
from contextlib import AbstractAsyncContextManager
from datetime import timedelta
from itertools import chain
+from typing import Awaitable
from unittest.mock import Mock
import arrow
@@ -32,39 +33,117 @@ async def _is_locked(redis_client_sdk: RedisClientSDK, lock_name: str) -> bool:
@pytest.fixture
def lock_name(faker: Faker) -> str:
- return faker.uuid4() # type: ignore
+ return faker.pystr()
+
+
+def _exclusive_sleeping_task(
+ redis_client_sdk: RedisClientSDK | Callable[..., RedisClientSDK],
+ lock_name: str | Callable[..., str],
+ sleep_duration: float,
+) -> Callable[..., Awaitable[float]]:
+ @exclusive(redis_client_sdk, lock_key=lock_name)
+ async def _() -> float:
+ resolved_client = (
+ redis_client_sdk() if callable(redis_client_sdk) else redis_client_sdk
+ )
+ resolved_lock_name = lock_name() if callable(lock_name) else lock_name
+ assert await _is_locked(resolved_client, resolved_lock_name)
+ await asyncio.sleep(sleep_duration)
+ assert await _is_locked(resolved_client, resolved_lock_name)
+ return sleep_duration
+
+ return _
+
+
+@pytest.fixture
+def sleep_duration(faker: Faker) -> float:
+ return faker.pyfloat(positive=True, min_value=0.2, max_value=0.8)
-async def _contained_client(
+async def test_exclusive_decorator(
get_redis_client_sdk: Callable[
[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
],
lock_name: str,
- task_duration: float,
-) -> None:
- async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
- assert not await _is_locked(redis_client_sdk, lock_name)
-
- @exclusive(redis_client_sdk, lock_key=lock_name)
- async def _some_task() -> None:
- assert await _is_locked(redis_client_sdk, lock_name)
- await asyncio.sleep(task_duration)
- assert await _is_locked(redis_client_sdk, lock_name)
-
- await _some_task()
+ sleep_duration: float,
+):
- assert not await _is_locked(redis_client_sdk, lock_name)
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ redis_client, lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
-@pytest.mark.parametrize("task_duration", [0.1, 1, 2])
-async def test_exclusive_sequentially(
+async def test_exclusive_decorator_with_key_builder(
get_redis_client_sdk: Callable[
[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
],
lock_name: str,
- task_duration: float,
+ sleep_duration: float,
):
- await _contained_client(get_redis_client_sdk, lock_name, task_duration)
+ def _get_lock_name(*args, **kwargs) -> str:
+ assert args is not None
+ assert kwargs is not None
+ return lock_name
+
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ redis_client, _get_lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
+
+
+async def test_exclusive_decorator_with_client_builder(
+ get_redis_client_sdk: Callable[
+ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
+ ],
+ lock_name: str,
+ sleep_duration: float,
+):
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+
+ def _get_redis_client_builder(*args, **kwargs) -> RedisClientSDK:
+ assert args is not None
+ assert kwargs is not None
+ return redis_client
+
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ _get_redis_client_builder, lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
+
+
+async def _acquire_lock_and_exclusively_sleep(
+ get_redis_client_sdk: Callable[
+ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
+ ],
+ lock_name: str | Callable[..., str],
+ sleep_duration: float,
+) -> None:
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
+ redis_lock_name = lock_name() if callable(lock_name) else lock_name
+ assert not await _is_locked(redis_client_sdk, redis_lock_name)
+
+ @exclusive(redis_client_sdk, lock_key=lock_name)
+ async def _() -> float:
+ assert await _is_locked(redis_client_sdk, redis_lock_name)
+ await asyncio.sleep(sleep_duration)
+ assert await _is_locked(redis_client_sdk, redis_lock_name)
+ return sleep_duration
+
+ assert await _() == sleep_duration
+
+ assert not await _is_locked(redis_client_sdk, redis_lock_name)
async def test_exclusive_parallel_lock_is_released_and_reacquired(
@@ -76,17 +155,19 @@ async def test_exclusive_parallel_lock_is_released_and_reacquired(
parallel_tasks = 10
results = await logged_gather(
*[
- _contained_client(get_redis_client_sdk, lock_name, task_duration=0.1)
+ _acquire_lock_and_exclusively_sleep(
+ get_redis_client_sdk, lock_name, sleep_duration=0.1
+ )
for _ in range(parallel_tasks)
],
- reraise=False
+ reraise=False,
)
assert results.count(None) == 1
assert [isinstance(x, CouldNotAcquireLockError) for x in results].count(
True
) == parallel_tasks - 1
- # check lock is being released
+ # check lock is released
async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
assert not await _is_locked(redis_client_sdk, lock_name)
@@ -168,7 +249,7 @@ async def test_start_exclusive_periodic_task_parallel_all_finish(
_assert_task_completes_once(get_redis_client_sdk, stop_after=60)
for _ in range(parallel_tasks)
],
- reraise=False
+ reraise=False,
)
# check no error occurred
diff --git a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py
index 7bfcd4cee69..ebcad03b031 100644
--- a/packages/service-library/tests/test_utils.py
+++ b/packages/service-library/tests/test_utils.py
@@ -5,7 +5,6 @@
import asyncio
from collections.abc import AsyncIterator, Awaitable, Coroutine, Iterator
from copy import copy, deepcopy
-from random import randint
from typing import NoReturn
from unittest import mock
@@ -66,7 +65,6 @@ def mock_logger(mocker: MockerFixture) -> Iterator[mock.Mock]:
async def test_logged_gather(
- event_loop: asyncio.AbstractEventLoop,
coros: list[Coroutine],
mock_logger: mock.Mock,
):
@@ -79,7 +77,7 @@ async def test_logged_gather(
# NOTE: only first error in the list is raised, since it is not RuntimeError, that task
assert isinstance(excinfo.value, ValueError)
- for task in asyncio.all_tasks(event_loop):
+ for task in asyncio.all_tasks(asyncio.get_running_loop()):
if task is not asyncio.current_task():
# info
task.print_stack()
@@ -148,7 +146,7 @@ async def test_fire_and_forget_1000s_tasks(faker: Faker):
tasks_collection = set()
async def _some_task(n: int) -> str:
- await asyncio.sleep(randint(1, 3))
+ await asyncio.sleep(faker.random_int(1, 3))
return f"I'm great since I slept a bit, and by the way I'm task {n}"
for n in range(1000):
@@ -251,7 +249,6 @@ async def test_limited_gather_limits(
async def test_limited_gather(
- event_loop: asyncio.AbstractEventLoop,
coros: list[Coroutine],
mock_logger: mock.Mock,
):
@@ -266,7 +263,7 @@ async def test_limited_gather(
unfinished_tasks = [
task
- for task in asyncio.all_tasks(event_loop)
+ for task in asyncio.all_tasks(asyncio.get_running_loop())
if task is not asyncio.current_task()
]
final_results = await asyncio.gather(*unfinished_tasks, return_exceptions=True)
@@ -288,9 +285,7 @@ async def test_limited_gather_wo_raising(
assert results[5] == 5
-async def test_limited_gather_cancellation(
- event_loop: asyncio.AbstractEventLoop, slow_successful_coros_list: list[Coroutine]
-):
+async def test_limited_gather_cancellation(slow_successful_coros_list: list[Coroutine]):
task = asyncio.create_task(limited_gather(*slow_successful_coros_list, limit=0))
await asyncio.sleep(3)
task.cancel()
@@ -300,7 +295,7 @@ async def test_limited_gather_cancellation(
# check all coros are cancelled
unfinished_tasks = [
task
- for task in asyncio.all_tasks(event_loop)
+ for task in asyncio.all_tasks(asyncio.get_running_loop())
if task is not asyncio.current_task()
]
assert not unfinished_tasks
diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt
index 75e9659cb85..d2fa58f9494 100644
--- a/packages/simcore-sdk/requirements/_base.txt
+++ b/packages/simcore-sdk/requirements/_base.txt
@@ -57,7 +57,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==24.2.0
diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt
index bb02d3eab70..dd7a2ade645 100644
--- a/services/agent/requirements/_base.txt
+++ b/services/agent/requirements/_base.txt
@@ -46,8 +46,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
@@ -156,6 +154,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -174,12 +173,15 @@ opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -198,6 +200,7 @@ opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -205,6 +208,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.7
# via
diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py
index ea4030bc46f..af2e2011823 100644
--- a/services/agent/src/simcore_service_agent/core/application.py
+++ b/services/agent/src/simcore_service_agent/core/application.py
@@ -5,6 +5,7 @@
get_common_oas_options,
override_fastapi_openapi_method,
)
+from servicelib.fastapi.tracing import setup_tracing
from servicelib.logging_utils import config_all_loggers
from .._meta import (
@@ -59,6 +60,9 @@ def create_app() -> FastAPI:
setup_rest_api(app)
setup_rpc_api_routes(app)
+ if settings.AGENT_TRACING:
+ setup_tracing(app, settings.AGENT_TRACING, APP_NAME)
+
async def _on_startup() -> None:
print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201
diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py
index 53cdb199f5e..3f7af297189 100644
--- a/services/agent/src/simcore_service_agent/core/settings.py
+++ b/services/agent/src/simcore_service_agent/core/settings.py
@@ -1,13 +1,13 @@
from datetime import timedelta
from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
-from common_library.pydantic_validators import validate_numeric_string_as_timedelta
from models_library.basic_types import BootModeEnum, LogLevel
from pydantic import AliasChoices, Field, field_validator
from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.base import BaseCustomSettings
from settings_library.r_clone import S3Provider
from settings_library.rabbit import RabbitSettings
+from settings_library.tracing import TracingSettings
from settings_library.utils_logging import MixinLoggingSettings
@@ -87,19 +87,9 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
json_schema_extra={"auto_default_from_env": True},
)
- _validate_agent_volumes_cleanup_interval = validate_numeric_string_as_timedelta(
- "AGENT_VOLUMES_CLEANUP_INTERVAL"
- )
-
- _validate_agent_volumes_cleanup_book_keeping_interval = (
- validate_numeric_string_as_timedelta(
- "AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL"
- )
- )
- _validate_agent_volumes_cleanup_remove_volumes_inactive_for = (
- validate_numeric_string_as_timedelta(
- "AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR"
- )
+ AGENT_TRACING: TracingSettings | None = Field(
+ description="settings for opentelemetry tracing",
+ json_schema_extra={"auto_default_from_env": True},
)
@field_validator("LOGLEVEL")
diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py
index c71656e2c08..a0c59ca9f37 100644
--- a/services/agent/tests/conftest.py
+++ b/services/agent/tests/conftest.py
@@ -59,6 +59,7 @@ def mock_environment(
"RABBIT_SECURE": "false",
"RABBIT_USER": "test",
"AGENT_DOCKER_NODE_ID": docker_node_id,
+ "AGENT_TRACING": "null",
},
)
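
The agent now uses the same optional-tracing pattern as the other services in this PR: a `TracingSettings | None` field whose value is JSON-decoded from the `AGENT_TRACING` env var. A minimal sketch of that behaviour, using plain pydantic-settings stand-ins rather than the repo's `BaseCustomSettings`, and with a hypothetical endpoint field:

```python
# Sketch only: plain pydantic-settings stand-ins, not the repo's
# BaseCustomSettings/TracingSettings. The endpoint field is hypothetical.
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings


class TracingSketch(BaseModel):
    collector_endpoint: str = "http://opentelemetry-collector:4318"  # hypothetical default


class AgentSettingsSketch(BaseSettings):
    AGENT_TRACING: TracingSketch | None = None


os.environ["AGENT_TRACING"] = "{}"  # as in .env-devel: enabled, all defaults
assert AgentSettingsSketch().AGENT_TRACING is not None

os.environ["AGENT_TRACING"] = "null"  # as in the test fixture: disabled
assert AgentSettingsSketch().AGENT_TRACING is None
```
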
diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt
index db3534f6526..8b08e429db1 100644
--- a/services/api-server/requirements/_base.txt
+++ b/services/api-server/requirements/_base.txt
@@ -90,7 +90,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -357,6 +356,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -380,6 +380,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -394,6 +395,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -419,6 +422,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -426,6 +430,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py
index 2e97391460b..8f9eed26ef3 100644
--- a/services/api-server/src/simcore_service_api_server/core/application.py
+++ b/services/api-server/src/simcore_service_api_server/core/application.py
@@ -82,19 +82,36 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI:
setup_rabbitmq(app)
+ if settings.API_SERVER_TRACING:
+ setup_tracing(app, settings.API_SERVER_TRACING, APP_NAME)
+
if settings.API_SERVER_WEBSERVER:
- webserver.setup(app, settings.API_SERVER_WEBSERVER)
- if app.state.settings.API_SERVER_TRACING:
- setup_tracing(app, app.state.settings.API_SERVER_TRACING, APP_NAME)
+ webserver.setup(
+ app,
+ settings.API_SERVER_WEBSERVER,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_CATALOG:
- catalog.setup(app, settings.API_SERVER_CATALOG)
+ catalog.setup(
+ app,
+ settings.API_SERVER_CATALOG,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_STORAGE:
- storage.setup(app, settings.API_SERVER_STORAGE)
+ storage.setup(
+ app,
+ settings.API_SERVER_STORAGE,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_DIRECTOR_V2:
- director_v2.setup(app, settings.API_SERVER_DIRECTOR_V2)
+ director_v2.setup(
+ app,
+ settings.API_SERVER_DIRECTOR_V2,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
# setup app
app.add_event_handler("startup", create_start_app_handler(app))
diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py
index 2bbc741b5f2..34f092a6191 100644
--- a/services/api-server/src/simcore_service_api_server/services/catalog.py
+++ b/services/api-server/src/simcore_service_api_server/services/catalog.py
@@ -12,6 +12,7 @@
from models_library.services import ServiceMetaDataPublished, ServiceType
from pydantic import ConfigDict, TypeAdapter, ValidationError
from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings
from simcore_service_api_server.exceptions.backend_errors import (
ListSolversOrStudiesError,
SolverOrStudyNotFoundError,
@@ -222,10 +223,16 @@ async def get_latest_release(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: CatalogSettings) -> None:
+def setup(
+ app: FastAPI, settings: CatalogSettings, tracing_settings: TracingSettings | None
+) -> None:
if not settings:
settings = CatalogSettings()
setup_client_instance(
- app, CatalogApi, api_baseurl=settings.api_base_url, service_name="catalog"
+ app,
+ CatalogApi,
+ api_baseurl=settings.api_base_url,
+ service_name="catalog",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py
index 5ddbc4d2bca..abf112212b4 100644
--- a/services/api-server/src/simcore_service_api_server/services/director_v2.py
+++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py
@@ -7,15 +7,8 @@
from models_library.projects_nodes_io import NodeID
from models_library.projects_pipeline import ComputationTask
from models_library.projects_state import RunningState
-from pydantic import (
- AnyHttpUrl,
- AnyUrl,
- BaseModel,
- ConfigDict,
- Field,
- PositiveInt,
- TypeAdapter,
-)
+from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, PositiveInt, TypeAdapter
+from settings_library.tracing import TracingSettings
from starlette import status
from ..core.settings import DirectorV2Settings
@@ -197,11 +190,14 @@ async def get_computation_logs(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: DirectorV2Settings) -> None:
+def setup(
+ app: FastAPI, settings: DirectorV2Settings, tracing_settings: TracingSettings | None
+) -> None:
setup_client_instance(
app,
DirectorV2Api,
# WARNING: it has /v0 and /v2 prefixes
api_baseurl=settings.base_url,
service_name="director_v2",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/storage.py b/services/api-server/src/simcore_service_api_server/services/storage.py
index 442520c1582..0095dd343f5 100644
--- a/services/api-server/src/simcore_service_api_server/services/storage.py
+++ b/services/api-server/src/simcore_service_api_server/services/storage.py
@@ -14,6 +14,7 @@
from models_library.basic_types import SHA256Str
from models_library.generics import Envelope
from pydantic import AnyUrl, PositiveInt
+from settings_library.tracing import TracingSettings
from starlette.datastructures import URL
from ..core.settings import StorageSettings
@@ -215,12 +216,18 @@ async def create_soft_link(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: StorageSettings) -> None:
+def setup(
+ app: FastAPI, settings: StorageSettings, tracing_settings: TracingSettings | None
+) -> None:
if not settings:
settings = StorageSettings()
setup_client_instance(
- app, StorageApi, api_baseurl=settings.api_base_url, service_name="storage"
+ app,
+ StorageApi,
+ api_baseurl=settings.api_base_url,
+ service_name="storage",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py
index 0c5428bf0e6..ac0437dbc7d 100644
--- a/services/api-server/src/simcore_service_api_server/services/webserver.py
+++ b/services/api-server/src/simcore_service_api_server/services/webserver.py
@@ -48,6 +48,7 @@
X_SIMCORE_PARENT_NODE_ID,
X_SIMCORE_PARENT_PROJECT_UUID,
)
+from settings_library.tracing import TracingSettings
from simcore_service_api_server.exceptions.backend_errors import (
ConfigurationError,
ForbiddenWalletError,
@@ -608,24 +609,30 @@ async def get_service_pricing_plan(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: WebServerSettings) -> None:
+def setup(
+ app: FastAPI,
+ webserver_settings: WebServerSettings,
+ tracing_settings: TracingSettings | None,
+) -> None:
setup_client_instance(
app,
WebserverApi,
- api_baseurl=settings.api_base_url,
+ api_baseurl=webserver_settings.api_base_url,
service_name="webserver",
+ tracing_settings=tracing_settings,
)
setup_client_instance(
app,
LongRunningTasksClient,
api_baseurl="",
service_name="long_running_tasks_client",
+ tracing_settings=tracing_settings,
)
def _on_startup() -> None:
# normalize & encrypt
- secret_key = settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
+ secret_key = webserver_settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
app.state.webserver_fernet = fernet.Fernet(secret_key)
async def _on_shutdown() -> None:
diff --git a/services/api-server/src/simcore_service_api_server/utils/client_base.py b/services/api-server/src/simcore_service_api_server/utils/client_base.py
index ed58f7429e3..3cc35a74bb6 100644
--- a/services/api-server/src/simcore_service_api_server/utils/client_base.py
+++ b/services/api-server/src/simcore_service_api_server/utils/client_base.py
@@ -4,6 +4,8 @@
import httpx
from fastapi import FastAPI
from httpx import AsyncClient
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from .app_data import AppDataMixin
@@ -43,14 +45,16 @@ def setup_client_instance(
api_cls: type[BaseServiceClientApi],
api_baseurl,
service_name: str,
+ tracing_settings: TracingSettings | None,
**extra_fields,
) -> None:
"""Helper to add init/cleanup of ServiceClientApi instances in the app lifespam"""
assert issubclass(api_cls, BaseServiceClientApi) # nosec
-
    # NOTE: this term is mocked in tests. If you need to modify it, pay attention to the mock
client = AsyncClient(base_url=api_baseurl)
+ if tracing_settings:
+ setup_httpx_client_tracing(client)
# events
def _create_instance() -> None:
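
`setup_httpx_client_tracing` is the single point where an individual `AsyncClient` gets instrumented. A plausible minimal body, assuming the helper delegates to the newly pinned `opentelemetry-instrumentation-httpx` package; the real implementation lives in `servicelib.fastapi.tracing` and may differ:

```python
# Plausible sketch of the helper, assuming it wraps
# opentelemetry-instrumentation-httpx; the real code is in
# servicelib.fastapi.tracing.
import httpx
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor


def setup_httpx_client_tracing_sketch(client: httpx.AsyncClient) -> None:
    # instruments this client instance only: spans are emitted per request,
    # while other httpx clients in the process stay untouched
    HTTPXClientInstrumentor.instrument_client(client)
```
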
diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py
index f6b932f9281..a8ade97aee9 100644
--- a/services/api-server/tests/unit/conftest.py
+++ b/services/api-server/tests/unit/conftest.py
@@ -62,6 +62,7 @@ def app_environment(
"WEBSERVER_HOST": "webserver",
"API_SERVER_POSTGRES": "null",
"API_SERVER_RABBITMQ": "null",
+ "API_SERVER_TRACING": "null",
"LOG_LEVEL": "debug",
"SC_BOOT_MODE": "production",
"API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3",
diff --git a/services/api-server/tests/unit/test_utils_client_base.py b/services/api-server/tests/unit/test_utils_client_base.py
index 61370a8ea52..9fe2da1a28c 100644
--- a/services/api-server/tests/unit/test_utils_client_base.py
+++ b/services/api-server/tests/unit/test_utils_client_base.py
@@ -43,6 +43,7 @@ class TheClientApi(BaseServiceClientApi):
service_name="the_service",
health_check_path="/health",
x=42,
+ tracing_settings=None,
)
assert not TheClientApi.get_instance(app)
diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt
index e7e0807dba8..ba3c992df23 100644
--- a/services/autoscaling/requirements/_base.txt
+++ b/services/autoscaling/requirements/_base.txt
@@ -79,8 +79,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -298,6 +296,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -320,6 +319,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -328,6 +328,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -354,6 +356,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -361,6 +364,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.3
# via
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 2cef475fc17..e019e4f118b 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -10,10 +10,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
index 29fed9c6b97..60ce15df956 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = app.state.settings.AUTOSCALING_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
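
Passing `client_name=APP_NAME` makes Redis connections attributable per service: redis-py issues `CLIENT SETNAME` on connect, so each service shows up by name in `CLIENT LIST`. Sketched with plain redis-py, which `RedisClientSDK` presumably wraps:

```python
# Sketch with plain redis-py; the repo wraps this in servicelib's RedisClientSDK.
import redis.asyncio as aioredis

client = aioredis.from_url(
    "redis://redis:6379/0",
    client_name="simcore_service_autoscaling",  # assumed value of APP_NAME
)
# redis-cli> CLIENT LIST
#   ... name=simcore_service_autoscaling ...
```
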
diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py
index 5fafbf5fae1..ed95060d7d6 100644
--- a/services/autoscaling/tests/unit/conftest.py
+++ b/services/autoscaling/tests/unit/conftest.py
@@ -227,7 +227,7 @@ def app_environment(
"AUTOSCALING_EC2_SECRET_ACCESS_KEY": faker.pystr(),
"AUTOSCALING_EC2_INSTANCES": "{}",
"AUTOSCALING_SSM_ACCESS": "{}",
- "AUTOSCALING_TRACING": "{}",
+ "AUTOSCALING_TRACING": "null",
"SSM_ACCESS_KEY_ID": faker.pystr(),
"SSM_SECRET_ACCESS_KEY": faker.pystr(),
"EC2_INSTANCES_KEY_NAME": faker.pystr(),
@@ -372,13 +372,6 @@ def disabled_ec2(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch)
monkeypatch.setenv("AUTOSCALING_EC2_ACCESS", "null")
-@pytest.fixture
-def disabled_opentelemetry(
- app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
-) -> None:
- monkeypatch.setenv("AUTOSCALING_TRACING", "null")
-
-
@pytest.fixture
def disabled_ssm(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setenv("AUTOSCALING_SSM_ACCESS", "null")
diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py
index 8ad55ec40f0..5b30abe878c 100644
--- a/services/autoscaling/tests/unit/test_core_settings.py
+++ b/services/autoscaling/tests/unit/test_core_settings.py
@@ -5,6 +5,7 @@
import datetime
import json
+import os
import pytest
from faker import Faker
@@ -204,11 +205,42 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_passing_valid_image_tags( # noqa: N802
def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed( # noqa: N802
app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
):
+ assert app_environment["AUTOSCALING_EC2_INSTANCES"] == "{}"
monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
- with pytest.raises(ValidationError):
+ # test child settings
+ with pytest.raises(ValidationError) as err_info:
+ EC2InstancesSettings.create_from_envs()
+
+ assert err_info.value.errors()[0]["loc"] == ("EC2_INSTANCES_ALLOWED_TYPES",)
+
+
+def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_with_main_field_env_var( # noqa: N802
+ app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+):
+ assert os.environ["AUTOSCALING_EC2_INSTANCES"] == "{}"
+ monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
+
+ # now as part of AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None
+ with pytest.raises(ValidationError) as exc_before:
+ ApplicationSettings.create_from_envs(AUTOSCALING_EC2_INSTANCES={})
+
+ with pytest.raises(ValidationError) as exc_after:
ApplicationSettings.create_from_envs()
+ assert exc_before.value.errors() == exc_after.value.errors()
+
+
+def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_without_main_field_env_var( # noqa: N802
+ app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+):
+ monkeypatch.delenv("AUTOSCALING_EC2_INSTANCES")
+ monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
+
+    # with AUTOSCALING_EC2_INSTANCES unset, the parent field auto-defaults to None
+ settings = ApplicationSettings.create_from_envs()
+ assert settings.AUTOSCALING_EC2_INSTANCES is None
+
@pytest.mark.xfail(
reason="disabling till pydantic2 migration is complete see https://github.com/ITISFoundation/osparc-simcore/pull/6705"
diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
index 8fc7c91caab..f9e0e4c416d 100644
--- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
+++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
@@ -79,7 +79,6 @@ def minimal_configuration(
local_dask_scheduler_server_envs: EnvVarsDict,
mocked_ec2_instances_envs: EnvVarsDict,
disabled_rabbitmq: None,
- disabled_opentelemetry: None,
disable_dynamic_service_background_task: None,
disable_buffers_pool_background_task: None,
mocked_redis_server: None,
diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt
index b8aec0c2f54..6b8253888a8 100644
--- a/services/catalog/requirements/_base.txt
+++ b/services/catalog/requirements/_base.txt
@@ -50,9 +50,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via
# -r requirements/_base.in
@@ -223,6 +221,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -242,6 +241,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -250,6 +250,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -269,6 +271,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -276,6 +279,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py
index a753f206a3e..6ed95110c39 100644
--- a/services/catalog/src/simcore_service_catalog/core/application.py
+++ b/services/catalog/src/simcore_service_catalog/core/application.py
@@ -46,8 +46,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
# STATE
app.state.settings = settings
+ if settings.CATALOG_TRACING:
+ setup_tracing(app, settings.CATALOG_TRACING, APP_NAME)
+
# STARTUP-EVENT
- app.add_event_handler("startup", create_on_startup(app))
+ app.add_event_handler(
+ "startup", create_on_startup(app, tracing_settings=settings.CATALOG_TRACING)
+ )
# PLUGIN SETUP
setup_function_services(app)
@@ -65,8 +70,6 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
app.add_middleware(
BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header
)
- if app.state.settings.CATALOG_TRACING:
- setup_tracing(app, app.state.settings.CATALOG_TRACING, APP_NAME)
app.add_middleware(GZipMiddleware)
diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py
index f22adbba4ec..dde295a2e56 100644
--- a/services/catalog/src/simcore_service_catalog/core/events.py
+++ b/services/catalog/src/simcore_service_catalog/core/events.py
@@ -5,6 +5,7 @@
from fastapi import FastAPI
from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings
from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG
from ..db.events import setup_default_product
@@ -26,7 +27,9 @@ def _flush_finished_banner() -> None:
print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201
-def create_on_startup(app: FastAPI) -> EventCallable:
+def create_on_startup(
+ app: FastAPI, tracing_settings: TracingSettings | None
+) -> EventCallable:
async def _() -> None:
_flush_started_banner()
@@ -37,7 +40,7 @@ async def _() -> None:
if app.state.settings.CATALOG_DIRECTOR:
# setup connection to director
- await setup_director(app)
+ await setup_director(app, tracing_settings=tracing_settings)
# FIXME: check director service is in place and ready. Hand-shake??
# SEE https://github.com/ITISFoundation/osparc-simcore/issues/1728
diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py
index a762892dd77..41b975c4e60 100644
--- a/services/catalog/src/simcore_service_catalog/services/director.py
+++ b/services/catalog/src/simcore_service_catalog/services/director.py
@@ -11,7 +11,9 @@
from fastapi import FastAPI, HTTPException
from models_library.services_metadata_published import ServiceMetaDataPublished
from models_library.services_types import ServiceKey, ServiceVersion
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings
from starlette import status
from tenacity.asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
@@ -106,11 +108,15 @@ class DirectorApi:
SEE services/catalog/src/simcore_service_catalog/api/dependencies/director.py
"""
- def __init__(self, base_url: str, app: FastAPI):
+ def __init__(
+ self, base_url: str, app: FastAPI, tracing_settings: TracingSettings | None
+ ):
self.client = httpx.AsyncClient(
base_url=base_url,
timeout=app.state.settings.CATALOG_CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
)
+ if tracing_settings:
+ setup_httpx_client_tracing(self.client)
self.vtag = app.state.settings.CATALOG_DIRECTOR.DIRECTOR_VTAG
async def close(self):
@@ -151,15 +157,25 @@ async def get_service(
return ServiceMetaDataPublished.model_validate(data[0])
-async def setup_director(app: FastAPI) -> None:
+async def setup_director(
+ app: FastAPI, tracing_settings: TracingSettings | None
+) -> None:
if settings := app.state.settings.CATALOG_DIRECTOR:
with log_context(
_logger, logging.DEBUG, "Setup director at %s", f"{settings.base_url=}"
):
async for attempt in AsyncRetrying(**_director_startup_retry_policy):
- client = DirectorApi(base_url=settings.base_url, app=app)
+ client = DirectorApi(
+ base_url=settings.base_url,
+ app=app,
+ tracing_settings=tracing_settings,
+ )
with attempt:
- client = DirectorApi(base_url=settings.base_url, app=app)
+ client = DirectorApi(
+ base_url=settings.base_url,
+ app=app,
+ tracing_settings=tracing_settings,
+ )
if not await client.is_responsive():
with suppress(Exception):
await client.close()
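
For reference, the tenacity idiom used in `setup_director`, isolated into a self-contained sketch; the retry policy below is illustrative, the real `_director_startup_retry_policy` is defined outside this hunk:

```python
# Each loop iteration opens an `attempt` context; exceptions raised inside
# `with attempt:` trigger the next retry until the policy gives up.
from collections.abc import Awaitable, Callable

from tenacity.asyncio import AsyncRetrying
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_fixed


async def wait_until_responsive(is_responsive: Callable[[], Awaitable[bool]]) -> None:
    async for attempt in AsyncRetrying(stop=stop_after_delay(60), wait=wait_fixed(2)):
        with attempt:
            if not await is_responsive():
                msg = "director not ready"
                raise RuntimeError(msg)  # retried per the policy above
```
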
diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py
index 278af317091..184acf22a68 100644
--- a/services/catalog/tests/unit/conftest.py
+++ b/services/catalog/tests/unit/conftest.py
@@ -88,8 +88,6 @@ def app_environment(
return setenvs_from_dict(
monkeypatch,
{
- "SC_BOOT_MODE": "production",
- "SC_BOOT_TARGET": "null",
**docker_compose_service_environment_dict,
"CATALOG_TRACING": "null",
},
diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt
index c0a8246fc0e..24cf9913155 100644
--- a/services/clusters-keeper/requirements/_base.txt
+++ b/services/clusters-keeper/requirements/_base.txt
@@ -77,8 +77,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -296,6 +294,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -318,6 +317,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -326,6 +326,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -352,6 +354,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -359,6 +362,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.3
# via
diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt
index 9df0f3305b2..4e297870fd4 100644
--- a/services/clusters-keeper/requirements/_test.txt
+++ b/services/clusters-keeper/requirements/_test.txt
@@ -23,10 +23,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
index 410edba1efb..d2e8f6e4c6f 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
@@ -6,6 +6,7 @@
from servicelib.background_task import start_periodic_task, stop_periodic_task
from servicelib.redis_utils import exclusive
+from .._meta import APP_NAME
from ..core.settings import ApplicationSettings
from ..modules.redis import get_redis_client
from .clusters_management_core import check_clusters
@@ -19,7 +20,7 @@ def on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]:
async def _startup() -> None:
app_settings: ApplicationSettings = app.state.settings
- lock_key = f"{app.title}:clusters-management_lock"
+ lock_key = f"{APP_NAME}:clusters-management_lock"
lock_value = json.dumps({})
app.state.clusters_cleaning_task = start_periodic_task(
exclusive(get_redis_client(app), lock_key=lock_key, lock_value=lock_value)(
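
Switching the lock key from `app.title` to the `APP_NAME` constant keeps the distributed-lock key stable even if the FastAPI title is ever reworded. The `exclusive` helper appears in two call styles in this PR; sketched side by side as comments (its internals live in `servicelib.redis_utils`):

```python
# Sketch of the two call styles of servicelib.redis_utils.exclusive seen in
# this PR; redis_client/lock arguments are placeholders.
#
# 1) wrap an existing coroutine function (as in the hunk above):
#    task = start_periodic_task(
#        exclusive(redis_client, lock_key=lock_key, lock_value=lock_value)(check_clusters),
#        ...
#    )
#
# 2) as a decorator with per-call resolvers (comp scheduler, further below):
#    @exclusive(redis=_redis_client_getter, lock_key=_build_exclusive_lock_key)
#    async def _schedule_pipeline(...): ...
#
# Both paths raise CouldNotAcquireLockError when another holder owns the lock.
```
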
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
index 08f0ff54f73..a0a0d6a8745 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
@@ -5,6 +5,7 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
from ..core.settings import get_application_settings
logger = logging.getLogger(__name__)
@@ -15,7 +16,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = get_application_settings(app).CLUSTERS_KEEPER_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py
index da94a9886f3..432d743fb0c 100644
--- a/services/clusters-keeper/tests/unit/conftest.py
+++ b/services/clusters-keeper/tests/unit/conftest.py
@@ -119,6 +119,7 @@ def app_environment(
envs = setenvs_from_dict(
monkeypatch,
{
+ "CLUSTERS_KEEPER_TRACING": "null",
"CLUSTERS_KEEPER_EC2_ACCESS": "{}",
"CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID": faker.pystr(),
"CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY": faker.pystr(),
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index fe622316c3b..e3cd751062d 100644
--- a/services/dask-sidecar/requirements/_base.txt
+++ b/services/dask-sidecar/requirements/_base.txt
@@ -57,8 +57,6 @@ arrow==1.3.0
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt
index 845c1784e56..ef6b7c14ebb 100644
--- a/services/datcore-adapter/requirements/_base.txt
+++ b/services/datcore-adapter/requirements/_base.txt
@@ -47,8 +47,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -179,6 +177,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -197,12 +196,15 @@ opentelemetry-instrumentation==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.47b0
@@ -221,6 +223,7 @@ opentelemetry-semantic-conventions==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -228,6 +231,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py
index 0130fda027a..19ae09e588f 100644
--- a/services/datcore-adapter/tests/unit/conftest.py
+++ b/services/datcore-adapter/tests/unit/conftest.py
@@ -15,9 +15,8 @@
import simcore_service_datcore_adapter
from asgi_lifespan import LifespanManager
from fastapi.applications import FastAPI
-from models_library.basic_types import BootModeEnum
from pytest_mock import MockFixture
-from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
from simcore_service_datcore_adapter.modules.pennsieve import (
PennsieveAuthorizationHeaders,
)
@@ -25,6 +24,7 @@
from starlette.testclient import TestClient
pytest_plugins = [
+ "pytest_simcore.environment_configs",
"pytest_simcore.repository_paths",
"pytest_simcore.pytest_global_environs",
]
@@ -79,9 +79,16 @@ def client(minimal_app: FastAPI) -> TestClient:
@pytest.fixture
-def app_envs(monkeypatch: pytest.MonkeyPatch):
- # disable tracing as together with LifespanManager, it does not remove itself nicely
- return setenvs_from_dict(monkeypatch, {"SC_BOOT_MODE": BootModeEnum.DEBUG})
+def app_envs(
+ mock_env_devel_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+) -> EnvVarsDict:
+ return setenvs_from_dict(
+ monkeypatch,
+ {
+ **mock_env_devel_environment,
+ "DATCORE_ADAPTER_TRACING": "null",
+ },
+ )
@pytest.fixture()
diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt
index 7166d62daaf..4ce57c6439b 100644
--- a/services/director-v2/requirements/_base.txt
+++ b/services/director-v2/requirements/_base.txt
@@ -100,7 +100,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -412,6 +411,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -435,6 +435,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -449,6 +450,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -474,6 +477,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -481,6 +485,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
ordered-set==4.1.0
# via -r requirements/_base.in
diff --git a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
index a0903608789..aa01af1f34b 100644
--- a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
@@ -1,3 +1,5 @@
+from typing import Annotated
+
from fastapi import Depends, FastAPI, Request
from ...core.settings import ComputationalBackendSettings
@@ -11,7 +13,7 @@ def get_scheduler(request: Request) -> BaseCompScheduler:
def get_scheduler_settings(
- app: FastAPI = Depends(get_app),
+ app: Annotated[FastAPI, Depends(get_app)]
) -> ComputationalBackendSettings:
settings: ComputationalBackendSettings = (
app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_client.py b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
index 541d90688dc..872c08f3b5f 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
@@ -12,7 +12,9 @@ class ThinDV2LocalhostClient(BaseThinClient):
def __init__(self):
super().__init__(
- total_retry_interval=10, default_http_client_timeout=Timeout(5)
+ total_retry_interval=10,
+ default_http_client_timeout=Timeout(5),
+ tracing_settings=None,
)
def _get_url(self, postfix: str) -> str:
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
index 028a882f268..67a99d4886f 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
@@ -36,13 +36,16 @@
async def _initialized_app(only_db: bool = False) -> AsyncIterator[FastAPI]:
app = create_base_app()
settings: AppSettings = app.state.settings
-
# Initialize minimal required components for the application
db.setup(app, settings.POSTGRES)
if not only_db:
dynamic_sidecar.setup(app)
- director_v0.setup(app, settings.DIRECTOR_V0)
+ director_v0.setup(
+ app,
+ director_v0_settings=settings.DIRECTOR_V0,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
await app.router.startup()
yield app
diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py
index d611351f782..f8dc9e2eaa8 100644
--- a/services/director-v2/src/simcore_service_director_v2/core/application.py
+++ b/services/director-v2/src/simcore_service_director_v2/core/application.py
@@ -150,19 +150,34 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:
substitutions.setup(app)
+ if settings.DIRECTOR_V2_TRACING:
+ setup_tracing(app, settings.DIRECTOR_V2_TRACING, APP_NAME)
+
if settings.DIRECTOR_V0.DIRECTOR_V0_ENABLED:
- director_v0.setup(app, settings.DIRECTOR_V0)
+ director_v0.setup(
+ app,
+ director_v0_settings=settings.DIRECTOR_V0,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
if settings.DIRECTOR_V2_STORAGE:
- storage.setup(app, settings.DIRECTOR_V2_STORAGE)
+ storage.setup(
+ app,
+ storage_settings=settings.DIRECTOR_V2_STORAGE,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
if settings.DIRECTOR_V2_CATALOG:
- catalog.setup(app, settings.DIRECTOR_V2_CATALOG)
+ catalog.setup(
+ app,
+ catalog_settings=settings.DIRECTOR_V2_CATALOG,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
db.setup(app, settings.POSTGRES)
if settings.DYNAMIC_SERVICES.DIRECTOR_V2_DYNAMIC_SERVICES_ENABLED:
- dynamic_services.setup(app)
+ dynamic_services.setup(app, tracing_settings=settings.DIRECTOR_V2_TRACING)
dynamic_scheduler_enabled = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR and (
settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
@@ -193,8 +208,6 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:
if settings.DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED:
instrumentation.setup(app)
- if settings.DIRECTOR_V2_TRACING:
- setup_tracing(app, app.state.settings.DIRECTOR_V2_TRACING, APP_NAME)
if settings.DIRECTOR_V2_PROFILING:
app.add_middleware(ProfilerMiddleware)
diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
index 9b466bbc49f..2c671d2c146 100644
--- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
+++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
@@ -44,8 +44,9 @@ class CompRunsAtDB(BaseModel):
result: RunningState
created: datetime.datetime
modified: datetime.datetime
- started: datetime.datetime | None = None
- ended: datetime.datetime | None = None
+ started: datetime.datetime | None
+ ended: datetime.datetime | None
+ cancelled: datetime.datetime | None
metadata: RunMetadataDict = RunMetadataDict()
use_on_demand_clusters: bool
@@ -93,9 +94,22 @@ def convert_null_to_empty_metadata(cls, v):
"user_id": 132,
"cluster_id": 0,
"iteration": 42,
+ "result": "UNKNOWN",
+ "created": "2021-03-01 13:07:34.19161",
+ "modified": "2021-03-01 13:07:34.19161",
+ "cancelled": None,
+ "use_on_demand_clusters": False,
+ },
+ {
+ "run_id": 432,
+ "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5",
+ "user_id": 132,
+                "cluster_id": None,  # this defaults to DEFAULT_CLUSTER_ID
+ "iteration": 42,
"result": "NOT_STARTED",
"created": "2021-03-01 13:07:34.19161",
"modified": "2021-03-01 13:07:34.19161",
+ "cancelled": None,
"use_on_demand_clusters": False,
},
{
@@ -109,6 +123,7 @@ def convert_null_to_empty_metadata(cls, v):
"modified": "2021-03-01 13:07:34.19161",
"started": "2021-03-01 08:07:34.19161",
"ended": "2021-03-01 13:07:34.10",
+ "cancelled": None,
"metadata": {
"node_id_names_map": {},
"product_name": "osparc",
@@ -118,6 +133,21 @@ def convert_null_to_empty_metadata(cls, v):
},
"use_on_demand_clusters": False,
},
+ {
+ "run_id": 43243,
+ "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5",
+ "user_id": 132,
+ "cluster_id": 123,
+ "iteration": 12,
+ "result": "SUCCESS",
+ "created": "2021-03-01 13:07:34.19161",
+ "modified": "2021-03-01 13:07:34.19161",
+                "started": "2021-03-01 08:07:34.19161",
+ "ended": "2021-03-01 13:07:34.10",
+ "cancelled": None,
+ "metadata": None,
+ "use_on_demand_clusters": False,
+ },
]
},
)
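
Dropping the `= None` defaults makes `started`, `ended`, and the new `cancelled` field required-but-nullable in pydantic v2, which is why every json-schema example above now spells out `"cancelled": None`. A stand-alone illustration of that behaviour:

```python
# Stand-in model illustrating the required-but-nullable change; not CompRunsAtDB.
from datetime import datetime

from pydantic import BaseModel, ValidationError


class RunSketch(BaseModel):
    started: datetime | None
    ended: datetime | None
    cancelled: datetime | None  # no default: None must be passed explicitly


RunSketch(started=None, ended=None, cancelled=None)  # ok

try:
    RunSketch(started=None, ended=None)  # "cancelled" missing
except ValidationError as err:
    assert err.errors()[0]["loc"] == ("cancelled",)
```
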
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
index 2a064d14642..6125c4cfb02 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
@@ -9,26 +9,37 @@
from models_library.services_resources import ServiceResourcesDict
from models_library.users import UserID
from pydantic import TypeAdapter
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings
from ..utils.client_decorators import handle_errors, handle_retry
logger = logging.getLogger(__name__)
-def setup(app: FastAPI, settings: CatalogSettings) -> None:
- if not settings:
- settings = CatalogSettings()
+def setup(
+ app: FastAPI,
+ catalog_settings: CatalogSettings | None,
+ tracing_settings: TracingSettings | None,
+) -> None:
+
+ if not catalog_settings:
+ catalog_settings = CatalogSettings()
async def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{catalog_settings.api_base_url}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
+
CatalogClient.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.api_base_url}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
)
- logger.debug("created client for catalog: %s", settings.api_base_url)
+ logger.debug("created client for catalog: %s", catalog_settings.api_base_url)
# Here we currently do not ensure the catalog is up on start
# This will need to be assessed.
@@ -90,9 +101,9 @@ async def get_service_resources(
)
resp.raise_for_status()
if resp.status_code == status.HTTP_200_OK:
- json_response: ServiceResourcesDict = TypeAdapter(ServiceResourcesDict).validate_python(
- resp.json()
- )
+ json_response: ServiceResourcesDict = TypeAdapter(
+ ServiceResourcesDict
+ ).validate_python(resp.json())
return json_response
raise HTTPException(status_code=resp.status_code, detail=resp.content)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
index 1eb6c3dab10..d06c37457b7 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
@@ -1,7 +1,38 @@
+import logging
+from collections.abc import Callable, Coroutine
+from typing import Any, cast
+
from fastapi import FastAPI
+from servicelib.logging_utils import log_context
+from . import _scheduler_factory
from ._base_scheduler import BaseCompScheduler
-from ._task import on_app_shutdown, on_app_startup
+
+_logger = logging.getLogger(__name__)
+
+
+def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
+ async def start_scheduler() -> None:
+ with log_context(
+ _logger, level=logging.INFO, msg="starting computational scheduler"
+ ):
+ app.state.scheduler = scheduler = await _scheduler_factory.create_from_db(
+ app
+ )
+ scheduler.recover_scheduling()
+
+ return start_scheduler
+
+
+def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
+ async def stop_scheduler() -> None:
+ await get_scheduler(app).shutdown()
+
+ return stop_scheduler
+
+
+def get_scheduler(app: FastAPI) -> BaseCompScheduler:
+ return cast(BaseCompScheduler, app.state.scheduler)
def setup(app: FastAPI):
@@ -12,4 +43,5 @@ def setup(app: FastAPI):
__all__: tuple[str, ...] = (
"setup",
"BaseCompScheduler",
+ "get_scheduler",
)
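
The `setup()` body itself is outside this hunk, but given the new hooks it presumably registers them as event handlers; a sketch of the implied wiring:

```python
# Sketch of the wiring implied by the new hooks; the actual setup() body is
# not part of this hunk.
from fastapi import FastAPI


def setup_sketch(app: FastAPI) -> None:
    app.add_event_handler("startup", on_app_startup(app))
    app.add_event_handler("shutdown", on_app_shutdown(app))
```
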
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
index 08396686e43..097afd95288 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
@@ -12,7 +12,9 @@
"""
import asyncio
+import contextlib
import datetime
+import functools
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
@@ -29,9 +31,12 @@
from models_library.users import UserID
from networkx.classes.reportviews import InDegreeView
from pydantic import PositiveInt
+from servicelib.background_task import start_periodic_task, stop_periodic_task
from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
+from servicelib.logging_utils import log_context
from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
-from servicelib.utils import limited_gather
+from servicelib.redis import CouldNotAcquireLockError, RedisClientSDK
+from servicelib.redis_utils import exclusive
from ...constants import UNDEFINED_STR_METADATA
from ...core.errors import (
@@ -47,7 +52,7 @@
)
from ...core.settings import ComputationalBackendSettings
from ...models.comp_pipelines import CompPipelineAtDB
-from ...models.comp_runs import CompRunsAtDB, RunMetadataDict
+from ...models.comp_runs import RunMetadataDict
from ...models.comp_tasks import CompTaskAtDB
from ...utils.comp_scheduler import (
COMPLETED_STATES,
@@ -76,6 +81,10 @@
_Previous = CompTaskAtDB
_Current = CompTaskAtDB
_MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN: Final[int] = 10
+_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(seconds=5)
+_TASK_NAME_TEMPLATE: Final[
+ str
+] = "computational-scheduler-{user_id}:{project_id}:{iteration}"
@dataclass(frozen=True, slots=True)
@@ -131,9 +140,15 @@ async def _triage_changed_tasks(
class ScheduledPipelineParams:
cluster_id: ClusterID
run_metadata: RunMetadataDict
- mark_for_cancellation: bool = False
+ mark_for_cancellation: datetime.datetime | None
use_on_demand_clusters: bool
+ scheduler_task: asyncio.Task | None = None
+ scheduler_waker: asyncio.Event = field(default_factory=asyncio.Event)
+
+ def wake_up(self) -> None:
+ self.scheduler_waker.set()
+
@dataclass
class BaseCompScheduler(ABC):
@@ -146,6 +161,7 @@ class BaseCompScheduler(ABC):
rabbitmq_rpc_client: RabbitMQRPCClient
settings: ComputationalBackendSettings
service_runtime_heartbeat_interval: datetime.timedelta
+ redis_client: RedisClientSDK
async def run_new_pipeline(
self,
@@ -169,7 +185,7 @@ async def run_new_pipeline(
return
runs_repo = CompRunsRepository.instance(self.db_engine)
- new_run: CompRunsAtDB = await runs_repo.create(
+ new_run = await runs_repo.create(
user_id=user_id,
project_id=project_id,
cluster_id=cluster_id,
@@ -178,10 +194,11 @@ async def run_new_pipeline(
)
self.scheduled_pipelines[
(user_id, project_id, new_run.iteration)
- ] = ScheduledPipelineParams(
+ ] = pipeline_params = ScheduledPipelineParams(
cluster_id=cluster_id,
run_metadata=new_run.metadata,
use_on_demand_clusters=use_on_demand_clusters,
+ mark_for_cancellation=None,
)
await publish_project_log(
self.rabbitmq_client,
@@ -190,8 +207,8 @@ async def run_new_pipeline(
log=f"Project pipeline scheduled using {'on-demand clusters' if use_on_demand_clusters else 'pre-defined clusters'}, starting soon...",
log_level=logging.INFO,
)
- # ensure the scheduler starts right away
- self._wake_up_scheduler_now()
+
+ self._start_scheduling(pipeline_params, user_id, project_id, new_run.iteration)
async def stop_pipeline(
self, user_id: UserID, project_id: ProjectID, iteration: int | None = None
@@ -212,33 +229,87 @@ async def stop_pipeline(
selected_iteration = iteration
# mark the scheduled pipeline for stopping
- self.scheduled_pipelines[
- (user_id, project_id, selected_iteration)
- ].mark_for_cancellation = True
- # ensure the scheduler starts right away
- self._wake_up_scheduler_now()
-
- async def schedule_all_pipelines(self) -> None:
- self.wake_up_event.clear()
- # if one of the task throws, the other are NOT cancelled which is what we want
- await limited_gather(
+ updated_comp_run = await CompRunsRepository.instance(
+ self.db_engine
+ ).mark_for_cancellation(
+ user_id=user_id, project_id=project_id, iteration=selected_iteration
+ )
+ if updated_comp_run:
+ assert updated_comp_run.cancelled is not None # nosec
+ self.scheduled_pipelines[
+ (user_id, project_id, selected_iteration)
+ ].mark_for_cancellation = updated_comp_run.cancelled
+ # ensure the scheduler starts right away
+ self.scheduled_pipelines[
+ (user_id, project_id, selected_iteration)
+ ].wake_up()
+
+ def recover_scheduling(self) -> None:
+ for (
+ user_id,
+ project_id,
+ iteration,
+ ), params in self.scheduled_pipelines.items():
+ self._start_scheduling(params, user_id, project_id, iteration)
+
+ async def shutdown(self) -> None:
+ # cancel all current scheduling processes
+ await asyncio.gather(
*(
- self._schedule_pipeline(
+ stop_periodic_task(p.scheduler_task, timeout=3)
+ for p in self.scheduled_pipelines.values()
+ if p.scheduler_task
+ ),
+ return_exceptions=True,
+ )
+
+ def _get_last_iteration(self, user_id: UserID, project_id: ProjectID) -> Iteration:
+ # if no iteration given find the latest one in the list
+ possible_iterations = {
+ it
+ for u_id, p_id, it in self.scheduled_pipelines
+ if u_id == user_id and p_id == project_id
+ }
+ if not possible_iterations:
+            msg = f"There are no pipelines scheduled for {user_id}:{project_id}"
+ raise SchedulerError(msg)
+ return max(possible_iterations)
+
+ def _start_scheduling(
+ self,
+ pipeline_params: ScheduledPipelineParams,
+ user_id: UserID,
+ project_id: ProjectID,
+ iteration: Iteration,
+ ) -> None:
+ async def _exclusive_safe_schedule_pipeline(
+ *,
+ user_id: UserID,
+ project_id: ProjectID,
+ iteration: Iteration,
+ pipeline_params: ScheduledPipelineParams,
+ ) -> None:
+ with contextlib.suppress(CouldNotAcquireLockError):
+ await self._schedule_pipeline(
user_id=user_id,
project_id=project_id,
iteration=iteration,
pipeline_params=pipeline_params,
)
- for (
- user_id,
- project_id,
- iteration,
- ), pipeline_params in self.scheduled_pipelines.items()
+
+ pipeline_params.scheduler_task = start_periodic_task(
+ functools.partial(
+ _exclusive_safe_schedule_pipeline,
+ user_id=user_id,
+ project_id=project_id,
+ iteration=iteration,
+ pipeline_params=pipeline_params,
+ ),
+ interval=_SCHEDULER_INTERVAL,
+ task_name=_TASK_NAME_TEMPLATE.format(
+ user_id=user_id, project_id=project_id, iteration=iteration
),
- reraise=False,
- log=_logger,
- limit=40,
- tasks_group_prefix="computational-scheduled-pipeline",
+ early_wake_up_event=pipeline_params.scheduler_waker,
)
async def _get_pipeline_dag(self, project_id: ProjectID) -> nx.DiGraph:
@@ -343,7 +414,7 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool:
if task.last_heartbeat is None:
assert task.start # nosec
return bool(
- (utc_now - task.start.replace(tzinfo=datetime.timezone.utc))
+ (utc_now - task.start.replace(tzinfo=datetime.UTC))
> self.service_runtime_heartbeat_interval
)
return bool(
@@ -602,6 +673,22 @@ async def _process_completed_tasks(
) -> None:
...
+ @staticmethod
+ def _build_exclusive_lock_key(*args, **kwargs) -> str:
+ assert args # nosec
+ return f"{kwargs['user_id']}:{kwargs['project_id']}:{kwargs['iteration']}"
+
+ @staticmethod
+ def _redis_client_getter(*args, **kwargs) -> RedisClientSDK:
+ assert kwargs # nosec
+ zelf = args[0]
+ assert isinstance(zelf, BaseCompScheduler) # nosec
+ return zelf.redis_client
+
+ @exclusive(
+ redis=_redis_client_getter,
+ lock_key=_build_exclusive_lock_key,
+ )
async def _schedule_pipeline(
self,
*,
@@ -610,98 +697,99 @@ async def _schedule_pipeline(
iteration: PositiveInt,
pipeline_params: ScheduledPipelineParams,
) -> None:
- _logger.debug(
- "checking run of project [%s:%s] for user [%s]",
- f"{project_id=}",
- f"{iteration=}",
- f"{user_id=}",
- )
- dag: nx.DiGraph = nx.DiGraph()
- try:
- dag = await self._get_pipeline_dag(project_id)
- # 1. Update our list of tasks with data from backend (state, results)
- await self._update_states_from_comp_backend(
- user_id, project_id, iteration, dag, pipeline_params=pipeline_params
- )
- # 2. Any task following a FAILED task shall be ABORTED
- comp_tasks = await self._set_states_following_failed_to_aborted(
- project_id, dag
- )
- # 3. do we want to stop the pipeline now?
- if pipeline_params.mark_for_cancellation:
- await self._schedule_tasks_to_stop(
- user_id, project_id, comp_tasks, pipeline_params
+ with log_context(
+ _logger,
+ level=logging.INFO,
+ msg=f"scheduling pipeline {user_id=}:{project_id=}:{iteration=}",
+ ):
+ dag: nx.DiGraph = nx.DiGraph()
+ try:
+ dag = await self._get_pipeline_dag(project_id)
+ # 1. Update our list of tasks with data from backend (state, results)
+ await self._update_states_from_comp_backend(
+ user_id, project_id, iteration, dag, pipeline_params=pipeline_params
)
- else:
- # let's get the tasks to schedule then
- comp_tasks = await self._schedule_tasks_to_start(
- user_id=user_id,
- project_id=project_id,
- comp_tasks=comp_tasks,
- dag=dag,
- pipeline_params=pipeline_params,
+ # 2. Any task following a FAILED task shall be ABORTED
+ comp_tasks = await self._set_states_following_failed_to_aborted(
+ project_id, dag
+ )
+ # 3. do we want to stop the pipeline now?
+ if pipeline_params.mark_for_cancellation:
+ await self._schedule_tasks_to_stop(
+ user_id, project_id, comp_tasks, pipeline_params
+ )
+ else:
+ # let's get the tasks to schedule then
+ comp_tasks = await self._schedule_tasks_to_start(
+ user_id=user_id,
+ project_id=project_id,
+ comp_tasks=comp_tasks,
+ dag=dag,
+ pipeline_params=pipeline_params,
+ )
+ # 4. timeout if waiting for cluster has been there for more than X minutes
+ comp_tasks = await self._timeout_if_waiting_for_cluster_too_long(
+ user_id, project_id, comp_tasks
+ )
+ # 5. send a heartbeat
+ await self._send_running_tasks_heartbeat(
+ user_id, project_id, iteration, dag
)
- # 4. timeout if waiting for cluster has been there for more than X minutes
- comp_tasks = await self._timeout_if_waiting_for_cluster_too_long(
- user_id, project_id, comp_tasks
- )
- # 5. send a heartbeat
- await self._send_running_tasks_heartbeat(
- user_id, project_id, iteration, dag
- )
- # 6. Update the run result
- pipeline_result = await self._update_run_result_from_tasks(
- user_id, project_id, iteration, comp_tasks
- )
+ # 6. Update the run result
+ pipeline_result = await self._update_run_result_from_tasks(
+ user_id, project_id, iteration, comp_tasks
+ )
- # 7. Are we done scheduling that pipeline?
- if not dag.nodes() or pipeline_result in COMPLETED_STATES:
- # there is nothing left, the run is completed, we're done here
+ # 7. Are we done scheduling that pipeline?
+ if not dag.nodes() or pipeline_result in COMPLETED_STATES:
+ # there is nothing left, the run is completed, we're done here
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ _logger.info(
+ "pipeline %s scheduling completed with result %s",
+ f"{project_id=}",
+ f"{pipeline_result=}",
+ )
+ assert pipeline_params.scheduler_task is not None # nosec
+ pipeline_params.scheduler_task.cancel()
+ except PipelineNotFoundError:
+ _logger.warning(
+ "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler",
+ f"{project_id=}",
+ )
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.ABORTED
+ )
self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- _logger.info(
- "pipeline %s scheduling completed with result %s",
+ except InvalidPipelineError as exc:
+ _logger.warning(
+ "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s",
f"{project_id=}",
- f"{pipeline_result=}",
+ exc,
)
- except PipelineNotFoundError:
- _logger.warning(
- "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler",
- f"{project_id=}",
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.ABORTED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except InvalidPipelineError as exc:
- _logger.warning(
- "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s",
- f"{project_id=}",
- exc,
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.ABORTED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError):
- _logger.exception(
- "Unexpected error while connecting with computational backend, aborting pipeline"
- )
- tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks(
- project_id, dag
- )
- comp_tasks_repo = CompTasksRepository(self.db_engine)
- await comp_tasks_repo.update_project_tasks_state(
- project_id,
- [t.node_id for t in tasks.values()],
- RunningState.FAILED,
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.FAILED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except ComputationalBackendNotConnectedError:
- _logger.exception("Computational backend is not connected!")
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.ABORTED
+ )
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError):
+ _logger.exception(
+ "Unexpected error while connecting with computational backend, aborting pipeline"
+ )
+ tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks(
+ project_id, dag
+ )
+ comp_tasks_repo = CompTasksRepository(self.db_engine)
+ await comp_tasks_repo.update_project_tasks_state(
+ project_id,
+ [t.node_id for t in tasks.values()],
+ RunningState.FAILED,
+ )
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.FAILED
+ )
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ except ComputationalBackendNotConnectedError:
+ _logger.exception("Computational backend is not connected!")
async def _schedule_tasks_to_stop(
self,
@@ -902,6 +990,3 @@ async def _timeout_if_waiting_for_cluster_too_long(
log_level=logging.ERROR,
)
return comp_tasks
-
- def _wake_up_scheduler_now(self) -> None:
- self.wake_up_event.set()
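Aside on the exclusivity machinery introduced in `_base_scheduler.py` above: `exclusive` is given callables rather than values, so the Redis client and the lock key are resolved from each wrapped call's own arguments, which gives every (user_id, project_id, iteration) its own distributed lock. A minimal sketch of such a decorator, assuming a redis.asyncio-style client exposing `lock()` (illustrative only, not the servicelib implementation):

    import functools

    class CouldNotAcquireLockError(Exception):
        ...

    def exclusive(redis, *, lock_key):
        # `redis` and `lock_key` may be plain values or callables that receive
        # the wrapped call's (*args, **kwargs) and return the client / key
        def _decorator(func):
            @functools.wraps(func)
            async def _wrapper(*args, **kwargs):
                client = redis(*args, **kwargs) if callable(redis) else redis
                key = lock_key(*args, **kwargs) if callable(lock_key) else lock_key
                lock = client.lock(key, timeout=10)  # auto-expiring lock
                if not await lock.acquire(blocking=False):
                    raise CouldNotAcquireLockError(key)
                try:
                    return await func(*args, **kwargs)
                finally:
                    await lock.release()
            return _wrapper
        return _decorator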
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
index abc97638608..2195d293e2e 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
@@ -124,7 +124,7 @@ async def _start_tasks(
cluster_id=pipeline_params.cluster_id,
tasks={node_id: task.image},
hardware_info=task.hardware_info,
- callback=self._wake_up_scheduler_now,
+ callback=pipeline_params.wake_up,
metadata=pipeline_params.run_metadata,
)
for node_id, task in scheduled_tasks.items()
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
index 458950e9798..4f7812816cc 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
@@ -2,6 +2,8 @@
from fastapi import FastAPI
from models_library.clusters import DEFAULT_CLUSTER_ID
+from servicelib.logging_utils import log_context
+from settings_library.redis import RedisDatabase
from ...core.errors import ConfigurationError
from ...core.settings import AppSettings
@@ -10,10 +12,11 @@
from ..dask_clients_pool import DaskClientsPool
from ..db.repositories.comp_runs import CompRunsRepository
from ..rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_client
+from ..redis import get_redis_client_manager
from ._base_scheduler import BaseCompScheduler, ScheduledPipelineParams
from ._dask_scheduler import DaskScheduler
-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
async def create_from_db(app: FastAPI) -> BaseCompScheduler:
@@ -28,29 +31,32 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler:
filter_by_state=SCHEDULED_STATES
)
- logger.debug(
+ _logger.debug(
"Following scheduled comp_runs found still to be scheduled: %s",
runs if runs else "NONE",
)
- logger.info("Creating Dask-based scheduler...")
- app_settings: AppSettings = app.state.settings
- return DaskScheduler(
- settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND,
- dask_clients_pool=DaskClientsPool.instance(app),
- rabbitmq_client=get_rabbitmq_client(app),
- rabbitmq_rpc_client=get_rabbitmq_rpc_client(app),
- db_engine=db_engine,
- scheduled_pipelines={
- (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams(
- cluster_id=(
- r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID
- ),
- run_metadata=r.metadata,
- mark_for_cancellation=False,
- use_on_demand_clusters=r.use_on_demand_clusters,
- )
- for r in runs
- },
- service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT,
- )
+ with log_context(
+ _logger, logging.INFO, msg="Creating Dask-based computational scheduler"
+ ):
+ app_settings: AppSettings = app.state.settings
+ return DaskScheduler(
+ settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND,
+ dask_clients_pool=DaskClientsPool.instance(app),
+ rabbitmq_client=get_rabbitmq_client(app),
+ rabbitmq_rpc_client=get_rabbitmq_rpc_client(app),
+ redis_client=get_redis_client_manager(app).client(RedisDatabase.LOCKS),
+ db_engine=db_engine,
+ scheduled_pipelines={
+ (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams(
+ cluster_id=(
+ r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID
+ ),
+ run_metadata=r.metadata,
+ mark_for_cancellation=r.cancelled,
+ use_on_demand_clusters=r.use_on_demand_clusters,
+ )
+ for r in runs
+ },
+ service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT,
+ )
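Note that `mark_for_cancellation=r.cancelled` changes the field's meaning: it now carries the run's `cancelled` timestamp (None while the run is alive) instead of a plain bool, which is why the tests below assert `is None`. A hypothetical sketch of the `ScheduledPipelineParams` shape implied by its usages in this diff:

    import asyncio
    import datetime
    from dataclasses import dataclass, field

    @dataclass
    class ScheduledPipelineParams:
        cluster_id: int
        run_metadata: dict
        use_on_demand_clusters: bool
        mark_for_cancellation: datetime.datetime | None = None
        scheduler_task: asyncio.Task | None = None  # started by _start_scheduling
        scheduler_waker: asyncio.Event = field(default_factory=asyncio.Event)

        def wake_up(self) -> None:
            # passed as `callback` to the dask client to trigger early rescheduling
            self.scheduler_waker.set()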
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
deleted file mode 100644
index 0e1c79ff8b6..00000000000
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import datetime
-import logging
-from collections.abc import Callable, Coroutine
-from typing import Any, Final
-
-from fastapi import FastAPI
-from servicelib.background_task import start_periodic_task, stop_periodic_task
-from servicelib.logging_utils import log_context
-from servicelib.redis import RedisClientsManager
-from servicelib.redis_utils import exclusive
-from settings_library.redis import RedisDatabase
-
-from . import _scheduler_factory
-
-_logger = logging.getLogger(__name__)
-
-_COMPUTATIONAL_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(
- seconds=5
-)
-_TASK_NAME: Final[str] = "computational services scheduler"
-
-
-def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
- async def start_scheduler() -> None:
- with log_context(
- _logger, level=logging.INFO, msg="starting computational scheduler"
- ):
- redis_clients_manager: RedisClientsManager = app.state.redis_clients_manager
- lock_key = f"{app.title}:computational_scheduler"
- app.state.scheduler = scheduler = await _scheduler_factory.create_from_db(
- app
- )
- app.state.computational_scheduler_task = start_periodic_task(
- exclusive(
- redis_clients_manager.client(RedisDatabase.LOCKS),
- lock_key=lock_key,
- )(scheduler.schedule_all_pipelines),
- interval=_COMPUTATIONAL_SCHEDULER_INTERVAL,
- task_name=_TASK_NAME,
- early_wake_up_event=scheduler.wake_up_event,
- )
-
- return start_scheduler
-
-
-def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
- async def stop_scheduler() -> None:
- await stop_periodic_task(app.state.computational_scheduler_task)
-
- return stop_scheduler
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
index 289a0063649..9ce28bcda8d 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
@@ -3,6 +3,7 @@
from collections import deque
from typing import Any
+import arrow
import sqlalchemy as sa
from aiopg.sa.result import RowProxy
from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID
@@ -146,10 +147,20 @@ async def set_run_result(
) -> CompRunsAtDB | None:
values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]}
if final_state:
- values.update({"ended": datetime.datetime.now(tz=datetime.UTC)})
+ values.update({"ended": arrow.utcnow().datetime})
return await self.update(
user_id,
project_id,
iteration,
**values,
)
+
+ async def mark_for_cancellation(
+ self, *, user_id: UserID, project_id: ProjectID, iteration: PositiveInt
+ ) -> CompRunsAtDB | None:
+ return await self.update(
+ user_id,
+ project_id,
+ iteration,
+ cancelled=arrow.utcnow().datetime,
+ )
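`mark_for_cancellation` stamps the run's `cancelled` column; together with the factory change above (`mark_for_cancellation=r.cancelled`) this is what lets a restarted director-v2 resume a pending cancellation. A usage sketch (how `repo` is obtained is outside this diff):

    # `repo` is a CompRunsRepository instance (acquisition not shown here)
    run = await repo.mark_for_cancellation(
        user_id=user_id, project_id=project_id, iteration=iteration
    )
    # the next _schedule_pipeline iteration observes the timestamp via
    # pipeline_params.mark_for_cancellation and schedules the tasks to stop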
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
index 322e5281e46..ca211eb70dd 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
@@ -1,7 +1,4 @@
-""" Module that takes care of communications with director v0 service
-
-
-"""
+"""Module that takes care of communications with director v0 service"""
import logging
import urllib.parse
@@ -20,7 +17,9 @@
from models_library.service_settings_labels import SimcoreServiceLabels
from models_library.services import ServiceKey, ServiceKeyVersion, ServiceVersion
from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_decorator
+from settings_library.tracing import TracingSettings
from ..core.settings import DirectorV0Settings
from ..utils.client_decorators import handle_errors, handle_retry
@@ -31,25 +30,34 @@
# Module's setup logic ---------------------------------------------
-def setup(app: FastAPI, settings: DirectorV0Settings | None):
- if not settings:
- settings = DirectorV0Settings()
+def setup(
+ app: FastAPI,
+ director_v0_settings: DirectorV0Settings | None,
+ tracing_settings: TracingSettings | None,
+):
+ if not director_v0_settings:
+ director_v0_settings = DirectorV0Settings()
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{director_v0_settings.endpoint}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
DirectorV0Client.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.endpoint}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
+ )
+ logger.debug(
+ "created client for director-v0: %s", director_v0_settings.endpoint
)
- logger.debug("created client for director-v0: %s", settings.endpoint)
async def on_shutdown() -> None:
client = DirectorV0Client.instance(app).client
await client.aclose()
del client
- logger.debug("delete client for director-v0: %s", settings.endpoint)
+ logger.debug("delete client for director-v0: %s", director_v0_settings.endpoint)
app.add_event_handler("startup", on_startup)
app.add_event_handler("shutdown", on_shutdown)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
index d572a9f23fb..acbc08849a6 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
@@ -8,19 +8,24 @@
import httpx
from fastapi import FastAPI
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from ..utils.client_decorators import handle_errors, handle_retry
logger = logging.getLogger(__name__)
-def setup(app: FastAPI) -> None:
+def setup(app: FastAPI, tracing_settings: TracingSettings | None) -> None:
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
ServicesClient.create(
app,
- client=httpx.AsyncClient(
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
- ),
+ client=client,
)
async def on_shutdown() -> None:
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
index 21ef1bbe279..09d5c7a7272 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
@@ -12,6 +12,7 @@
expect_status,
retry_on_errors,
)
+from settings_library.tracing import TracingSettings
from ....core.dynamic_services_settings.scheduler import (
DynamicServicesSchedulerSettings,
@@ -31,6 +32,9 @@ def __init__(self, app: FastAPI):
scheduler_settings: DynamicServicesSchedulerSettings = (
app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
)
+ tracing_settings: TracingSettings | None = (
+ app.state.settings.DIRECTOR_V2_TRACING
+ )
# timeouts
self._health_request_timeout = Timeout(1.0, connect=1.0)
@@ -53,6 +57,7 @@ def __init__(self, app: FastAPI):
scheduler_settings.DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT,
connect=scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT,
),
+ tracing_settings=tracing_settings,
)
def _get_url(
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py
index f344c93422f..b6885bae7b3 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py
@@ -184,6 +184,11 @@ def _get_environment_variables(
"S3_SECRET_KEY": r_clone_settings.R_CLONE_S3.S3_SECRET_KEY,
"SC_BOOT_MODE": f"{app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_SC_BOOT_MODE}",
"SSL_CERT_FILE": app_settings.DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME,
+ "DYNAMIC_SIDECAR_TRACING": (
+ app_settings.DIRECTOR_V2_TRACING.json()
+ if app_settings.DIRECTOR_V2_TRACING
+ else "null"
+ ),
# For background info on this special env-var above, see
# - https://stackoverflow.com/questions/31448854/how-to-force-requests-use-the-certificates-on-my-ubuntu-system#comment78596389_37447847
"SIMCORE_HOST_NAME": scheduler_data.service_name,
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
index 7cb6f86cc82..273061cb188 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
@@ -1,7 +1,10 @@
+from typing import cast
+
from fastapi import FastAPI
from servicelib.redis import RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase
+from .._meta import APP_NAME
from ..core.settings import AppSettings
@@ -18,6 +21,7 @@ async def on_startup() -> None:
)
},
settings=settings.REDIS,
+ client_name=APP_NAME,
)
await redis_clients_manager.setup()
@@ -27,3 +31,7 @@ async def on_shutdown() -> None:
app.add_event_handler("startup", on_startup)
app.add_event_handler("shutdown", on_shutdown)
+
+
+def get_redis_client_manager(app: FastAPI) -> RedisClientsManager:
+ return cast(RedisClientsManager, app.state.redis_clients_manager)
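`get_redis_client_manager` is a small typed accessor over `app.state`; the scheduler factory above uses it to hand the LOCKS database client to the scheduler:

    redis_client = get_redis_client_manager(app).client(RedisDatabase.LOCKS)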
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
index f1b4280bdcc..58d02975fd7 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
@@ -23,6 +23,7 @@
)
from models_library.services import ServiceKey, ServiceVersion
from models_library.wallets import WalletID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.errors import PricingPlanUnitNotFoundError
from ..core.settings import AppSettings
@@ -40,6 +41,8 @@ def create(cls, settings: AppSettings) -> "ResourceUsageTrackerClient":
client = httpx.AsyncClient(
base_url=settings.DIRECTOR_V2_RESOURCE_USAGE_TRACKER.api_base_url,
)
+ if settings.DIRECTOR_V2_TRACING:
+ setup_httpx_client_tracing(client=client)
exit_stack = contextlib.AsyncExitStack()
return cls(client=client, exit_stack=exit_stack)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
index b0cbb5e9629..08e18de0aeb 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
@@ -8,9 +8,11 @@
import httpx
from fastapi import FastAPI, HTTPException
from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_decorator
from settings_library.s3 import S3Settings
from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings
# Module's business logic ---------------------------------------------
from starlette import status
@@ -23,19 +25,27 @@
# Module's setup logic ---------------------------------------------
-def setup(app: FastAPI, settings: StorageSettings):
- if not settings:
- settings = StorageSettings()
+def setup(
+ app: FastAPI,
+ storage_settings: StorageSettings | None,
+ tracing_settings: TracingSettings | None,
+):
+
+ if not storage_settings:
+ storage_settings = StorageSettings()
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{storage_settings.api_base_url}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
StorageClient.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.api_base_url}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
)
- logger.debug("created client for storage: %s", settings.api_base_url)
+ logger.debug("created client for storage: %s", storage_settings.api_base_url)
async def on_shutdown() -> None:
client = StorageClient.instance(app).client
diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py
index d04ff9134aa..d0a70389caa 100644
--- a/services/director-v2/tests/conftest.py
+++ b/services/director-v2/tests/conftest.py
@@ -190,6 +190,7 @@ def mock_env(
"SIMCORE_SERVICES_NETWORK_NAME": "test_network_name",
"SWARM_STACK_NAME": "pytest-simcore",
"TRAEFIK_SIMCORE_ZONE": "test_traefik_zone",
+ "DIRECTOR_V2_TRACING": "null",
},
)
@@ -217,7 +218,7 @@ async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]:
@pytest.fixture()
async def async_client(initialized_app: FastAPI) -> AsyncIterable[httpx.AsyncClient]:
async with httpx.AsyncClient(
- app=initialized_app,
+ transport=httpx.ASGITransport(app=initialized_app),
base_url="http://director-v2.testserver.io",
headers={"Content-Type": "application/json"},
) as client:
diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
index a160afd664f..bb50fb1fa5d 100644
--- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
+++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
@@ -64,6 +64,7 @@
from settings_library.rabbit import RabbitSettings
from settings_library.redis import RedisSettings
from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings
from simcore_postgres_database.models.comp_pipeline import comp_pipeline
from simcore_postgres_database.models.comp_tasks import comp_tasks
from simcore_postgres_database.models.projects_networks import projects_networks
@@ -340,8 +341,14 @@ async def patch_storage_setup(
original_setup = dv2_modules_storage.setup
- def setup(app: FastAPI, settings: StorageSettings) -> None:
- original_setup(app, local_settings)
+ def setup(
+ app: FastAPI,
+ storage_settings: StorageSettings,
+ tracing_settings: TracingSettings | None,
+ ) -> None:
+ original_setup(
+ app, storage_settings=local_settings, tracing_settings=tracing_settings
+ )
mocker.patch("simcore_service_director_v2.modules.storage.setup", side_effect=setup)
diff --git a/services/director-v2/tests/unit/_helpers.py b/services/director-v2/tests/unit/_helpers.py
index 2654c63a3e1..779d6cdd117 100644
--- a/services/director-v2/tests/unit/_helpers.py
+++ b/services/director-v2/tests/unit/_helpers.py
@@ -1,4 +1,3 @@
-import asyncio
from dataclasses import dataclass
from typing import Any
@@ -11,9 +10,6 @@
from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB
from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB
-from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
- BaseCompScheduler,
-)
@dataclass
@@ -28,13 +24,6 @@ class RunningProject(PublishedProject):
runs: CompRunsAtDB
-async def trigger_comp_scheduler(scheduler: BaseCompScheduler) -> None:
- # trigger the scheduler
- scheduler._wake_up_scheduler_now() # pylint: disable=protected-access # noqa: SLF001
- # let the scheduler be actually triggered
- await asyncio.sleep(1)
-
-
async def set_comp_task_state(
aiopg_engine: aiopg.sa.engine.Engine, node_id: str, state: StateType
) -> None:
diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py
index 4a73b3e7210..f4870a140c4 100644
--- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py
+++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py
@@ -37,6 +37,7 @@
"DY_SIDECAR_USER_SERVICES_HAVE_INTERNET_ACCESS",
"DYNAMIC_SIDECAR_COMPOSE_NAMESPACE",
"DYNAMIC_SIDECAR_LOG_LEVEL",
+ "DYNAMIC_SIDECAR_TRACING",
"NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS",
"POSTGRES_DB",
"POSTGRES_ENDPOINT",
diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py
index 518552af1e1..88b143612af 100644
--- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py
+++ b/services/director-v2/tests/unit/test_utils_distributed_identifier.py
@@ -173,7 +173,7 @@ async def redis_client_sdk(
RedisDatabase.DISTRIBUTED_IDENTIFIERS
)
- client = RedisClientSDK(redis_resources_dns)
+ client = RedisClientSDK(redis_resources_dns, client_name="pytest")
assert client
assert client.redis_dsn == redis_resources_dns
await client.setup()
diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py
index a3234328c9f..ff55e862abb 100644
--- a/services/director-v2/tests/unit/with_dbs/conftest.py
+++ b/services/director-v2/tests/unit/with_dbs/conftest.py
@@ -10,6 +10,7 @@
from typing import Any, cast
from uuid import uuid4
+import arrow
import pytest
import sqlalchemy as sa
from _helpers import PublishedProject, RunningProject
@@ -321,6 +322,7 @@ async def running_project(
) -> RunningProject:
user = registered_user()
created_project = await project(user, workbench=fake_workbench_without_outputs)
+ now_time = arrow.utcnow().datetime
return RunningProject(
project=created_project,
pipeline=pipeline(
@@ -332,9 +334,50 @@ async def running_project(
project=created_project,
state=StateType.RUNNING,
progress=0.0,
- start=datetime.datetime.now(tz=datetime.UTC),
+ start=now_time,
+ ),
+ runs=runs(
+ user=user,
+ project=created_project,
+ started=now_time,
+ result=StateType.RUNNING,
+ ),
+ )
+
+
+@pytest.fixture
+async def running_project_mark_for_cancellation(
+ registered_user: Callable[..., dict[str, Any]],
+ project: Callable[..., Awaitable[ProjectAtDB]],
+ pipeline: Callable[..., CompPipelineAtDB],
+ tasks: Callable[..., list[CompTaskAtDB]],
+ runs: Callable[..., CompRunsAtDB],
+ fake_workbench_without_outputs: dict[str, Any],
+ fake_workbench_adjacency: dict[str, Any],
+) -> RunningProject:
+ user = registered_user()
+ created_project = await project(user, workbench=fake_workbench_without_outputs)
+ now_time = arrow.utcnow().datetime
+ return RunningProject(
+ project=created_project,
+ pipeline=pipeline(
+ project_id=f"{created_project.uuid}",
+ dag_adjacency_list=fake_workbench_adjacency,
+ ),
+ tasks=tasks(
+ user=user,
+ project=created_project,
+ state=StateType.RUNNING,
+ progress=0.0,
+ start=now_time,
+ ),
+ runs=runs(
+ user=user,
+ project=created_project,
+ result=StateType.RUNNING,
+ started=now_time,
+ cancelled=now_time + datetime.timedelta(seconds=5),
),
- runs=runs(user=user, project=created_project, result=StateType.RUNNING),
)
diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
index 8567c8ccca0..72015d97d99 100644
--- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
+++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
@@ -45,6 +45,7 @@
from pytest_mock.plugin import MockerFixture
from pytest_simcore.helpers.typing_env import EnvVarsDict
from servicelib.rabbitmq import RabbitMQClient
+from servicelib.redis import CouldNotAcquireLockError
from settings_library.rabbit import RabbitSettings
from settings_library.redis import RedisSettings
from simcore_postgres_database.models.comp_runs import comp_runs
@@ -66,8 +67,12 @@
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict
from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image
from simcore_service_director_v2.models.dask_subsystem import DaskClientTaskState
-from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
+from simcore_service_director_v2.modules.comp_scheduler import (
BaseCompScheduler,
+ get_scheduler,
+)
+from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
+ ScheduledPipelineParams,
)
from simcore_service_director_v2.modules.comp_scheduler._dask_scheduler import (
DaskScheduler,
@@ -103,9 +108,9 @@ def _assert_dask_client_correctly_initialized(
)
mocked_dask_client.register_handlers.assert_called_once_with(
TaskHandlers(
- cast(
+ cast( # noqa: SLF001
DaskScheduler, scheduler
- )._task_progress_change_handler, # noqa: SLF001
+ )._task_progress_change_handler,
cast(DaskScheduler, scheduler)._task_log_change_handler, # noqa: SLF001
)
)
@@ -155,8 +160,38 @@ async def _assert_comp_tasks_db(
), f"{expected_progress=}, found: {[t.progress for t in tasks]}"
-async def run_comp_scheduler(scheduler: BaseCompScheduler) -> None:
- await scheduler.schedule_all_pipelines()
+async def schedule_all_pipelines(scheduler: BaseCompScheduler) -> None:
+ # NOTE: we take a copy of the pipelines, as this could change quickly if there are
+ # misconfigured pipelines that would be removed from the scheduler
+    # NOTE: we simulate multiple dv-2 replicas by running the same
+    # pipeline scheduling several times concurrently
+ local_pipelines = deepcopy(scheduler.scheduled_pipelines)
+ results = await asyncio.gather(
+ *(
+ scheduler._schedule_pipeline( # noqa: SLF001
+ user_id=user_id,
+ project_id=project_id,
+ iteration=iteration,
+ pipeline_params=params,
+ )
+ for _ in range(3)
+ for (
+ user_id,
+ project_id,
+ iteration,
+ ), params in local_pipelines.items()
+ ),
+ return_exceptions=True,
+ )
+ # we should have exceptions 2/3 of the time
+ could_not_acquire_lock_count = sum(
+ isinstance(r, CouldNotAcquireLockError) for r in results
+ )
+ total_results_count = len(results)
+
+    # exactly 2/3 of the results must be CouldNotAcquireLockError,
+    # which proves that scheduling runs are mutually exclusive
+ assert could_not_acquire_lock_count == (2 / 3) * total_results_count
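To spell out the arithmetic: each pipeline is scheduled 3 times concurrently and the per-(user, project, iteration) lock admits exactly one attempt, so for N pipelines the gather yields 3N results of which 2N are CouldNotAcquireLockError. The same pattern in isolation (a sketch reusing the `exclusive` semantics, not test code from this repo):

    results = await asyncio.gather(
        *(locked_work() for _ in range(3)),  # locked_work is @exclusive-decorated
        return_exceptions=True,
    )
    lock_errors = sum(isinstance(r, CouldNotAcquireLockError) for r in results)
    assert lock_errors == 2  # only one of the three concurrent attempts wins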
@pytest.fixture
@@ -185,11 +220,11 @@ def minimal_dask_scheduler_config(
def scheduler(
minimal_dask_scheduler_config: None,
aiopg_engine: aiopg.sa.engine.Engine,
- # dask_spec_local_cluster: SpecCluster,
minimal_app: FastAPI,
) -> BaseCompScheduler:
- assert minimal_app.state.scheduler is not None
- return minimal_app.state.scheduler
+ scheduler = get_scheduler(minimal_app)
+ assert scheduler is not None
+ return scheduler
@pytest.fixture
@@ -220,16 +255,21 @@ def mocked_clean_task_output_fct(mocker: MockerFixture) -> mock.MagicMock:
@pytest.fixture
-def with_disabled_scheduler_task(mocker: MockerFixture) -> None:
+def with_disabled_auto_scheduling(mocker: MockerFixture) -> mock.MagicMock:
"""disables the scheduler task, note that it needs to be triggered manually then"""
- mocker.patch(
- "simcore_service_director_v2.modules.comp_scheduler._task.start_periodic_task",
- autospec=True,
- )
- mocker.patch(
- "simcore_service_director_v2.modules.comp_scheduler._task.stop_periodic_task",
+ def _fake_starter(
+ self: BaseCompScheduler,
+ pipeline_params: ScheduledPipelineParams,
+ *args,
+ **kwargs,
+ ) -> None:
+ pipeline_params.scheduler_task = mocker.MagicMock()
+
+ return mocker.patch(
+ "simcore_service_director_v2.modules.comp_scheduler._base_scheduler.BaseCompScheduler._start_scheduling",
autospec=True,
+ side_effect=_fake_starter,
)
@@ -258,7 +298,7 @@ async def test_scheduler_gracefully_starts_and_stops(
minimal_app: FastAPI,
):
# check it started correctly
- assert minimal_app.state.computational_scheduler_task is not None
+ assert get_scheduler(minimal_app) is not None
@pytest.mark.parametrize(
@@ -280,13 +320,14 @@ def test_scheduler_raises_exception_for_missing_dependencies(
settings = AppSettings.create_from_envs()
app = init_app(settings)
- with pytest.raises(ConfigurationError):
- with TestClient(app, raise_server_exceptions=True) as _:
- pass
+ with pytest.raises(ConfigurationError), TestClient(
+ app, raise_server_exceptions=True
+ ) as _:
+ pass
async def test_empty_pipeline_is_not_scheduled(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
registered_user: Callable[..., dict[str, Any]],
project: Callable[..., Awaitable[ProjectAtDB]],
@@ -318,9 +359,6 @@ async def test_empty_pipeline_is_not_scheduled(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 0
- assert (
- scheduler.wake_up_event.is_set() is False
- ), "the scheduler was woken up on an empty pipeline!"
# check the database is empty
async with aiopg_engine.acquire() as conn:
result = await conn.scalar(
@@ -333,7 +371,7 @@ async def test_empty_pipeline_is_not_scheduled(
async def test_misconfigured_pipeline_is_not_scheduled(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
registered_user: Callable[..., dict[str, Any]],
project: Callable[..., Awaitable[ProjectAtDB]],
@@ -360,14 +398,11 @@ async def test_misconfigured_pipeline_is_not_scheduled(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 1
- assert (
- scheduler.wake_up_event.is_set() is True
- ), "the scheduler was NOT woken up on the scheduled pipeline!"
for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items():
assert u_id == user["id"]
assert p_id == sleepers_project.uuid
assert it > 0
- assert params.mark_for_cancellation is False
+ assert params.mark_for_cancellation is None
# check the database was properly updated
async with aiopg_engine.acquire() as conn:
result = await conn.execute(
@@ -379,7 +414,7 @@ async def test_misconfigured_pipeline_is_not_scheduled(
run_entry = CompRunsAtDB.model_validate(await result.first())
assert run_entry.result == RunningState.PUBLISHED
# let the scheduler kick in
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# check the scheduled pipelines is again empty since it's misconfigured
assert len(scheduler.scheduled_pipelines) == 0
# check the database entry is correctly updated
@@ -411,14 +446,11 @@ async def _assert_start_pipeline(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 1, "the pipeline is not scheduled!"
- assert (
- scheduler.wake_up_event.is_set() is True
- ), "the scheduler was NOT woken up on the scheduled pipeline!"
for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items():
assert u_id == published_project.project.prj_owner
assert p_id == published_project.project.uuid
assert it > 0
- assert params.mark_for_cancellation is False
+ assert params.mark_for_cancellation is None
assert params.run_metadata == run_metadata
# check the database is correctly updated, the run is published
@@ -433,7 +465,7 @@ async def _assert_start_pipeline(
return exp_published_tasks
-async def _assert_schedule_pipeline_PENDING(
+async def _assert_schedule_pipeline_PENDING( # noqa: N802
aiopg_engine,
published_project: PublishedProject,
published_tasks: list[CompTaskAtDB],
@@ -451,7 +483,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
return [DaskClientTaskState.PENDING for job_id in job_ids]
mocked_dask_client.get_tasks_status.side_effect = _return_tasks_pending
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
_assert_dask_client_correctly_initialized(mocked_dask_client, scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PUBLISHED)
await _assert_comp_tasks_db(
@@ -470,6 +502,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
expected_progress=None, # since we bypass the API entrypoint this is correct
)
# tasks were send to the backend
+ assert published_project.project.prj_owner is not None
mocked_dask_client.send_computation_tasks.assert_has_calls(
calls=[
mock.call(
@@ -477,7 +510,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
project_id=published_project.project.uuid,
cluster_id=DEFAULT_CLUSTER_ID,
tasks={f"{p.node_id}": p.image},
- callback=scheduler._wake_up_scheduler_now, # noqa: SLF001
+ callback=mock.ANY,
metadata=mock.ANY,
hardware_info=mock.ANY,
)
@@ -489,7 +522,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
mocked_dask_client.get_tasks_status.assert_not_called()
mocked_dask_client.get_task_result.assert_not_called()
# there is a second run of the scheduler to move comp_runs to pending, the rest does not change
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -615,7 +648,7 @@ async def _trigger_progress_event(
@pytest.mark.acceptance_test()
async def test_proper_pipeline_is_scheduled( # noqa: PLR0915
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -660,7 +693,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_running
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING)
await _assert_comp_tasks_db(
@@ -706,7 +739,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# comp_run, the comp_task switch to STARTED
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
@@ -772,7 +805,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData:
return TaskOutputData.model_validate({"out_1": None, "out_2": 45})
mocked_dask_client.get_task_result.side_effect = _return_random_task_result
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -822,7 +855,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData:
tasks={
f"{next_pending_task.node_id}": next_pending_task.image,
},
- callback=scheduler._wake_up_scheduler_now, # noqa: SLF001
+ callback=mock.ANY,
metadata=mock.ANY,
hardware_info=mock.ANY,
)
@@ -869,7 +902,7 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
project_id=exp_started_task.project_id,
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -913,7 +946,7 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat
mocked_dask_client.get_tasks_status.side_effect = _return_2nd_task_failed
mocked_dask_client.get_task_result.side_effect = None
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -962,7 +995,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta
mocked_dask_client.get_task_result.side_effect = _return_random_task_result
# trigger the scheduler, it should switch to FAILED, as we are done
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED)
await _assert_comp_tasks_db(
@@ -1000,7 +1033,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta
async def test_task_progress_triggers(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -1039,11 +1072,9 @@ async def test_task_progress_triggers(
parent_project_id=None,
),
)
- await cast(
+ await cast( # noqa: SLF001
DaskScheduler, scheduler
- )._task_progress_change_handler( # noqa: SLF001
- progress_event.model_dump_json()
- )
+ )._task_progress_change_handler(progress_event.model_dump_json())
# NOTE: not sure whether it should switch to STARTED.. it would make sense
await _assert_comp_tasks_db(
aiopg_engine,
@@ -1065,7 +1096,7 @@ async def test_task_progress_triggers(
],
)
async def test_handling_of_disconnected_dask_scheduler(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -1109,7 +1140,7 @@ async def test_handling_of_disconnected_dask_scheduler(
project_id=published_project.project.uuid,
)
# we ensure the scheduler was run
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# after this step the tasks are marked as ABORTED
await _assert_comp_tasks_db(
aiopg_engine,
@@ -1123,7 +1154,7 @@ async def test_handling_of_disconnected_dask_scheduler(
expected_progress=1,
)
# then we have another scheduler run
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# now the run should be ABORTED
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.ABORTED)
@@ -1208,7 +1239,7 @@ class RebootState:
],
)
async def test_handling_scheduling_after_reboot(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
aiopg_engine: aiopg.sa.engine.Engine,
running_project: RunningProject,
@@ -1217,7 +1248,7 @@ async def test_handling_scheduling_after_reboot(
mocked_clean_task_output_fct: mock.MagicMock,
reboot_state: RebootState,
):
- """After the dask client is rebooted, or that the director-v2 reboots the scheduler
+ """After the dask client is rebooted, or that the director-v2 reboots the dv-2 internal scheduler
shall continue scheduling correctly. Even though the task might have continued to run
in the dask-scheduler."""
@@ -1233,7 +1264,7 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData:
mocked_dask_client.get_task_result.side_effect = mocked_get_task_result
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# the status will be called once for all RUNNING tasks
mocked_dask_client.get_tasks_status.assert_called_once()
if reboot_state.expected_run_state in COMPLETED_STATES:
@@ -1289,6 +1320,93 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData:
)
+async def test_handling_cancellation_of_jobs_after_reboot(
+ with_disabled_auto_scheduling: None,
+ mocked_dask_client: mock.MagicMock,
+ aiopg_engine: aiopg.sa.engine.Engine,
+ running_project_mark_for_cancellation: RunningProject,
+ scheduler: BaseCompScheduler,
+ mocked_parse_output_data_fct: mock.MagicMock,
+ mocked_clean_task_output_fct: mock.MagicMock,
+):
+ """A running pipeline was cancelled by a user and the DV-2 was restarted BEFORE
+ It could actually cancel the task. On reboot the DV-2 shall recover
+ and actually cancel the pipeline properly"""
+
+ # check initial status
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED
+ )
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [t.node_id for t in running_project_mark_for_cancellation.tasks],
+ expected_state=RunningState.STARTED,
+ expected_progress=0,
+ )
+
+ # the backend shall report the tasks as running
+ async def mocked_get_tasks_status(job_ids: list[str]) -> list[DaskClientTaskState]:
+ return [DaskClientTaskState.PENDING_OR_STARTED for j in job_ids]
+
+ mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status
+ # Running the scheduler, should actually cancel the run now
+ await schedule_all_pipelines(scheduler)
+ mocked_dask_client.abort_computation_task.assert_called()
+ assert mocked_dask_client.abort_computation_task.call_count == len(
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ]
+ )
+ # in the DB they are still running, they will be stopped in the next iteration
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ],
+ expected_state=RunningState.STARTED,
+ expected_progress=0,
+ )
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED
+ )
+
+ # the backend shall now report the tasks as aborted
+ async def mocked_get_tasks_status_aborted(
+ job_ids: list[str],
+ ) -> list[DaskClientTaskState]:
+ return [DaskClientTaskState.ABORTED for j in job_ids]
+
+ mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status_aborted
+
+ async def _return_random_task_result(job_id) -> TaskOutputData:
+ raise TaskCancelledError
+
+ mocked_dask_client.get_task_result.side_effect = _return_random_task_result
+ await schedule_all_pipelines(scheduler)
+ # now should be stopped
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ],
+ expected_state=RunningState.ABORTED,
+ expected_progress=1,
+ )
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.ABORTED
+ )
+ mocked_clean_task_output_fct.assert_called()
+
+
@pytest.fixture
def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int:
seconds = 1
@@ -1297,7 +1415,7 @@ def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int:
async def test_running_pipeline_triggers_heartbeat(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
with_fast_service_heartbeat_s: int,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
@@ -1344,7 +1462,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
project_id=exp_started_task.project_id,
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
@@ -1356,8 +1474,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
# -------------------------------------------------------------------------------
# 3. wait a bit and run again we should get another heartbeat, but only one!
await asyncio.sleep(with_fast_service_heartbeat_s + 1)
- await run_comp_scheduler(scheduler)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
1,
@@ -1368,8 +1486,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
# -------------------------------------------------------------------------------
# 4. wait a bit and run again we should get another heartbeat, but only one!
await asyncio.sleep(with_fast_service_heartbeat_s + 1)
- await run_comp_scheduler(scheduler)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
1,
@@ -1387,7 +1505,7 @@ async def mocked_get_or_create_cluster(mocker: MockerFixture) -> mock.Mock:
async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
published_project: PublishedProject,
@@ -1425,7 +1543,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
published_project.tasks[1],
published_project.tasks[3],
]
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
@@ -1440,7 +1558,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
expected_progress=None,
)
# again will trigger the same response
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
@@ -1461,7 +1579,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
[ClustersKeeperNotAvailableError],
)
async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
published_project: PublishedProject,
@@ -1494,7 +1612,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
published_project.tasks[1],
published_project.tasks[3],
]
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
@@ -1507,7 +1625,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
expected_progress=1.0,
)
# again will not re-trigger the call to clusters-keeper
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_not_called()
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED)
await _assert_comp_tasks_db(
diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py
index 16032677a98..2d96c0248d9 100644
--- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py
+++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py
@@ -250,6 +250,7 @@ def expected_dynamic_sidecar_spec(
"FORWARD_ENV_DISPLAY": ":0",
"NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": "3",
"DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG",
+ "DYNAMIC_SIDECAR_TRACING": "null",
"DY_DEPLOYMENT_REGISTRY_SETTINGS": (
'{"REGISTRY_AUTH":false,"REGISTRY_PATH":null,'
'"REGISTRY_URL":"foo.bar.com","REGISTRY_USER":'
diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt
index 24945ba6807..e69de29bb2d 100644
--- a/services/director/requirements/_tools.txt
+++ b/services/director/requirements/_tools.txt
@@ -1,68 +0,0 @@
-#
-# This file is autogenerated by pip-compile with python 3.6
-# To update, run:
-#
-# pip-compile --output-file=requirements/_tools.txt --strip-extras requirements/_tools.in
-#
-appdirs==1.4.4
- # via black
-black==20.8b1
- # via -r requirements/_tools.in
-bump2version==1.0.1
- # via -r requirements/_tools.in
-click==8.0.3
- # via
- # black
- # pip-tools
-dataclasses==0.7
- # via
- # -c requirements/_base.txt
- # -c requirements/_test.txt
- # black
-importlib-metadata==2.0.0
- # via
- # -c requirements/_test.txt
- # click
- # pep517
-mypy-extensions==0.4.3
- # via black
-pathspec==0.9.0
- # via black
-pep517==0.12.0
- # via pip-tools
-pip==24.3.1
- # via pip-tools
-pip-tools==6.4.0
- # via -r requirements/_tools.in
-pyyaml==5.4
- # via
- # -c requirements/_base.txt
- # -c requirements/_test.txt
- # watchdog
-regex==2022.1.18
- # via black
-toml==0.10.2
- # via
- # -c requirements/_test.txt
- # black
-tomli==1.2.3
- # via pep517
-typed-ast==1.4.1
- # via
- # -c requirements/_test.txt
- # black
-typing-extensions==4.0.1
- # via black
-watchdog==2.1.6
- # via -r requirements/_tools.in
-wheel==0.37.1
- # via pip-tools
-zipp==3.4.0
- # via
- # -c requirements/_test.txt
- # importlib-metadata
- # pep517
-
-# The following packages are considered to be unsafe in a requirements file:
-# pip
-# setuptools
diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml
index 9beacf76c34..c80befe2316 100644
--- a/services/docker-compose-ops.yml
+++ b/services/docker-compose-ops.yml
@@ -111,6 +111,7 @@ services:
- "4318:4318" # OTLP HTTP receiver
networks:
- simcore_default
+ - interactive_services_subnet
environment:
TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE: ${TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE}
TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE}
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 45e843ad712..2f039977889 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -42,6 +42,7 @@ services:
WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST}
WEBSERVER_PORT: ${WB_API_WEBSERVER_PORT}
WEBSERVER_SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY}
+ API_SERVER_TRACING: ${API_SERVER_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
@@ -127,6 +128,7 @@ services:
REGISTRY_URL: ${REGISTRY_URL}
REGISTRY_SSL: ${REGISTRY_SSL}
REGISTRY_AUTH: ${REGISTRY_AUTH}
+ AUTOSCALING_TRACING: ${AUTOSCALING_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
volumes:
@@ -168,6 +170,7 @@ services:
RABBIT_PORT: ${RABBIT_PORT}
RABBIT_SECURE: ${RABBIT_SECURE}
RABBIT_USER: ${RABBIT_USER}
+ CATALOG_TRACING: ${CATALOG_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
networks:
@@ -236,6 +239,7 @@ services:
WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: ${WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS}
WORKERS_EC2_INSTANCES_SUBNET_ID: ${WORKERS_EC2_INSTANCES_SUBNET_ID}
WORKERS_EC2_INSTANCES_CUSTOM_TAGS: ${WORKERS_EC2_INSTANCES_CUSTOM_TAGS}
+ CLUSTERS_KEEPER_TRACING: ${CLUSTERS_KEEPER_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
secrets: *dask_tls_secrets
@@ -278,6 +282,7 @@ services:
SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet
STORAGE_ENDPOINT: ${STORAGE_ENDPOINT}
SWARM_STACK_NAME: ${SWARM_STACK_NAME:-simcore}
+ DIRECTOR_TRACING: ${DIRECTOR_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE}
TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT}
TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack}
@@ -383,6 +388,7 @@ services:
SIMCORE_SERVICES_NETWORK_NAME: ${SIMCORE_SERVICES_NETWORK_NAME}
SWARM_STACK_NAME: ${SWARM_STACK_NAME}
TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE}
+ DIRECTOR_V2_TRACING: ${DIRECTOR_V2_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
@@ -434,6 +440,7 @@ services:
EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH}
EFS_ONLY_ENABLED_FOR_USERIDS: ${EFS_ONLY_ENABLED_FOR_USERIDS}
EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY}
+ EFS_GUARDIAN_TRACING: ${EFS_GUARDIAN_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
invitations:
@@ -452,6 +459,7 @@ services:
INVITATIONS_USERNAME: ${INVITATIONS_USERNAME}
LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED}
LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING}
+ INVITATIONS_TRACING: ${INVITATIONS_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
payments:
@@ -496,6 +504,7 @@ services:
SMTP_PORT: ${SMTP_PORT}
SMTP_PROTOCOL: ${SMTP_PROTOCOL}
SMTP_USERNAME: ${SMTP_USERNAME}
+ PAYMENTS_TRACING: ${PAYMENTS_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
@@ -532,6 +541,7 @@ services:
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC}
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL}
RESOURCE_USAGE_TRACKER_S3: ${RESOURCE_USAGE_TRACKER_S3}
+ RESOURCE_USAGE_TRACKER_TRACING: ${RESOURCE_USAGE_TRACKER_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_PORT}
@@ -560,6 +570,7 @@ services:
DYNAMIC_SCHEDULER_LOGLEVEL: ${DYNAMIC_SCHEDULER_LOGLEVEL}
DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT: ${DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT}
DYNAMIC_SCHEDULER_PROFILING: ${DYNAMIC_SCHEDULER_PROFILING}
+ DYNAMIC_SCHEDULER_TRACING: ${DYNAMIC_SCHEDULER_TRACING}
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
static-webserver:
@@ -1042,6 +1053,10 @@ services:
RABBIT_USER: ${RABBIT_USER}
RABBIT_SECURE: ${RABBIT_SECURE}
+ AGENT_TRACING: ${AGENT_TRACING}
+ TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
+ TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
+
dask-sidecar:
image: ${DOCKER_REGISTRY:-itisfoundation}/dask-sidecar:${DOCKER_IMAGE_TAG:-latest}
init: true
diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt
index e1302cc23b9..2259db3f8c3 100644
--- a/services/dynamic-scheduler/requirements/_base.txt
+++ b/services/dynamic-scheduler/requirements/_base.txt
@@ -51,9 +51,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==24.2.0
@@ -196,6 +194,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -215,6 +214,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -223,6 +223,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -242,6 +244,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -249,6 +252,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.7
# via
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
index 9b7fd4be9d4..19d93b3a6f1 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
@@ -37,6 +37,7 @@ def __init__(self, app: FastAPI) -> None:
DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S
),
extra_allowed_method_names={"attach_lifespan_to"},
+ tracing_settings=settings.DYNAMIC_SCHEDULER_TRACING,
)
@retry_on_errors()
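
Note on the change above: the new `tracing_settings` argument hands the service's tracing configuration to the thin httpx client, so requests to director-v2 are traced like the server side. This plausibly builds on the `opentelemetry-instrumentation-httpx` dependency added throughout this PR; a minimal per-client sketch (not the repo's exact wiring):

```python
import httpx
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor

# Instrument one client instead of every httpx client in the process;
# the base URL is illustrative.
client = httpx.AsyncClient(base_url="http://director-v2:8000")
HTTPXClientInstrumentor.instrument_client(client)  # one span per outgoing request
```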
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
index 84131eaf54b..ff7d53920bf 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
@@ -4,6 +4,8 @@
from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
+
_DECODE_DBS: Final[set[RedisDatabase]] = {
RedisDatabase.LOCKS,
}
@@ -24,6 +26,7 @@ async def on_startup() -> None:
{RedisManagerDBConfig(x, decode_responses=False) for x in _BINARY_DBS}
| {RedisManagerDBConfig(x, decode_responses=True) for x in _DECODE_DBS},
settings,
+ client_name=APP_NAME,
)
await manager.setup()
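
`client_name=APP_NAME` labels the Redis connections with the owning service, which makes them distinguishable in `CLIENT LIST` on the shared Redis instance. Assuming `RedisClientSDK` forwards the value to redis-py's `client_name` parameter (an assumption, not shown in this diff), the effect looks like:

```python
import asyncio
from redis.asyncio import Redis

async def main() -> None:
    # "dynamic-scheduler" stands in for the repo's APP_NAME constant
    r = Redis(host="localhost", port=6379, client_name="dynamic-scheduler")
    print(await r.client_getname())  # -> dynamic-scheduler
    await r.aclose()  # redis-py >= 5; use close() on 4.x

asyncio.run(main())
```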
diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py
index 8d5b52096c1..1071b9a103e 100644
--- a/services/dynamic-scheduler/tests/conftest.py
+++ b/services/dynamic-scheduler/tests/conftest.py
@@ -74,7 +74,10 @@ def app_environment(
) -> EnvVarsDict:
return setenvs_from_dict(
monkeypatch,
- {**docker_compose_service_dynamic_scheduler_env_vars},
+ {
+ **docker_compose_service_dynamic_scheduler_env_vars,
+ "DYNAMIC_SCHEDULER_TRACING": "null",
+ },
)
@@ -132,7 +135,9 @@ async def app(
@pytest.fixture
async def remove_redis_data(redis_service: RedisSettings) -> None:
async with RedisClientsManager(
- {RedisManagerDBConfig(x) for x in RedisDatabase}, redis_service
+ {RedisManagerDBConfig(x) for x in RedisDatabase},
+ redis_service,
+ client_name="pytest",
) as manager:
await logged_gather(
*[manager.client(d).redis.flushall() for d in RedisDatabase]
diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt
index 77259d78369..e87c25b3aa3 100644
--- a/services/dynamic-sidecar/requirements/_base.txt
+++ b/services/dynamic-sidecar/requirements/_base.txt
@@ -92,7 +92,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -287,6 +286,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -310,6 +310,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -324,6 +325,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -349,6 +352,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -356,6 +360,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
index cce1b9e8a4e..9e0bfbb25e1 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
@@ -9,6 +9,7 @@
get_common_oas_options,
override_fastapi_openapi_method,
)
+from servicelib.fastapi.tracing import setup_tracing
from servicelib.logging_utils import config_all_loggers
from simcore_sdk.node_ports_common.exceptions import NodeNotFound
@@ -192,6 +193,9 @@ def create_app():
if application_settings.are_prometheus_metrics_enabled:
setup_prometheus_metrics(app)
+ if application_settings.DYNAMIC_SIDECAR_TRACING:
+ setup_tracing(app, application_settings.DYNAMIC_SIDECAR_TRACING, PROJECT_NAME)
+
# ERROR HANDLERS ------------
app.add_exception_handler(
NodeNotFound, node_not_found_error_handler # type: ignore[arg-type]
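
As in the other services touched by this PR, tracing is wired at app construction only when the settings field parsed to a non-`None` value. A standalone analogue using the stock FastAPI instrumentor (the repo's `setup_tracing` presumably also configures the exporter toward the collector):

```python
from fastapi import FastAPI
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor

def create_app(tracing_settings: object | None) -> FastAPI:
    app = FastAPI()
    if tracing_settings is not None:  # mirrors `if application_settings.DYNAMIC_SIDECAR_TRACING:`
        FastAPIInstrumentor.instrument_app(app)
    return app
```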
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
index f915ccce867..4db0148dda6 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
@@ -31,6 +31,7 @@
from settings_library.resource_usage_tracker import (
DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL,
)
+from settings_library.tracing import TracingSettings
from settings_library.utils_logging import MixinLoggingSettings
@@ -197,6 +198,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
json_schema_extra={"auto_default_from_env": True}
)
+ DYNAMIC_SIDECAR_TRACING: TracingSettings | None = Field(
+ auto_default_from_env=True, description="settings for opentelemetry tracing"
+ )
+
@property
def are_prometheus_metrics_enabled(self) -> bool:
return ( # pylint: disable=no-member
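
The `TracingSettings | None` field is what turns the env-var convention into a runtime switch: `{}` yields a populated settings object, while `null` yields `None` (which is why the test fixtures in this PR set `"DYNAMIC_SIDECAR_TRACING": "null"` to keep tracing off). A plain-pydantic sketch of the same shape, without the repo's `auto_default_from_env` machinery and with illustrative field names:

```python
from pydantic import BaseModel  # pydantic v2 API

class TracingSettingsSketch(BaseModel):
    collector_endpoint: str = "http://opentelemetry-collector"  # illustrative
    collector_port: int = 4318  # illustrative

class SidecarSettingsSketch(BaseModel):
    DYNAMIC_SIDECAR_TRACING: TracingSettingsSketch | None = None

enabled = SidecarSettingsSketch.model_validate({"DYNAMIC_SIDECAR_TRACING": {}})
disabled = SidecarSettingsSketch.model_validate({"DYNAMIC_SIDECAR_TRACING": None})
assert enabled.DYNAMIC_SIDECAR_TRACING is not None
assert disabled.DYNAMIC_SIDECAR_TRACING is None
```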
diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py
index 04e76ecd7e2..d575cdc0db8 100644
--- a/services/dynamic-sidecar/tests/conftest.py
+++ b/services/dynamic-sidecar/tests/conftest.py
@@ -199,6 +199,7 @@ def base_mock_envs(
"REGISTRY_SSL": "false",
}
),
+ "DYNAMIC_SIDECAR_TRACING": "null",
}
diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt
index 0718b5e25f0..b627b9f2146 100644
--- a/services/efs-guardian/requirements/_base.txt
+++ b/services/efs-guardian/requirements/_base.txt
@@ -84,9 +84,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==24.2.0
@@ -279,6 +277,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -302,6 +301,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -312,6 +312,8 @@ opentelemetry-instrumentation-botocore==0.48b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -339,6 +341,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -346,6 +349,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.7
# via
diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt
index 57f9f8951b4..c2b186a1310 100644
--- a/services/efs-guardian/requirements/_test.txt
+++ b/services/efs-guardian/requirements/_test.txt
@@ -27,10 +27,6 @@ anyio==4.6.2.post1
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==24.2.0
# via
# -c requirements/_base.txt
diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
index 20cbcc0a4db..4876e5b8b21 100644
--- a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
+++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ..._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_lock_client_sdk = None
settings: RedisSettings = app.state.settings.EFS_GUARDIAN_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await lock_client.setup()
async def on_shutdown() -> None:
diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt
index 0f52a35fd85..2f391b7a2c8 100644
--- a/services/invitations/requirements/_base.txt
+++ b/services/invitations/requirements/_base.txt
@@ -43,8 +43,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -173,6 +171,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -191,12 +190,15 @@ opentelemetry-instrumentation==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.47b0
@@ -215,6 +217,7 @@ opentelemetry-semantic-conventions==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -222,6 +225,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py
index fde239d5332..1bed4825448 100644
--- a/services/invitations/tests/unit/conftest.py
+++ b/services/invitations/tests/unit/conftest.py
@@ -75,6 +75,7 @@ def app_environment(
"INVITATIONS_DEFAULT_PRODUCT": default_product,
"INVITATIONS_USERNAME": fake_user_name,
"INVITATIONS_PASSWORD": fake_password,
+ "INVITATIONS_TRACING": "null",
},
)
diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt
index de6926f398d..9c37389b68b 100644
--- a/services/payments/requirements/_base.txt
+++ b/services/payments/requirements/_base.txt
@@ -52,9 +52,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -238,6 +236,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -257,6 +256,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -265,6 +265,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -284,6 +286,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -291,6 +294,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.6
# via
diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py
index f7989f6fa56..b854c915d41 100644
--- a/services/payments/src/simcore_service_payments/services/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py
@@ -25,6 +25,7 @@
HealthMixinMixin,
)
from servicelib.fastapi.httpx_utils import to_curl_command
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.settings import ApplicationSettings
from ..models.payments_gateway import (
@@ -214,5 +215,7 @@ def setup_payments_gateway(app: FastAPI):
secret=settings.PAYMENTS_GATEWAY_API_SECRET.get_secret_value()
),
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.attach_lifespan_to(app)
api.set_to_app_state(app)
diff --git a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
index 0051bc6035f..6ae5d424fdf 100644
--- a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
+++ b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
@@ -25,6 +25,7 @@
BaseHTTPApi,
HealthMixinMixin,
)
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.settings import ApplicationSettings
@@ -73,5 +74,7 @@ def setup_resource_usage_tracker(app: FastAPI):
api = ResourceUsageTrackerApi.from_client_kwargs(
base_url=settings.PAYMENTS_RESOURCE_USAGE_TRACKER.base_url,
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.set_to_app_state(app)
api.attach_lifespan_to(app)
diff --git a/services/payments/src/simcore_service_payments/services/stripe.py b/services/payments/src/simcore_service_payments/services/stripe.py
index c640c10f716..9a701965beb 100644
--- a/services/payments/src/simcore_service_payments/services/stripe.py
+++ b/services/payments/src/simcore_service_payments/services/stripe.py
@@ -19,6 +19,7 @@
BaseHTTPApi,
HealthMixinMixin,
)
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.errors import StripeRuntimeError
from ..core.settings import ApplicationSettings
@@ -91,6 +92,8 @@ def setup_stripe(app: FastAPI):
base_url=settings.PAYMENTS_STRIPE_URL,
auth=_StripeBearerAuth(settings.PAYMENTS_STRIPE_API_SECRET.get_secret_value()),
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.set_to_app_state(app)
api.attach_lifespan_to(app)
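
The same `if settings.PAYMENTS_TRACING: setup_httpx_client_tracing(api.client)` guard now appears for the gateway, resource-usage-tracker and Stripe clients. A hypothetical helper (not part of this PR) would keep the three call sites uniform:

```python
from servicelib.fastapi.tracing import setup_httpx_client_tracing  # as imported above

def setup_client_tracing_if_enabled(client, tracing_settings) -> None:
    """Hypothetical convenience wrapper: one guard instead of three copies."""
    if tracing_settings:
        setup_httpx_client_tracing(client)
```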
diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py
index 3691f3c43d2..220e1edc48a 100644
--- a/services/payments/tests/conftest.py
+++ b/services/payments/tests/conftest.py
@@ -83,6 +83,7 @@ def app_environment(
"PAYMENTS_ACCESS_TOKEN_SECRET_KEY": secret_key,
"PAYMENTS_USERNAME": faker.user_name(),
"PAYMENTS_PASSWORD": faker.password(),
+ "PAYMENTS_TRACING": "null",
},
)
diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt
index 2b6c53792b1..b1c8e1f6878 100644
--- a/services/resource-usage-tracker/requirements/_base.txt
+++ b/services/resource-usage-tracker/requirements/_base.txt
@@ -84,9 +84,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -301,6 +299,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -324,6 +323,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -334,6 +334,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -361,6 +363,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -368,6 +371,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt
index 61e96529dd0..484ce4158da 100644
--- a/services/resource-usage-tracker/requirements/_test.txt
+++ b/services/resource-usage-tracker/requirements/_test.txt
@@ -14,10 +14,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
index ab1dfa3467c..d433237ea2a 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
@@ -21,7 +21,7 @@
info.api_prefix_path_tag
)
SUMMARY: Final[str] = info.get_summary()
-
+APP_NAME: Final[str] = PROJECT_NAME
# NOTE: https://texteditor.com/ascii-frames/
APP_STARTED_BANNER_MSG = r"""
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
index 922b0e7e49e..0aece119077 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ..._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = app.state.settings.RESOURCE_USAGE_TRACKER_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js
index c5f760188e5..0b18c01bd22 100644
--- a/services/static-webserver/client/source/class/osparc/Application.js
+++ b/services/static-webserver/client/source/class/osparc/Application.js
@@ -222,9 +222,6 @@ qx.Class.define("osparc.Application", {
__setDeviceSpecificIcons: function() {
const isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !window.MSStream;
const isAndroid = /android/i.test(navigator.userAgent);
- const isWindows = /windows/i.test(navigator.userAgent);
- // const productColor = qx.theme.manager.Color.getInstance().resolve("product-color");
- // const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color");
// default icons
this.__updateMetaTags();
this.__setDefaultIcons()
@@ -232,8 +229,6 @@ qx.Class.define("osparc.Application", {
this.__setIOSpIcons();
} else if (isAndroid) {
this.__setGoogleIcons();
- } else if (isWindows) {
- // this.__updateBrowserConfig(this.__getProductMetaData().productColor);
}
},
@@ -246,16 +241,14 @@ qx.Class.define("osparc.Application", {
}
const productColor = qx.theme.manager.Color.getInstance().resolve("product-color");
- const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color");
return {
productName: productName,
productColor: productColor,
- backgroundColor: backgroundColor
}
},
__updateMetaTags: function() {
- // check device type and only set the icons for the divice type
+ // check device type and only set the icons for the device type
// i.e iOS, Android or windows etc
const themeColorMeta = document.querySelector("meta[name='theme-color']");
const tileColorMeta = document.querySelector("meta[name='msapplication-TileColor']");
diff --git a/services/static-webserver/client/source/class/osparc/NewRelease.js b/services/static-webserver/client/source/class/osparc/NewRelease.js
index af6c23f34eb..bac9d1efb25 100644
--- a/services/static-webserver/client/source/class/osparc/NewRelease.js
+++ b/services/static-webserver/client/source/class/osparc/NewRelease.js
@@ -44,13 +44,19 @@ qx.Class.define("osparc.NewRelease", {
/**
* Compare the latest version provided by the backend with the one loaded in the browser (might be an old cached one)
*/
- isMyFrontendOld: async function() {
- const lastUICommit = await osparc.store.AppSummary.getLatestUIFromBE();
- const thisUICommit = osparc.utils.LibVersions.getVcsRefUI();
- if (lastUICommit && thisUICommit) {
- return lastUICommit !== thisUICommit;
- }
- return false;
+ isMyFrontendOld: function() {
+ return new Promise((resolve, reject) => {
+ osparc.store.AppSummary.getLatestUIFromBE()
+ .then(lastUICommit => {
+ const thisUICommit = osparc.utils.LibVersions.getVcsRefUI();
+ if (lastUICommit && thisUICommit) {
+ resolve(lastUICommit !== thisUICommit)
+ } else {
+ reject();
+ }
+ })
+ .catch(() => reject());
+ });
}
},
diff --git a/services/static-webserver/client/source/class/osparc/NewUITracker.js b/services/static-webserver/client/source/class/osparc/NewUITracker.js
index 04a19536128..c85fb3f9390 100644
--- a/services/static-webserver/client/source/class/osparc/NewUITracker.js
+++ b/services/static-webserver/client/source/class/osparc/NewUITracker.js
@@ -27,21 +27,24 @@ qx.Class.define("osparc.NewUITracker", {
__checkInterval: null,
startTracker: function() {
- const checkNewUI = async () => {
- const newReleaseAvailable = await osparc.NewRelease.isMyFrontendOld();
- if (newReleaseAvailable) {
- let msg = "";
- msg += qx.locale.Manager.tr("A new version of the application is now available.");
- msg += "
";
- msg += qx.locale.Manager.tr("Click the Reload button to get the latest features.");
- // permanent message
- const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({
- maxWidth: 500
- });
- const reloadButton = osparc.utils.Utils.reloadNoCacheButton();
- flashMessage.addWidget(reloadButton);
- this.stopTracker();
- }
+ const checkNewUI = () => {
+ osparc.NewRelease.isMyFrontendOld()
+ .then(newReleaseAvailable => {
+ if (newReleaseAvailable) {
+ let msg = "";
+ msg += qx.locale.Manager.tr("A new version of the application is now available.");
+ msg += "
";
+ msg += qx.locale.Manager.tr("Click the Reload button to get the latest features.");
+ // permanent message
+ const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({
+ maxWidth: 500
+ });
+ const reloadButton = osparc.utils.Utils.reloadNoCacheButton();
+ flashMessage.addWidget(reloadButton);
+ this.stopTracker();
+ }
+ })
+ .catch(() => setTimeout(() => checkNewUI(), 5*1000));
};
checkNewUI();
this.__checkInterval = setInterval(checkNewUI, this.self().CHECK_INTERVAL);
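
The refactor above distinguishes "backend says the UI is current" (resolve with `false`) from "the check itself failed" (reject), and retries a failed check after 5 s instead of staying silent until the next interval. For consistency with the other examples in this PR description, a Python sketch of that polling-with-retry shape (all names are illustrative):

```python
import asyncio

async def poll_new_ui(fetch_latest_commit, this_commit: str, interval_s: float = 3600) -> None:
    while True:
        try:
            latest = await fetch_latest_commit()  # may raise on backend errors
        except Exception:
            await asyncio.sleep(5)  # transient failure: retry soon, don't assume "up to date"
            continue
        if latest and latest != this_commit:
            print("A new version of the application is now available.")
            return  # stop tracking once the user has been notified
        await asyncio.sleep(interval_s)
```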
diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
index 8118ecdc9f8..1e7cf123b37 100644
--- a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
+++ b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
@@ -29,7 +29,7 @@ qx.Class.define("osparc.auth.LoginPageS4L", {
const layout = new qx.ui.layout.HBox();
this._setLayout(layout);
- this.setBackgroundColor("primary-background-color");
+ this.setBackgroundColor("rgba(0, 20, 46, 1)");
this._removeAll();
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
index 8d59dee3728..1b7a8fe6e82 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
@@ -926,7 +926,7 @@ qx.Class.define("osparc.dashboard.CardBase", {
},
_filterTags: function(tags) {
- const checks = this.getTags().map(tag => tag.id);
+ const checks = this.getTags().map(tag => tag.getTagId());
return this.self().filterTags(checks, tags);
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
index cc714440242..4a1420ade43 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
@@ -181,9 +181,6 @@ qx.Class.define("osparc.dashboard.Dashboard", {
const store = osparc.store.Store.getInstance();
preResourcePromises.push(store.getAllGroupsAndMembers());
preResourcePromises.push(osparc.store.Services.getServicesLatest(false));
- if (permissions.canDo("study.tag")) {
- preResourcePromises.push(osparc.data.Resources.get("tags"));
- }
Promise.all(preResourcePromises)
.then(() => {
[
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
index 526f7032c27..0971a7d4990 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
@@ -46,7 +46,8 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", {
"folderSelected": "qx.event.type.Data",
"folderUpdated": "qx.event.type.Data",
"moveFolderToRequested": "qx.event.type.Data",
- "deleteFolderRequested": "qx.event.type.Data"
+ "deleteFolderRequested": "qx.event.type.Data",
+ "changeContext": "qx.event.type.Data",
},
properties: {
@@ -186,19 +187,38 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", {
position: "bottom-right"
});
- const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12");
- editButton.addListener("execute", () => this.__editFolder(), this);
- menu.add(editButton);
+ const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext();
+ if (
+ studyBrowserContext === "search" ||
+ studyBrowserContext === "studiesAndFolders"
+ ) {
+ const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12");
+ editButton.addListener("execute", () => this.__editFolder(), this);
+ menu.add(editButton);
+
+ if (studyBrowserContext === "search") {
+ const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12");
+ openLocationButton.addListener("execute", () => {
+ const folder = this.getFolder();
+ this.fireDataEvent("changeContext", {
+ context: "studiesAndFolders",
+ workspaceId: folder.getWorkspaceId(),
+ folderId: folder.getParentFolderId(),
+ });
+ }, this);
+ menu.add(openLocationButton);
+ }
- const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12");
- moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this);
- menu.add(moveToButton);
+ const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12");
+ moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this);
+ menu.add(moveToButton);
- menu.addSeparator();
+ menu.addSeparator();
- const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12");
- deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this);
- menu.add(deleteButton);
+ const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12");
+ deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this);
+ menu.add(deleteButton);
+ }
menuButton.setMenu(menu);
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
index 148a6b114bb..828a0c74ba7 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
@@ -262,7 +262,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", {
tagsContainer.setVisibility(tags.length ? "visible" : "excluded");
tagsContainer.removeAll();
tags.forEach(tag => {
- const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter");
+ const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter");
tagUI.set({
font: "text-12",
toolTipText: this.tr("Click to filter by this Tag")
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js
index e89e03a0943..71f59b970df 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js
@@ -237,7 +237,7 @@ qx.Class.define("osparc.dashboard.ListButtonItem", {
const tagsContainer = this.getChildControl("tags");
tagsContainer.removeAll();
tags.forEach(tag => {
- const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter");
+ const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter");
tagUI.set({
alignY: "middle",
font: "text-12",
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js
index 11a744ba9ea..cd9a98d1d6f 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js
@@ -39,7 +39,14 @@ qx.Class.define("osparc.dashboard.MoveResourceTo", {
const item = selection.getItem(0);
this.__selectedWorkspaceId = item.getWorkspaceId();
this.__selectedFolderId = item.getFolderId();
- moveButton.setEnabled(this.__currentWorkspaceId !== this.__selectedWorkspaceId || this.__currentFolderId !== this.__selectedFolderId);
+ if (this.__selectedWorkspaceId === -1) {
+ // "Shared Workspaces"
+ moveButton.setEnabled(false);
+ } else {
+ // In principle, valid location
+ // disable if it's the current location
+ moveButton.setEnabled(this.__currentWorkspaceId !== this.__selectedWorkspaceId || this.__currentFolderId !== this.__selectedFolderId);
+ }
}
}, this);
moveButton.addListener("execute", () => {
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
index 9334861f11c..344507aad9a 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
@@ -95,7 +95,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
return isLogged;
},
- startStudyById: function(studyId, openCB, cancelCB, isStudyCreation = false) {
+ startStudyById: function(studyId, openCB, cancelCB, showStudyOptions = false) {
if (!osparc.dashboard.ResourceBrowserBase.checkLoggedIn()) {
return;
}
@@ -116,7 +116,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
};
osparc.data.Resources.fetch("studies", "getWallet", params)
.then(wallet => {
- if (isStudyCreation || wallet === null || osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null) {
+ if (
+ showStudyOptions ||
+ wallet === null ||
+ osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null
+ ) {
// pop up study options if the study was just created or if it has no wallet assigned or user has no access to it
const resourceSelector = new osparc.study.StudyOptions(studyId);
const win = osparc.study.StudyOptions.popUpInWindow(resourceSelector);
@@ -276,6 +280,14 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
const workspaceId = e.getData();
this._workspaceSelected(workspaceId);
}, this);
+ resourcesContainer.addListener("changeContext", e => {
+ const {
+ context,
+ workspaceId,
+ folderId,
+ } = e.getData();
+ this._changeContext(context, workspaceId, folderId);
+ }, this);
resourcesContainer.addListener("workspaceUpdated", e => this._workspaceUpdated(e.getData()));
resourcesContainer.addListener("deleteWorkspaceRequested", e => this._deleteWorkspaceRequested(e.getData()));
@@ -475,6 +487,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
throw new Error("Abstract method called!");
},
+ _changeContext: function(context, workspaceId, folderId) {
+ throw new Error("Abstract method called!");
+ },
+
_folderSelected: function(folderId) {
throw new Error("Abstract method called!");
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
index 187f6b441d3..fa99ba050dd 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
@@ -79,6 +79,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
"workspaceSelected": "qx.event.type.Data",
"workspaceUpdated": "qx.event.type.Data",
"deleteWorkspaceRequested": "qx.event.type.Data",
+ "changeContext": "qx.event.type.Data",
},
statics: {
@@ -207,7 +208,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
},
__createCard: function(resourceData) {
- const tags = resourceData.tags ? osparc.store.Store.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.id)) : [];
+ const tags = resourceData.tags ? osparc.store.Tags.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.getTagId())) : [];
const card = this.getMode() === "grid" ? new osparc.dashboard.GridButtonItem() : new osparc.dashboard.ListButtonItem();
card.set({
appearance: resourceData.type ? `pb-${resourceData.type}` : `pb-${resourceData.resourceType}`,
@@ -419,6 +420,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
"folderUpdated",
"moveFolderToRequested",
"deleteFolderRequested",
+ "changeContext",
].forEach(eName => card.addListener(eName, e => this.fireDataEvent(eName, e.getData())));
return card;
},
@@ -432,7 +434,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
},
__groupByTags: function(cards, resourceData) {
- const tags = resourceData.tags ? osparc.store.Store.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.id)) : [];
+ const tags = resourceData.tags ? osparc.store.Tags.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.getTagId())) : [];
if (tags.length === 0) {
let noGroupContainer = this.__getGroupContainer("no-group");
const card = this.__createCard(resourceData);
@@ -441,9 +443,11 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
cards.push(card);
} else {
tags.forEach(tag => {
- let groupContainer = this.__getGroupContainer(tag.id);
+ let groupContainer = this.__getGroupContainer(tag.getTagId());
if (groupContainer === null) {
- groupContainer = this.__createGroupContainer(tag.id, tag.name, tag.color);
+ groupContainer = this.__createGroupContainer(tag.getTagId(), tag.getName(), tag.getColor());
+ tag.bind("name", groupContainer, "headerLabel");
+ tag.bind("color", groupContainer, "headerColor");
groupContainer.setHeaderIcon("@FontAwesome5Solid/tag/24");
this.__groupedContainers.add(groupContainer);
this.__groupedContainers.getChildren().sort((a, b) => a.getHeaderLabel().localeCompare(b.getHeaderLabel()));
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
index a1ae4d742fa..76e9f628829 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
@@ -364,7 +364,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", {
const resourceData = this.__resourceData;
if (osparc.utils.Resources.isStudy(resourceData)) {
const id = "Billing";
- const title = this.tr("Billing Settings");
+ const title = this.tr("Tier Settings");
const iconSrc = "@FontAwesome5Solid/cogs/22";
const page = this.__billingSettings = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id);
this.__addOpenButton(page);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js
index 142cdab7d3f..0c452e3e33a 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js
@@ -158,16 +158,15 @@ qx.Class.define("osparc.dashboard.ResourceFilter", {
const maxTags = 5;
this.__tagButtons = [];
layout.removeAll();
- osparc.store.Store.getInstance().getTags().forEach((tag, idx) => {
- const button = new qx.ui.form.ToggleButton(tag.name, "@FontAwesome5Solid/tag/18");
+ osparc.store.Tags.getInstance().getTags().forEach((tag, idx) => {
+ const button = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/tag/18");
+ button.id = tag.getTagId();
+ tag.bind("name", button, "label");
+ tag.bind("color", button.getChildControl("icon"), "textColor");
osparc.utils.Utils.setIdToWidget(button, this.__resourceType + "-tagFilterItem");
- button.id = tag.id;
button.set({
appearance: "filter-toggle-button",
- value: selectedTagIds.includes(tag.id)
- });
- button.getChildControl("icon").set({
- textColor: tag.color
+ value: selectedTagIds.includes(tag.getTagId())
});
layout.add(button);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js
index b836a93ef44..5b376a6b404 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js
@@ -208,14 +208,14 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", {
},
__addTags: function(menuButton) {
- const tags = osparc.store.Store.getInstance().getTags();
+ const tags = osparc.store.Tags.getInstance().getTags();
menuButton.setVisibility(tags.length ? "visible" : "excluded");
if (tags.length) {
const tagsMenu = new qx.ui.menu.Menu();
osparc.utils.Utils.setIdToWidget(tagsMenu, "searchBarFilter-tags-menu");
tags.forEach(tag => {
- const tagButton = new qx.ui.menu.Button(tag.name, "@FontAwesome5Solid/tag/12");
- tagButton.getChildControl("icon").setTextColor(tag.color);
+ const tagButton = new qx.ui.menu.Button(tag.getName(), "@FontAwesome5Solid/tag/12");
+ tagButton.getChildControl("icon").setTextColor(tag.getColor());
tagsMenu.add(tagButton);
tagButton.addListener("execute", () => this.addTagActiveFilter(tag), this);
});
@@ -271,16 +271,17 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", {
},
addTagActiveFilter: function(tag) {
- this.__addChip("tag", tag.id, tag.name);
+ this.__addChip("tag", tag.getTagId(), tag.getName());
},
setTagsActiveFilter: function(tagIds) {
- const tags = osparc.store.Store.getInstance().getTags();
+ const tags = osparc.store.Tags.getInstance().getTags();
tags.forEach(tag => {
- if (tagIds.includes(tag.id)) {
- this.__addChip("tag", tag.id, tag.name);
+ const tagId = tag.getTagId();
+ if (tagIds.includes(tagId)) {
+ this.__addChip("tag", tagId, tag.getName());
} else {
- this.__removeChip("tag", tag.id, tag.name);
+ this.__removeChip("tag", tagId, tag.getName());
}
});
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
index 7349d7d46b5..a2de2032524 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -171,17 +171,30 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
if (
!osparc.auth.Manager.getInstance().isLoggedIn() ||
!osparc.utils.DisabledPlugins.isFoldersEnabled() ||
- this.getCurrentContext() !== "studiesAndFolders" ||
+ this.getCurrentContext() === "workspaces" ||
this.__loadingFolders
) {
return;
}
- const workspaceId = this.getCurrentWorkspaceId();
- const folderId = this.getCurrentFolderId();
this.__loadingFolders = true;
+ let request = null;
+ switch (this.getCurrentContext()) {
+ case "search": {
+ const filterData = this._searchBarFilter.getFilterData();
+ const text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
+ request = osparc.store.Folders.getInstance().searchFolders(text, this.getOrderBy());
+ break;
+ }
+ case "studiesAndFolders": {
+ const workspaceId = this.getCurrentWorkspaceId();
+ const folderId = this.getCurrentFolderId();
+ request = osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy());
+ break;
+ }
+ }
this.__setFoldersToList([]);
- osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy())
+ request
.then(folders => {
this.__setFoldersToList(folders);
})
@@ -374,18 +387,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
newWorkspaceCard.setCardKey("new-workspace");
newWorkspaceCard.subscribeToFilterGroup("searchBarFilter");
[
- "createWorkspace",
- "updateWorkspace"
+ "workspaceCreated",
+ "workspaceDeleted",
+ "workspaceUpdated",
].forEach(e => {
- newWorkspaceCard.addListener(e, () => {
- this.__reloadWorkspaces();
- });
+ newWorkspaceCard.addListener(e, () => this.__reloadWorkspaces());
});
this._resourcesContainer.addNewWorkspaceCard(newWorkspaceCard);
},
_workspaceSelected: function(workspaceId) {
- this.__changeContext("studiesAndFolders", workspaceId, null);
+ this._changeContext("studiesAndFolders", workspaceId, null);
},
_workspaceUpdated: function() {
@@ -445,7 +457,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
_folderSelected: function(folderId) {
- this.__changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId);
+ this._changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId);
},
_folderUpdated: function() {
@@ -654,17 +666,23 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const requestParams = {};
requestParams.orderBy = JSON.stringify(this.getOrderBy());
- const filterData = this._searchBarFilter.getFilterData();
- // Use the ``search`` functionality only if the user types some text
- // tags should only be used to filter the current context (search context ot workspace/folder context)
- if (filterData.text) {
- requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
- requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : "";
- return requestParams;
+ switch (this.getCurrentContext()) {
+ case "studiesAndFolders":
+ requestParams.workspaceId = this.getCurrentWorkspaceId();
+ requestParams.folderId = this.getCurrentFolderId();
+ break;
+ case "search": {
+ // Use the ``search`` functionality only if the user types some text
+ // tags should only be used to filter the current context (search context or workspace/folder context)
+ const filterData = this._searchBarFilter.getFilterData();
+ if (filterData.text) {
+ requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
+ requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : "";
+ }
+ break;
+ }
}
- requestParams.workspaceId = this.getCurrentWorkspaceId();
- requestParams.folderId = this.getCurrentFolderId();
return requestParams;
},
@@ -689,10 +707,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
resolveWResponse: true
};
- if ("text" in requestParams) {
- return osparc.data.Resources.fetch("studies", "getPageSearch", params, options);
+ let request = null;
+ switch (this.getCurrentContext()) {
+ case "search":
+ request = osparc.data.Resources.fetch("studies", "getPageSearch", params, options);
+ break;
+ case "studiesAndFolders":
+ request = osparc.data.Resources.fetch("studies", "getPage", params, options);
+ break;
}
- return osparc.data.Resources.fetch("studies", "getPage", params, options);
+ return request;
},
invalidateStudies: function() {
@@ -722,7 +746,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
break;
case "tis":
case "tiplite":
- this.__addTIPPlusButtons();
+ this.__addTIPPlusButton();
break;
case "s4l":
case "s4lacad":
@@ -746,24 +770,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this._resourcesContainer.addNonResourceCard(newStudyBtn);
},
- __addTIPPlusButtons: function() {
- osparc.data.Resources.get("templates")
- .then(templates => {
- if (templates) {
- osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json")
- .then(newStudiesData => {
- const product = osparc.product.Utils.getProductName()
- if (product in newStudiesData) {
- const mode = this._resourcesContainer.getMode();
- const title = this.tr("New Plan");
- const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title);
- newStudyBtn.setCardKey("new-study");
- newStudyBtn.subscribeToFilterGroup("searchBarFilter");
- osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn");
- this._resourcesContainer.addNonResourceCard(newStudyBtn);
- newStudyBtn.addListener("execute", () => {
- newStudyBtn.setValue(false);
+ __addTIPPlusButton: function() {
+ const mode = this._resourcesContainer.getMode();
+ const title = this.tr("New Plan");
+ const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title);
+ newStudyBtn.setCardKey("new-study");
+ newStudyBtn.subscribeToFilterGroup("searchBarFilter");
+ osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn");
+ this._resourcesContainer.addNonResourceCard(newStudyBtn);
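+ // keep the button disabled until the product's new-plan config is fetched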
+ newStudyBtn.setEnabled(false);
+
+ osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json")
+ .then(newStudiesData => {
+ const product = osparc.product.Utils.getProductName();
+ if (product in newStudiesData) {
+ newStudyBtn.setEnabled(true);
+ newStudyBtn.addListener("execute", () => {
+ newStudyBtn.setValue(false);
+ osparc.data.Resources.get("templates")
+ .then(templates => {
+ if (templates) {
const newStudies = new osparc.dashboard.NewStudies(newStudiesData[product]);
newStudies.addListener("templatesLoaded", () => {
newStudies.setGroupBy("category");
@@ -782,9 +809,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow");
});
- });
- }
- });
+ }
+ });
+ });
}
});
},
@@ -887,10 +914,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
this._resourcesContainer.addListener("changeSelection", e => {
+ const currentContext = this.getCurrentContext();
const selection = e.getData();
studiesMoveButton.set({
- visibility: selection.length ? "visible" : "excluded",
+ visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded",
label: selection.length > 1 ? this.tr("Move selected")+" ("+selection.length+")" : this.tr("Move")
});
@@ -911,7 +939,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
header.addListener("locationChanged", () => {
const workspaceId = header.getCurrentWorkspaceId();
const folderId = header.getCurrentFolderId();
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}, this);
const workspacesAndFoldersTree = this._resourceFilter.getWorkspacesAndFoldersTree();
@@ -919,27 +947,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const context = e.getData();
const workspaceId = context["workspaceId"];
if (workspaceId === -1) {
- this.__changeContext("workspaces");
+ this._changeContext("workspaces");
} else {
const folderId = context["folderId"];
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}
}, this);
this._searchBarFilter.addListener("filterChanged", e => {
const filterData = e.getData();
if (filterData.text) {
- this.__changeContext("search");
+ this._changeContext("search");
} else {
const workspaceId = this.getCurrentWorkspaceId();
const folderId = this.getCurrentFolderId();
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}
});
}
},
- __changeContext: function(context, workspaceId = null, folderId = null) {
+ _changeContext: function(context, workspaceId = null, folderId = null) {
if (osparc.utils.DisabledPlugins.isFoldersEnabled()) {
if (
context !== "search" && // reload studies for a new search
@@ -951,6 +979,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return;
}
+ osparc.store.Store.getInstance().setStudyBrowserContext(context);
this.set({
currentContext: context,
currentWorkspaceId: workspaceId,
@@ -963,7 +992,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this._resourcesContainer.setResourcesToList([]);
if (context === "search") {
- this.__setFoldersToList([]);
+ this.__reloadFolders();
this.__reloadStudies();
} else if (context === "workspaces") {
this._searchBarFilter.resetFilters();
@@ -1170,7 +1199,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__newStudyBtnClicked: function(button) {
button.setValue(false);
const minStudyData = osparc.data.model.Study.createMinStudyObject();
- const title = osparc.utils.Utils.getUniqueStudyName(minStudyData.name, this._resourcesList);
+ const existingNames = this._resourcesList.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(minStudyData.name, existingNames);
minStudyData["name"] = title;
minStudyData["workspaceId"] = this.getCurrentWorkspaceId();
minStudyData["folderId"] = this.getCurrentFolderId();
@@ -1190,7 +1220,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__newPlanBtnClicked: function(templateData, newStudyName) {
// do not override cached template data
const templateCopyData = osparc.utils.Utils.deepCloneObject(templateData);
- const title = osparc.utils.Utils.getUniqueStudyName(newStudyName, this._resourcesList);
+ const existingNames = this._resourcesList.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(newStudyName, existingNames);
templateCopyData.name = title;
this._showLoadingPage(this.tr("Creating ") + (newStudyName || osparc.product.Utils.getStudyAlias()));
const contextProps = {
@@ -1198,7 +1229,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
folderId: this.getCurrentFolderId(),
};
osparc.study.Utils.createStudyFromTemplate(templateCopyData, this._loadingPage, contextProps)
- .then(studyId => this.__startStudyAfterCreating(studyId))
+ .then(studyData => this.__startStudyAfterCreating(studyData["uuid"]))
.catch(err => {
this._hideLoadingPage();
osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
@@ -1341,7 +1372,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__getOpenLocationMenuButton: function(studyData) {
const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12");
openLocationButton.addListener("execute", () => {
- this.__changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]);
+ this._changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]);
}, this);
return openLocationButton;
},
@@ -1411,7 +1442,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
__getBillingMenuButton: function(card) {
- const text = osparc.utils.Utils.capitalize(this.tr("Billing Settings..."));
+ const text = osparc.utils.Utils.capitalize(this.tr("Tier Settings..."));
const studyBillingSettingsButton = new qx.ui.menu.Button(text);
studyBillingSettingsButton["billingSettingsButton"] = true;
studyBillingSettingsButton.addListener("tap", () => card.openBilling(), this);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
index 9e2ca51b434..87a6a366b58 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
@@ -339,10 +339,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", {
__editWorkspace: function() {
const workspace = osparc.store.Workspaces.getInstance().getWorkspace(this.getCurrentWorkspaceId());
- const permissionsView = new osparc.editor.WorkspaceEditor(workspace);
+ const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace);
const title = this.tr("Edit Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(permissionsView, title, 300, 200);
- permissionsView.addListener("workspaceUpdated", () => {
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150);
+ workspaceEditor.addListener("workspaceUpdated", () => {
win.close();
this.__buildLayout();
}, this);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
index d597d8a438c..7f4f0362cab 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
@@ -137,27 +137,85 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", {
return;
}
- this._showLoadingPage(this.tr("Creating ") + (templateData.name || osparc.product.Utils.getStudyAlias({firstUpperCase: true})));
- osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage)
- .then(studyId => {
- const openCB = () => this._hideLoadingPage();
- const cancelCB = () => {
- this._hideLoadingPage();
- const params = {
- url: {
- studyId
- }
+ const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true});
+ this._showLoadingPage(this.tr("Creating ") + (templateData.name || studyAlias));
+
+ const studyOptions = new osparc.study.StudyOptions();
+ // the selected options will be patched once the study is created
+ studyOptions.setPatchStudy(false);
+ studyOptions.setStudyData(templateData);
+ const win = osparc.study.StudyOptions.popUpInWindow(studyOptions);
+ win.moveItUp();
+ const cancelStudyOptions = () => {
+ this._hideLoadingPage();
+ win.close();
+ };
+ win.addListener("cancel", () => cancelStudyOptions());
+ studyOptions.addListener("cancel", () => cancelStudyOptions());
+ studyOptions.addListener("startStudy", () => {
+ const newName = studyOptions.getChildControl("title-field").getValue();
+ const walletSelection = studyOptions.getChildControl("wallet-selector").getSelection();
+ const nodesPricingUnits = studyOptions.getChildControl("study-pricing-units").getNodePricingUnits();
+ win.close();
+ this._showLoadingPage(this.tr("Creating ") + (newName || studyAlias));
+ osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage)
+ .then(newStudyData => {
+ const studyId = newStudyData["uuid"];
+ const openCB = () => {
+ this._hideLoadingPage();
};
- osparc.data.Resources.fetch("studies", "delete", params);
- };
- const isStudyCreation = true;
- this._startStudyById(studyId, openCB, cancelCB, isStudyCreation);
- })
- .catch(err => {
- this._hideLoadingPage();
- osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
- console.error(err);
- });
+ const cancelCB = () => {
+ this._hideLoadingPage();
+ const params = {
+ url: {
+ studyId
+ }
+ };
+ osparc.data.Resources.fetch("studies", "delete", params);
+ };
+
+ const promises = [];
+ // patch the name
+ if (newStudyData["name"] !== newName) {
+ promises.push(osparc.study.StudyOptions.updateName(newStudyData, newName));
+ }
+ // patch the wallet
+ if (walletSelection.length && walletSelection[0]["walletId"]) {
+ const walletId = walletSelection[0]["walletId"];
+ promises.push(osparc.study.StudyOptions.updateWallet(newStudyData["uuid"], walletId));
+ }
+ // patch the pricing units
+ // the nodeIds come from the original template; they need to be mapped to the new study's nodes
+ const workbench = newStudyData["workbench"];
+ const nodesIdsListed = [];
+ Object.keys(workbench).forEach(nodeId => {
+ const node = workbench[nodeId];
+ if (osparc.study.StudyPricingUnits.includeInList(node)) {
+ nodesIdsListed.push(nodeId);
+ }
+ });
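+ // assumes nodesPricingUnits and nodesIdsListed enumerate the same nodes in the same order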
+ nodesPricingUnits.forEach((nodePricingUnits, idx) => {
+ const selectedPricingUnitId = nodePricingUnits.getPricingUnits().getSelectedUnitId();
+ if (selectedPricingUnitId) {
+ const nodeId = nodesIdsListed[idx];
+ const pricingPlanId = nodePricingUnits.getPricingPlanId();
+ promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId));
+ }
+ });
+
+ Promise.all(promises)
+ .then(() => {
+ win.close();
+ const showStudyOptions = false;
+ this._startStudyById(studyId, openCB, cancelCB, showStudyOptions);
+ });
+ })
+ .catch(err => {
+ this._hideLoadingPage();
+ osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
+ console.error(err);
+ });
+ });
},
// LAYOUT //
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
index 5581ec3212b..4d5253410bf 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
@@ -185,7 +185,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", {
const workspace = this.getWorkspace();
const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace);
const title = this.tr("Edit Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 200);
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150);
workspaceEditor.addListener("workspaceUpdated", () => {
win.close();
this.fireDataEvent("workspaceUpdated", workspace.getWorkspaceId());
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
index fc1526b387d..ac87579355e 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
@@ -46,26 +46,29 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", {
},
events: {
- "createWorkspace": "qx.event.type.Data",
- "updateWorkspace": "qx.event.type.Data"
+ "workspaceCreated": "qx.event.type.Event",
+ "workspaceDeleted": "qx.event.type.Event",
+ "workspaceUpdated": "qx.event.type.Event",
},
members: {
__itemSelected: function(newVal) {
if (newVal) {
- const workspaceCreator = new osparc.editor.WorkspaceEditor();
+ const workspaceEditor = new osparc.editor.WorkspaceEditor();
const title = this.tr("New Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(workspaceCreator, title, 300, 200);
- workspaceCreator.addListener("workspaceCreated", e => {
- win.close();
- const newWorkspace = e.getData();
- this.fireDataEvent("createWorkspace", newWorkspace.getWorkspaceId(), this);
- const permissionsView = new osparc.share.CollaboratorsWorkspace(newWorkspace);
- const title2 = qx.locale.Manager.tr("Share Workspace");
- osparc.ui.window.Window.popUpInWindow(permissionsView, title2, 500, 500);
- permissionsView.addListener("updateAccessRights", () => this.fireDataEvent("updateWorkspace", newWorkspace.getWorkspaceId()), this);
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 500, 500).set({
+ modal: true,
+ clickAwayClose: false,
});
- workspaceCreator.addListener("cancel", () => win.close());
+ workspaceEditor.addListener("workspaceCreated", () => this.fireEvent("workspaceCreated"));
+ workspaceEditor.addListener("workspaceDeleted", () => this.fireEvent("workspaceDeleted"));
+ workspaceEditor.addListener("workspaceUpdated", () => {
+ win.close();
+ this.fireEvent("workspaceUpdated");
+ }, this);
+ workspaceEditor.addListener("updateAccessRights", () => this.fireEvent("workspaceUpdated"));
+ win.getChildControl("close-button").addListener("tap", () => workspaceEditor.cancel());
+ workspaceEditor.addListener("cancel", () => win.close());
}
this.setValue(false);
}
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
index 93f1125049e..7f35c3ff320 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
@@ -74,7 +74,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", {
osparc.store.Workspaces.getInstance().addListener("workspaceRemoved", e => {
const workspace = e.getData();
- this.__removeWorkspace(workspace);
+ this.__workspaceRemoved(workspace);
}, this);
this.getSelection().addListener("change", () => {
@@ -227,11 +227,21 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", {
this.__populateFolder(workspaceModel, workspace.getWorkspaceId(), null);
},
- __removeWorkspace: function(workspace) {
+ __workspaceRemoved: function(workspace) {
+ // remove it from the tree
const sharedWorkspaceModel = this.__getModel(-1, null);
const idx = sharedWorkspaceModel.getChildren().toArray().findIndex(w => workspace.getWorkspaceId() === w.getWorkspaceId());
if (idx > -1) {
- sharedWorkspaceModel.getChildren().toArray().splice(idx, 1);
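+ // removeAt fires the data array's change events; splicing the native array bypasses them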
+ sharedWorkspaceModel.getChildren().removeAt(idx);
+ }
+
+ // remove it from the cached models
+ const modelFound = this.__getModel(workspace.getWorkspaceId(), null);
+ if (modelFound) {
+ const index = this.__models.indexOf(modelFound);
+ if (index > -1) { // only splice array when item is found
+ this.__models.splice(index, 1); // 2nd parameter means remove one item only
+ }
}
},
@@ -283,7 +293,19 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", {
if (parentModel) {
const idx = parentModel.getChildren().toArray().findIndex(c => folder.getWorkspaceId() === c.getWorkspaceId() && folder.getFolderId() === c.getFolderId());
if (idx > -1) {
- parentModel.getChildren().toArray().splice(idx, 1);
+ parentModel.getChildren().removeAt(idx);
+ }
+ }
+
+ if (oldParentFolderId === undefined) {
+ // an undefined oldParentFolderId means the folder was removed, not moved
+ // remove it from the cached models
+ const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getFolderId());
+ if (modelFound) {
+ const index = this.__models.indexOf(modelFound);
+ if (index > -1) { // only splice array when item is found
+ this.__models.splice(index, 1); // 2nd parameter means remove one item only
+ }
}
}
},
diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js
index 5484107fd96..007ba33eddd 100644
--- a/services/static-webserver/client/source/class/osparc/data/Resources.js
+++ b/services/static-webserver/client/source/class/osparc/data/Resources.js
@@ -301,6 +301,11 @@ qx.Class.define("osparc.data.Resources", {
method: "GET",
url: statics.API + "/folders?workspace_id={workspaceId}&folder_id={folderId}&offset={offset}&limit={limit}&order_by={orderBy}"
},
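+ // paginated full-text search over folders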
+ getPageSearch: {
+ useCache: false,
+ method: "GET",
+ url: statics.API + "/folders:search?offset={offset}&limit={limit}&text={text}&order_by={orderBy}"
+ },
getOne: {
method: "GET",
url: statics.API + "/folders/{folderId}"
@@ -1368,7 +1373,7 @@ qx.Class.define("osparc.data.Resources", {
});
},
- getAllPages: function(resource, params = {}) {
+ getAllPages: function(resource, params = {}, endpoint = "getPage") {
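+ // the endpoint is parametrized so callers can also page through "getPageSearch"-like endpoints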
return new Promise((resolve, reject) => {
let resources = [];
let offset = 0;
@@ -1377,7 +1382,6 @@ qx.Class.define("osparc.data.Resources", {
}
params["url"]["offset"] = offset;
params["url"]["limit"] = 10;
- const endpoint = "getPage";
const options = {
resolveWResponse: true
};
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Folder.js b/services/static-webserver/client/source/class/osparc/data/model/Folder.js
index 1dd99d015a2..b8b9eb03b21 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/Folder.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/Folder.js
@@ -37,6 +37,7 @@ qx.Class.define("osparc.data.model.Folder", {
owner: folderData.owner,
createdAt: new Date(folderData.createdAt),
lastModified: new Date(folderData.modifiedAt),
+ trashedAt: folderData.trashedAt ? new Date(folderData.trashedAt) : this.getTrashedAt(),
});
},
@@ -95,7 +96,13 @@ qx.Class.define("osparc.data.model.Folder", {
nullable: true,
init: null,
event: "changeLastModified"
- }
+ },
+
+ trashedAt: {
+ check: "Date",
+ nullable: true,
+ init: null,
+ },
},
statics: {
diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js
index 9620c80daf1..fa037642af4 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js
@@ -84,7 +84,9 @@ qx.Class.define("osparc.data.model.IframeHandler", {
this.__unresponsiveRetries = 5;
this.__nodeState();
- this.getIFrame().resetSource();
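+ // the iframe might not have been created yet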
+ if (this.getIFrame()) {
+ this.getIFrame().resetSource();
+ }
},
__initIFrame: function() {
@@ -365,7 +367,9 @@ qx.Class.define("osparc.data.model.IframeHandler", {
// will switch to the loading page
node.resetServiceUrl();
- this.getIFrame().resetSource();
+ if (this.getIFrame()) {
+ this.getIFrame().resetSource();
+ }
this.fireEvent("iframeChanged");
}
},
@@ -396,8 +400,10 @@ qx.Class.define("osparc.data.model.IframeHandler", {
const status = node.getStatus().getInteractive();
// it might have been stopped
if (["running", "ready"].includes(status)) {
- this.getIFrame().resetSource();
- this.getIFrame().setSource(node.getServiceUrl());
+ if (this.getIFrame()) {
+ this.getIFrame().resetSource();
+ this.getIFrame().setSource(node.getServiceUrl());
+ }
// fire event to force switching to iframe's content:
// it is required in those cases where the native 'load' event isn't triggered (voila)
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js
index 598e0575d22..ab178aca669 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/Study.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js
@@ -58,7 +58,8 @@ qx.Class.define("osparc.data.model.Study", {
state: studyData.state || this.getState(),
quality: studyData.quality || this.getQuality(),
permalink: studyData.permalink || this.getPermalink(),
- dev: studyData.dev || this.getDev()
+ dev: studyData.dev || this.getDev(),
+ trashedAt: studyData.trashedAt ? new Date(studyData.trashedAt) : this.getTrashedAt(),
});
const wbData = studyData.workbench || this.getWorkbench();
@@ -209,7 +210,13 @@ qx.Class.define("osparc.data.model.Study", {
nullable: true,
event: "changeReadOnly",
init: true
- }
+ },
+
+ trashedAt: {
+ check: "Date",
+ nullable: true,
+ init: null,
+ },
// ------ ignore for serializing ------
},
@@ -218,7 +225,8 @@ qx.Class.define("osparc.data.model.Study", {
"permalink",
"state",
"pipelineRunning",
- "readOnly"
+ "readOnly",
+ "trashedAt",
],
IgnoreModelizationProps: [
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Tag.js b/services/static-webserver/client/source/class/osparc/data/model/Tag.js
new file mode 100644
index 00000000000..fc7e00a5fcc
--- /dev/null
+++ b/services/static-webserver/client/source/class/osparc/data/model/Tag.js
@@ -0,0 +1,86 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2024 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * Class that stores Tag data.
+ */
+
+qx.Class.define("osparc.data.model.Tag", {
+ extend: qx.core.Object,
+
+ /**
+ * @param tagData {Object} Object containing the serialized Tag Data
+ */
+ construct: function(tagData) {
+ this.base(arguments);
+
+ this.set({
+ tagId: tagData.id,
+ name: tagData.name,
+ description: tagData.description,
+ color: tagData.color,
+ accessRights: tagData.accessRights,
+ });
+ },
+
+ properties: {
+ tagId: {
+ check: "Number",
+ nullable: true,
+ init: null,
+ event: "changeTagId"
+ },
+
+ name: {
+ check: "String",
+ nullable: false,
+ init: null,
+ event: "changeName"
+ },
+
+ description: {
+ check: "String",
+ nullable: true,
+ init: null,
+ event: "changeDescription"
+ },
+
+ color: {
+ check: "Color",
+ event: "changeColor",
+ init: "#303030"
+ },
+
+ accessRights: {
+ check: "Object",
+ nullable: false,
+ init: null,
+ event: "changeAccessRights"
+ },
+ },
+
+ members: {
+ serialize: function() {
+ const jsonObject = {};
+ const propertyKeys = this.self().getProperties();
+ propertyKeys.forEach(key => {
+ jsonObject[key] = this.get(key);
+ });
+ return jsonObject;
+ }
+ }
+});
diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js
index d2b72acfdcc..0ccb9bbe8b9 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js
@@ -66,7 +66,7 @@ qx.Class.define("osparc.desktop.MainPage", {
preloadPromises.push(store.reloadWallets());
}
preloadPromises.push(store.getAllClassifiers(true));
- preloadPromises.push(store.getTags());
+ preloadPromises.push(osparc.store.Tags.getInstance().fetchTags());
Promise.all(preloadPromises)
.then(() => {
const mainStack = this.__createMainStack();
diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js
index 93f5f50c74d..40c99616a40 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js
@@ -61,7 +61,7 @@ qx.Class.define("osparc.desktop.MainPageDesktop", {
preloadPromises.push(store.reloadWallets());
}
preloadPromises.push(store.getAllClassifiers(true));
- preloadPromises.push(store.getTags());
+ preloadPromises.push(osparc.store.Tags.getInstance().fetchTags());
Promise.all(preloadPromises)
.then(() => {
const desktopCenter = new osparc.desktop.credits.DesktopCenter();
diff --git a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
index 593088bc4cd..e05a37f56a1 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
@@ -24,7 +24,7 @@ qx.Class.define("osparc.desktop.SlideshowView", {
this._setLayout(new qx.ui.layout.VBox());
const slideshowToolbar = this.__slideshowToolbar = new osparc.desktop.SlideshowToolbar().set({
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
const collapseWithUserMenu = this.__collapseWithUserMenu = new osparc.desktop.CollapseWithUserMenu();
diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
index 1daeea1c0f1..accb850ab5d 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
@@ -40,16 +40,10 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
TAB_BUTTON_HEIGHT: 46,
decorateSplitter: function(splitter) {
- const colorManager = qx.theme.manager.Color.getInstance();
- const binaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main"));
splitter.set({
width: 2,
- backgroundColor: binaryColor
+ backgroundColor: "workbench-view-splitter"
});
- colorManager.addListener("changeTheme", () => {
- const newBinaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main"));
- splitter.setBackgroundColor(newBinaryColor);
- }, this);
},
decorateSlider: function(slider) {
@@ -202,7 +196,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
control = new qx.ui.tabview.TabView().set({
contentPadding: osparc.widget.CollapsibleViewLight.CARET_WIDTH + 2, // collapse bar + padding
contentPaddingRight: 2,
- backgroundColor: this.self().PRIMARY_COL_BG_COLOR,
barPosition: "top"
});
const collapsibleViewLeft = this.getChildControl("collapsible-view-left");
@@ -322,7 +315,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const topBar = tabViewPrimary.getChildControl("bar");
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -392,7 +385,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const topBar = tabViewSecondary.getChildControl("bar");
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -427,7 +420,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
alignY: "top",
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -483,7 +476,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
__addTopBarSpacer: function(tabViewTopBar) {
const spacer = new qx.ui.core.Widget().set({
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
tabViewTopBar.add(spacer, {
flex: 1
@@ -492,7 +485,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
__createCollapsibleViewSpacer: function() {
const spacer = new qx.ui.core.Widget().set({
- backgroundColor: "tab_navigation_bar_background_color",
+ backgroundColor: "workbench-view-navbar",
height: this.self().TAB_BUTTON_HEIGHT
});
return spacer;
@@ -1079,13 +1072,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const nodeOptions = new osparc.widget.NodeOptions(node);
nodeOptions.buildLayout();
- [
- "versionChanged",
- "bootModeChanged",
- "limitsChanged"
- ].forEach(eventName => {
- nodeOptions.addListener(eventName, () => this.__populateSecondaryColumn(node));
- });
return nodeOptions;
},
diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js
index e5cb935cdf8..eb694304233 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js
@@ -358,7 +358,8 @@ qx.Class.define("osparc.desktop.organizations.MembersList", {
}
})
.catch(err => {
- osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong adding the user"), "ERROR");
+ const errorMessage = err["message"] || this.tr("Something went wrong adding the user");
+ osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR");
console.error(err);
});
},
diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
index 6871348d8a0..c9d0501c0cd 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
@@ -94,17 +94,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationDetails", {
__openEditOrganization: function() {
const org = this.__orgModel;
-
- const newOrg = false;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
- org.bind("gid", orgEditor, "gid");
- org.bind("label", orgEditor, "label");
- org.bind("description", orgEditor, "description");
- org.bind("thumbnail", orgEditor, "thumbnail", {
- converter: val => val ? val : ""
- });
const title = this.tr("Organization Details Editor");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const orgEditor = new osparc.editor.OrganizationEditor(org);
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("updateOrg", () => {
this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor);
});
diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
index 705e943ef5a..c2f8656ed83 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
@@ -99,10 +99,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
allowGrowX: false
});
createOrgBtn.addListener("execute", function() {
- const newOrg = true;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
- const title = this.tr("Organization Details Editor");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const title = this.tr("New Organization");
+ const orgEditor = new osparc.editor.OrganizationEditor();
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("createOrg", () => {
this.__createOrganization(win, orgEditor.getChildControl("create"), orgEditor);
});
@@ -176,7 +175,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
}
},
- reloadOrganizations: function() {
+ reloadOrganizations: function(orgId) {
this.__orgsUIList.resetSelection();
const orgsModel = this.__orgsModel;
orgsModel.removeAll();
@@ -199,6 +198,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
orgsList.sort(this.self().sortOrganizations);
orgsList.forEach(org => orgsModel.append(qx.data.marshal.Json.createModel(org)));
this.setOrganizationsLoaded(true);
+ if (orgId) {
+ this.fireDataEvent("organizationSelected", orgId);
+ }
});
},
@@ -208,16 +210,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
return;
}
- const newOrg = false;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
- org.bind("gid", orgEditor, "gid");
- org.bind("label", orgEditor, "label");
- org.bind("description", orgEditor, "description");
- org.bind("thumbnail", orgEditor, "thumbnail", {
- converter: val => val ? val : ""
- });
const title = this.tr("Organization Details Editor");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const orgEditor = new osparc.editor.OrganizationEditor(org);
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("updateOrg", () => {
this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor);
});
@@ -287,18 +282,20 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
}
};
osparc.data.Resources.fetch("organizations", "post", params)
- .then(() => {
+ .then(org => {
osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created"));
button.setFetching(false);
osparc.store.Store.getInstance().reset("organizations");
// reload "profile", "organizations" are part of the information in this endpoint
osparc.data.Resources.getOne("profile", {}, null, false)
.then(() => {
- this.reloadOrganizations();
+ // open the newly created organization
+ this.reloadOrganizations(org["gid"]);
});
})
.catch(err => {
- osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong creating ") + name, "ERROR");
+ const errorMessage = err["message"] || this.tr("Something went wrong creating ") + name;
+ osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR");
button.setFetching(false);
console.error(err);
})
diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js
index 7265c65cebd..add2f2f3040 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js
@@ -48,13 +48,10 @@ qx.Class.define("osparc.desktop.preferences.pages.TagsPage", {
icon: "@FontAwesome5Solid/plus/14"
});
osparc.utils.Utils.setIdToWidget(this.__addTagButton, "addTagBtn");
- osparc.data.Resources.get("tags")
- .then(tags => {
- this.__tagItems = tags.map(tag => new osparc.form.tag.TagItem().set({...tag}));
- this.__renderLayout();
- this.__attachEventHandlers();
- })
- .catch(err => console.error(err));
+ const tags = osparc.store.Tags.getInstance().getTags();
+ this.__tagItems = tags.map(tag => new osparc.form.tag.TagItem().set({tag}));
+ this.__renderLayout();
+ this.__attachEventHandlers();
},
__renderLayout: function() {
diff --git a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
index b817d11a1d0..b528e760c01 100644
--- a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
+++ b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
@@ -18,7 +18,7 @@
qx.Class.define("osparc.editor.OrganizationEditor", {
extend: qx.ui.core.Widget,
- construct: function(newOrg = true) {
+ construct: function(organization) {
this.base(arguments);
this._setLayout(new qx.ui.layout.VBox(8));
@@ -29,7 +29,32 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
manager.add(title);
this.getChildControl("description");
this.getChildControl("thumbnail");
- newOrg ? this.getChildControl("create") : this.getChildControl("save");
+ organization ? this.getChildControl("save") : this.getChildControl("create");
+
+ if (organization) {
+ organization.bind("gid", this, "gid");
+ organization.bind("label", this, "label");
+ organization.bind("description", this, "description");
+ organization.bind("thumbnail", this, "thumbnail", {
+ converter: val => val ? val : ""
+ });
+ } else {
+ osparc.store.Store.getInstance().getGroupsOrganizations()
+ .then(orgs => {
+ const existingNames = orgs.map(org => org["label"]);
+ const defaultName = osparc.utils.Utils.getUniqueName("New Organization", existingNames)
+ title.setValue(defaultName);
+ })
+ .catch(err => {
+ console.error(err);
+ title.setValue("New Organization");
+ });
+ }
+
+ this.addListener("appear", () => {
+ title.focus();
+ title.activate();
+ });
},
properties: {
@@ -77,7 +102,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
font: "text-14",
backgroundColor: "background-main",
placeholder: this.tr("Title"),
- height: 35
+ height: 30,
});
this.bind("label", control, "value");
control.bind("value", this, "label");
@@ -85,12 +110,10 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
break;
}
case "description": {
- control = new qx.ui.form.TextArea().set({
+ control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Description"),
- autoSize: true,
- minHeight: 70,
- maxHeight: 140
+ height: 30,
});
this.bind("description", control, "value");
control.bind("value", this, "description");
@@ -101,7 +124,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Thumbnail"),
- height: 35
+ height: 30,
});
this.bind("thumbnail", control, "value");
control.bind("value", this, "thumbnail");
diff --git a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
index 6b89ee2af78..dab5a9807c3 100644
--- a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
+++ b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
@@ -33,20 +33,33 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
manager.add(title);
this.getChildControl("description");
this.getChildControl("thumbnail");
- workspace ? this.getChildControl("save") : this.getChildControl("create");
+ this.getChildControl("cancel");
+ this.getChildControl("save");
if (workspace) {
- this.__workspaceId = workspace.getWorkspaceId();
- this.set({
- label: workspace.getName(),
- description: workspace.getDescription(),
- thumbnail: workspace.getThumbnail(),
- });
+ // editing
+ this.setWorkspace(workspace);
+ } else {
+ // creating
+ this.__creatingWorkspace = true;
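+ // the workspace is created immediately; cancel() deletes it again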
+ this.__createWorkspace()
+ .then(newWorkspace => {
+ this.setWorkspace(newWorkspace);
+ this.fireDataEvent("workspaceCreated");
+ this.getChildControl("sharing");
+ });
}
this.addListener("appear", this.__onAppear, this);
},
properties: {
+ workspace: {
+ check: "osparc.data.model.Workspace",
+ init: null,
+ nullable: false,
+ apply: "__applyWorkspace"
+ },
+
label: {
check: "String",
init: "",
@@ -70,13 +83,26 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
},
events: {
- "workspaceCreated": "qx.event.type.Data",
+ "workspaceCreated": "qx.event.type.Event",
+ "workspaceDeleted": "qx.event.type.Event",
"workspaceUpdated": "qx.event.type.Event",
+ "updateAccessRights": "qx.event.type.Event",
"cancel": "qx.event.type.Event"
},
+ statics: {
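+ // insertion order of the child controls in the layout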
+ POS: {
+ INTRO: 0,
+ TITLE: 1,
+ DESCRIPTION: 2,
+ THUMBNAIL: 3,
+ SHARING: 4,
+ BUTTONS: 5,
+ }
+ },
+
members: {
- __workspaceId: null,
+ __creatingWorkspace: null,
_createChildControlImpl: function(id) {
let control;
@@ -89,7 +115,7 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
rich: true,
wrap: true
});
- this._add(control);
+ this._addAt(control, this.self().POS.INTRO);
break;
}
case "title": {
@@ -97,71 +123,64 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
font: "text-14",
backgroundColor: "background-main",
placeholder: this.tr("Title"),
- minHeight: 27
+ height: 30,
});
this.bind("label", control, "value");
control.bind("value", this, "label");
- this._add(control);
+ this._addAt(control, this.self().POS.TITLE);
break;
}
case "description": {
- control = new qx.ui.form.TextArea().set({
+ control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Description"),
- autoSize: true,
- minHeight: 70,
+ height: 30,
});
this.bind("description", control, "value");
control.bind("value", this, "description");
- this._add(control);
+ this._addAt(control, this.self().POS.DESCRIPTION);
break;
}
case "thumbnail": {
control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Thumbnail"),
+ height: 30,
});
this.bind("thumbnail", control, "value");
control.bind("value", this, "thumbnail");
- this._add(control);
+ this._addAt(control, this.self().POS.THUMBNAIL);
break;
}
- case "create": {
- const buttons = this.getChildControl("buttonsLayout");
- control = new osparc.ui.form.FetchButton(this.tr("Create")).set({
- appearance: "form-button"
- });
- control.addListener("execute", () => {
- if (this.__validator.validate()) {
- this.__createWorkspace(control);
- }
- }, this);
- buttons.addAt(control, 1);
+ case "sharing": {
+ control = new osparc.share.CollaboratorsWorkspace(this.getWorkspace());
+ control.addListener("updateAccessRights", () => this.fireDataEvent("updateAccessRights", this.getWorkspace().getWorkspaceId()), this);
+ this._addAt(control, this.self().POS.SHARING);
+ break;
+ }
+ case "buttons-layout": {
+ control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
+ alignX: "right"
+ }));
+ this._addAt(control, this.self().POS.BUTTONS);
break;
}
case "save": {
- const buttons = this.getChildControl("buttonsLayout");
+ const buttons = this.getChildControl("buttons-layout");
control = new osparc.ui.form.FetchButton(this.tr("Save")).set({
appearance: "form-button"
});
- control.addListener("execute", () => {
- if (this.__validator.validate()) {
- this.__editWorkspace(control);
- }
- }, this);
+ control.addListener("execute", () => this.__saveWorkspace(control), this);
buttons.addAt(control, 1);
break;
}
- case "buttonsLayout": {
- control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
- alignX: "right"
- }));
- const cancelButton = new qx.ui.form.Button(this.tr("Cancel")).set({
+ case "cancel": {
+ const buttons = this.getChildControl("buttons-layout");
+ control = new qx.ui.form.Button(this.tr("Cancel")).set({
appearance: "form-button-text"
});
- cancelButton.addListener("execute", () => this.fireEvent("cancel"), this);
- control.addAt(cancelButton, 0);
- this._add(control);
+ control.addListener("execute", () => this.cancel(), this);
+ buttons.addAt(control, 0);
break;
}
}
@@ -169,36 +188,55 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
return control || this.base(arguments, id);
},
- __createWorkspace: function(createButton) {
- createButton.setFetching(true);
+ __applyWorkspace: function(workspace) {
+ this.set({
+ label: workspace.getName(),
+ description: workspace.getDescription(),
+ thumbnail: workspace.getThumbnail(),
+ });
+ },
+
+ __createWorkspace: function() {
+ const workspaceStore = osparc.store.Workspaces.getInstance();
+ const workspaces = workspaceStore.getWorkspaces();
+ const existingNames = workspaces.map(workspace => workspace.getName());
+ const defaultName = osparc.utils.Utils.getUniqueName("New Workspace", existingNames);
const newWorkspaceData = {
- name: this.getLabel(),
+ name: this.getLabel() || defaultName,
description: this.getDescription(),
thumbnail: this.getThumbnail(),
};
- osparc.store.Workspaces.getInstance().postWorkspace(newWorkspaceData)
- .then(newWorkspace => this.fireDataEvent("workspaceCreated", newWorkspace))
- .catch(err => {
- console.error(err);
- osparc.FlashMessenger.logAs(err.message, "ERROR");
- })
- .finally(() => createButton.setFetching(false));
+ return workspaceStore.postWorkspace(newWorkspaceData);
},
- __editWorkspace: function(editButton) {
- editButton.setFetching(true);
- const updateData = {
- name: this.getLabel(),
- description: this.getDescription(),
- thumbnail: this.getThumbnail(),
- };
- osparc.store.Workspaces.getInstance().putWorkspace(this.__workspaceId, updateData)
- .then(() => this.fireEvent("workspaceUpdated"))
- .catch(err => {
- console.error(err);
- osparc.FlashMessenger.logAs(err.message, "ERROR");
- })
- .finally(() => editButton.setFetching(false));
+ __saveWorkspace: function(editButton) {
+ if (this.__validator.validate()) {
+ editButton.setFetching(true);
+ const updateData = {
+ name: this.getLabel(),
+ description: this.getDescription(),
+ thumbnail: this.getThumbnail(),
+ };
+ osparc.store.Workspaces.getInstance().putWorkspace(this.getWorkspace().getWorkspaceId(), updateData)
+ .then(() => this.fireEvent("workspaceUpdated"))
+ .catch(err => {
+ console.error(err);
+ osparc.FlashMessenger.logAs(err.message, "ERROR");
+ })
+ .finally(() => editButton.setFetching(false));
+ }
+ },
+
+ cancel: function() {
+ if (this.__creatingWorkspace) {
+ osparc.store.Workspaces.getInstance().deleteWorkspace(this.getWorkspace().getWorkspaceId())
+ .then(() => this.fireEvent("workspaceDeleted"))
+ .catch(err => {
+ console.error(err);
+ osparc.FlashMessenger.logAs(err.message, "ERROR");
+ });
+ }
+ this.fireEvent("cancel");
},
__onAppear: function() {
diff --git a/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js b/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js
index c0a74265e01..caf5914e5d3 100644
--- a/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js
+++ b/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js
@@ -18,11 +18,11 @@ qx.Class.define("osparc.filter.UserTagsFilter", {
},
members: {
__buildMenu: function() {
- osparc.store.Store.getInstance().getTags()
+ osparc.store.Tags.getInstance().getTags()
.forEach(tag => {
- const menuButton = this._addOption(tag.name);
+ const menuButton = this._addOption(tag.getName());
menuButton.setIcon("@FontAwesome5Solid/square/12");
- menuButton.getChildControl("icon").setTextColor(tag.color);
+ menuButton.getChildControl("icon").setTextColor(tag.getColor());
});
},
__attachEventListeners: function(filterId, filterGroupId) {
diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js b/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js
index 7e79bb54bf3..77282a5db7f 100644
--- a/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js
+++ b/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js
@@ -26,37 +26,51 @@ qx.Class.define("osparc.form.tag.TagItem", {
},
properties: {
+ tag: {
+ check: "osparc.data.model.Tag",
+ nullable: false,
+ init: null,
+ event: "changeTag",
+ apply: "__applyTag",
+ },
+
id: {
check: "Integer"
},
+
name: {
check: "String",
event: "changeName",
init: ""
},
+
description: {
check: "String",
nullable: true,
event: "changeDescription",
init: ""
},
+
color: {
check: "Color",
event: "changeColor",
init: "#303030"
},
+
accessRights: {
check: "Object",
nullable: false,
+ event: "changeAccessRights",
apply: "__renderLayout",
- event: "changeAccessRights"
},
+
mode: {
check: "String",
init: "display",
nullable: false,
apply: "_applyMode"
},
+
appearance: {
init: "tagitem",
refine: true
@@ -78,57 +92,7 @@ qx.Class.define("osparc.form.tag.TagItem", {
__colorButton: null,
__loadingIcon: null,
__validationManager: null,
- /**
- * Renders this tag item from scratch.
- */
- __renderLayout: function() {
- this._removeAll();
- if (this.getMode() === this.self().modes.EDIT) {
- this.__renderEditMode();
- } else if (this.getMode() === this.self().modes.DISPLAY) {
- this.__renderDisplayMode();
- }
- },
- __renderEditMode: function() {
- const nameContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({
- width: 90
- });
- nameContainer.add(new qx.ui.basic.Label(this.tr("Name")).set({
- buddy: this.getChildControl("nameinput")
- }));
- nameContainer.add(this.getChildControl("nameinput").set({
- value: this.getName()
- }));
- this._add(nameContainer);
- const descInputContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox());
- descInputContainer.add(new qx.ui.basic.Label(this.tr("Description")).set({
- buddy: this.getChildControl("descriptioninput")
- }));
- descInputContainer.add(this.getChildControl("descriptioninput").set({
- value: this.getDescription()
- }));
- this._add(descInputContainer, {
- flex: 1
- });
- this._add(this.__colorPicker());
- this._add(this.__tagItemEditButtons());
- },
- __renderDisplayMode: function() {
- const tagContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({
- width: 100
- });
- tagContainer.add(this.getChildControl("tag"));
- this._add(tagContainer);
- const descriptionContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox());
- descriptionContainer.add(this.getChildControl("description"), {
- width: "100%"
- });
- this._add(descriptionContainer, {
- flex: 1
- });
- this._add(this.__tagItemButtons());
- this.resetBackgroundColor();
- },
+
_createChildControlImpl: function(id) {
let control;
switch (id) {
@@ -151,7 +115,7 @@ qx.Class.define("osparc.form.tag.TagItem", {
}
control = this.__description;
break;
- case "nameinput":
+ case "name-input":
// Tag name input in edit mode
if (this.__nameInput === null) {
this.__nameInput = new qx.ui.form.TextField().set({
@@ -162,7 +126,7 @@ qx.Class.define("osparc.form.tag.TagItem", {
}
control = this.__nameInput;
break;
- case "descriptioninput":
+ case "description-input":
// Tag description input in edit mode
if (this.__descriptionInput === null) {
this.__descriptionInput = new qx.ui.form.TextArea().set({
@@ -172,7 +136,7 @@ qx.Class.define("osparc.form.tag.TagItem", {
}
control = this.__descriptionInput;
break;
- case "colorinput":
+ case "color-input":
// Color input in edit mode
if (this.__colorInput === null) {
this.__colorInput = new qx.ui.form.TextField().set({
@@ -180,20 +144,20 @@ qx.Class.define("osparc.form.tag.TagItem", {
width: 60,
required: true
});
- this.__colorInput.bind("value", this.getChildControl("colorbutton"), "backgroundColor");
- this.__colorInput.bind("value", this.getChildControl("colorbutton"), "textColor", {
+ this.__colorInput.bind("value", this.getChildControl("color-button"), "backgroundColor");
+ this.__colorInput.bind("value", this.getChildControl("color-button"), "textColor", {
converter: value => osparc.utils.Utils.getContrastedBinaryColor(value)
});
this.__validationManager.add(this.__colorInput, osparc.utils.Validators.hexColor);
}
control = this.__colorInput;
break;
- case "colorbutton":
+ case "color-button":
// Random color generator button in edit mode
if (this.__colorButton === null) {
this.__colorButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/sync-alt/12");
this.__colorButton.addListener("execute", () => {
- this.getChildControl("colorinput").setValue(osparc.utils.Utils.getRandomColor());
+ this.getChildControl("color-input").setValue(osparc.utils.Utils.getRandomColor());
}, this);
}
control = this.__colorButton;
@@ -201,6 +165,69 @@ qx.Class.define("osparc.form.tag.TagItem", {
}
return control || this.base(arguments, id);
},
+
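+ // mirror the Tag model's properties into this item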
+ __applyTag: function(tag) {
+ tag.bind("tagId", this, "id");
+ tag.bind("name", this, "name");
+ tag.bind("description", this, "description");
+ tag.bind("color", this, "color");
+ tag.bind("accessRights", this, "accessRights");
+ },
+
+ /**
+ * Renders this tag item from scratch.
+ */
+ __renderLayout: function() {
+ this._removeAll();
+ if (this.getMode() === this.self().modes.EDIT) {
+ this.__renderEditMode();
+ } else if (this.getMode() === this.self().modes.DISPLAY) {
+ this.__renderDisplayMode();
+ }
+ },
+
+ __renderEditMode: function() {
+ const nameContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({
+ width: 90
+ });
+ nameContainer.add(new qx.ui.basic.Label(this.tr("Name")).set({
+ buddy: this.getChildControl("name-input")
+ }));
+ nameContainer.add(this.getChildControl("name-input").set({
+ value: this.getName()
+ }));
+ this._add(nameContainer);
+ const descInputContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox());
+ descInputContainer.add(new qx.ui.basic.Label(this.tr("Description")).set({
+ buddy: this.getChildControl("description-input")
+ }));
+ descInputContainer.add(this.getChildControl("description-input").set({
+ value: this.getDescription()
+ }));
+ this._add(descInputContainer, {
+ flex: 1
+ });
+ this._add(this.__colorPicker());
+ this._add(this.__tagItemEditButtons());
+ },
+
+ __renderDisplayMode: function() {
+ const tagContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({
+ width: 100
+ });
+ tagContainer.add(this.getChildControl("tag"));
+ this._add(tagContainer);
+ const descriptionContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox());
+ descriptionContainer.add(this.getChildControl("description"), {
+ width: "100%"
+ });
+ this._add(descriptionContainer, {
+ flex: 1
+ });
+ this._add(this.__tagItemButtons());
+ this.resetBackgroundColor();
+ },
+
/**
* Generates and returns the buttons for deleting and editing an existing label (display mode)
*/
@@ -224,12 +251,7 @@ qx.Class.define("osparc.form.tag.TagItem", {
editButton.addListener("execute", () => this.setMode(this.self().modes.EDIT), this);
deleteButton.addListener("execute", () => {
deleteButton.setFetching(true);
- const params = {
- url: {
- tagId: this.getId()
- }
- };
- osparc.data.Resources.fetch("tags", "delete", params)
+ osparc.store.Tags.getInstance().deleteTag(this.getId())
.then(() => this.fireEvent("deleteTag"))
.catch(console.error)
.finally(() => deleteButton.setFetching(false));
@@ -256,21 +278,15 @@ qx.Class.define("osparc.form.tag.TagItem", {
saveButton.addListener("execute", () => {
if (this.__validationManager.validate()) {
const data = this.__serializeData();
- const params = {
- data
- };
saveButton.setFetching(true);
let fetch;
if (this.isPropertyInitialized("id")) {
- params.url = {
- tagId: this.getId()
- };
- fetch = osparc.data.Resources.fetch("tags", "put", params);
+ fetch = osparc.store.Tags.getInstance().putTag(this.getId(), data);
} else {
- fetch = osparc.data.Resources.fetch("tags", "post", params);
+ fetch = osparc.store.Tags.getInstance().postTag(data);
}
fetch
- .then(tag => this.set(tag))
+ .then(tag => this.setTag(tag))
.catch(console.error)
.finally(() => {
this.fireEvent("tagSaved");
@@ -295,24 +311,27 @@ qx.Class.define("osparc.form.tag.TagItem", {
__colorPicker: function() {
const container = new qx.ui.container.Composite(new qx.ui.layout.VBox());
container.add(new qx.ui.basic.Label(this.tr("Color")).set({
- buddy: this.getChildControl("colorinput")
+ buddy: this.getChildControl("color-input")
}));
const innerContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox());
- const refreshButton = this.getChildControl("colorbutton");
- const colorInput = this.getChildControl("colorinput");
+ const refreshButton = this.getChildControl("color-button");
+ const colorInput = this.getChildControl("color-input");
innerContainer.add(refreshButton);
innerContainer.add(colorInput);
container.add(innerContainer);
return container;
},
/**
- * Creates an object containing the udpated tag info
+ * Creates an object containing the updated tag info
*/
__serializeData: function() {
+ const name = this.getChildControl("name-input").getValue();
+ const description = this.getChildControl("description-input").getValue();
+ const color = this.getChildControl("color-input").getValue();
return {
- name: this.getChildControl("nameinput").getValue().trim(),
- description: this.getChildControl("descriptioninput").getValue().trim(),
- color: this.getChildControl("colorinput").getValue()
+ name: name.trim(),
+ description: description ? description.trim() : "",
+ color: color
};
},
_applyMode: function() {
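
Note: TagItem now routes all tag CRUD through the new osparc.store.Tags singleton (introduced later in this diff) instead of hand-building osparc.data.Resources params. A minimal sketch of the resulting flow; hasId/tagId stand in for this.isPropertyInitialized("id")/this.getId(), and the tag data values are made up:

    // create or update, depending on whether the item already has an id
    const tags = osparc.store.Tags.getInstance();
    const data = {name: "my-tag", description: "", color: "#FF5500"};
    const fetch = hasId ? tags.putTag(tagId, data) : tags.postTag(data);
    fetch
      .then(tag => console.log("saved", tag.getTagId()))
      .catch(console.error);
    // deletion is a single call as well
    tags.deleteTag(tagId).then(() => console.log("deleted"));
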
diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js
index ae3ef918adb..6f704c1f222 100644
--- a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js
+++ b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js
@@ -88,8 +88,8 @@ qx.Class.define("osparc.form.tag.TagManager", {
newItem.addListener("tagSaved", () => this.__repopulateTags(), this);
newItem.addListener("cancelNewTag", e => tagsContainer.remove(e.getTarget()), this);
newItem.addListener("deleteTag", e => tagsContainer.remove(e.getTarget()), this);
- this.__repopulateTags();
tagsContainer.add(newItem);
+ this.__repopulateTags();
});
this._add(addTagButton);
@@ -119,25 +119,26 @@ qx.Class.define("osparc.form.tag.TagManager", {
__repopulateTags: function() {
this.__tagsContainer.removeAll();
- const tags = osparc.store.Store.getInstance().getTags();
+ const tags = osparc.store.Tags.getInstance().getTags();
tags.forEach(tag => this.__tagsContainer.add(this.__tagButton(tag)));
},
__tagButton: function(tag) {
- const tagButton = new osparc.form.tag.TagToggleButton(tag, this.__selectedTags.includes(tag.id));
+ const tagId = tag.getTagId();
+ const tagButton = new osparc.form.tag.TagToggleButton(tag, this.__selectedTags.includes(tagId));
tagButton.addListener("changeValue", evt => {
const selected = evt.getData();
if (this.isLiveUpdate()) {
tagButton.setFetching(true);
if (selected) {
- this.__saveAddTag(tag.id, tagButton);
+ this.__saveAddTag(tagId, tagButton);
} else {
- this.__saveRemoveTag(tag.id, tagButton);
+ this.__saveRemoveTag(tagId, tagButton);
}
} else if (selected) {
- this.__selectedTags.push(tag.id);
+ this.__selectedTags.push(tagId);
} else {
- this.__selectedTags.remove(tag.id);
+ this.__selectedTags.remove(tagId);
}
}, this);
tagButton.subscribeToFilterGroup("studyBrowserTagManager");
diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js
index 3075d738cf3..35feee0c3bc 100644
--- a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js
+++ b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js
@@ -23,11 +23,11 @@ qx.Class.define("osparc.form.tag.TagToggleButton", {
appearance: "tagbutton"
});
this.setIcon("@FontAwesome5Solid/square/14");
- this.getChildControl("icon").setTextColor(tag.color);
- if (tag.description) {
- this.setLabel(tag.name + " : " + tag.description);
+ this.getChildControl("icon").setTextColor(tag.getColor());
+ if (tag.getDescription()) {
+ this.setLabel(tag.getName() + " : " + tag.getDescription());
} else {
- this.setLabel(tag.name);
+ this.setLabel(tag.getName());
}
this.getChildControl("check");
diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js
index a95b78be639..217c13e58e9 100644
--- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js
+++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js
@@ -20,14 +20,14 @@ qx.Class.define("osparc.info.ServiceLarge", {
extend: osparc.info.CardLarge,
/**
- * @param serviceData {Object} Serialized Service Object
+ * @param metadata {Object} Serialized Service Object
* @param instance {Object} instance related data
* @param openOptions {Boolean} open edit options in new window or fire event
*/
- construct: function(serviceData, instance = null, openOptions = true) {
+ construct: function(metadata, instance = null, openOptions = true) {
this.base(arguments);
- this.setService(serviceData);
+ this.setService(metadata);
if (instance) {
if ("nodeId" in instance) {
@@ -79,6 +79,19 @@ qx.Class.define("osparc.info.ServiceLarge", {
}
},
+ statics: {
+ popUpInWindow: function(serviceLarge) {
+ const metadata = serviceLarge.getService();
+ const versionDisplay = osparc.service.Utils.extractVersionDisplay(metadata);
+ const title = `${metadata["name"]} ${versionDisplay}`;
+ const width = osparc.info.CardLarge.WIDTH;
+ const height = osparc.info.CardLarge.HEIGHT;
+ osparc.ui.window.Window.popUpInWindow(serviceLarge, title, width, height).set({
+ maxHeight: height
+ });
+ },
+ },
+
members: {
_rebuildLayout: function() {
this._removeAll();
@@ -90,72 +103,85 @@ qx.Class.define("osparc.info.ServiceLarge", {
vBox.add(deprecated);
}
- const title = this.__createTitle();
- const titleLayout = this.__createViewWithEdit(title, this.__openTitleEditor);
-
- const extraInfo = this.__extraInfo();
- const extraInfoLayout = this.__createExtraInfo(extraInfo);
-
- const bounds = this.getBounds();
- const offset = 30;
- const maxThumbnailHeight = extraInfo.length*20;
- let widgetWidth = bounds ? bounds.width - offset : 500 - offset;
- let thumbnailWidth = widgetWidth - 2 * osparc.info.CardLarge.PADDING - osparc.info.CardLarge.EXTRA_INFO_WIDTH;
- thumbnailWidth = Math.min(thumbnailWidth - 20, osparc.info.CardLarge.THUMBNAIL_MAX_WIDTH);
- const thumbnail = this.__createThumbnail(thumbnailWidth, maxThumbnailHeight);
- const thumbnailLayout = this.__createViewWithEdit(thumbnail, this.__openThumbnailEditor);
- thumbnailLayout.getLayout().set({
- alignX: "center"
- });
-
- const infoAndThumbnail = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({
- alignX: "center"
- }));
- infoAndThumbnail.add(extraInfoLayout);
- infoAndThumbnail.add(thumbnailLayout, {
- flex: 1
- });
-
- let descriptionUi = null;
- if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) {
- descriptionUi = this.__createDescriptionUi();
- }
-
const description = this.__createDescription();
const editInTitle = this.__createViewWithEdit(description.getChildren()[0], this.__openDescriptionEditor);
description.addAt(editInTitle, 0);
- let resources = null;
- if (!osparc.desktop.credits.Utils.areWalletsEnabled()) {
- resources = this.__createResources();
- }
-
const copyMetadataButton = new qx.ui.form.Button(this.tr("Copy Raw metadata"), "@FontAwesome5Solid/copy/12").set({
allowGrowX: false
});
copyMetadataButton.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(osparc.utils.Utils.prettifyJson(this.getService())), this);
-
if (
this.getService()["descriptionUi"] &&
!osparc.service.Utils.canIWrite(this.getService()["accessRights"]) &&
description.getChildren().length > 1
) {
- // Show description only
- vBox.add(description.getChildren()[1]);
+        // Also show the copy Id buttons
+ const buttonsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10));
+ if (this.getNodeId()) {
+ const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true});
+ const copyStudyIdButton = new qx.ui.form.Button(this.tr(`Copy ${studyAlias} Id`), "@FontAwesome5Solid/copy/12").set({
+ toolTipText: qx.locale.Manager.tr("Copy to clipboard"),
+ });
+ copyStudyIdButton.addListener("execute", this.__copyStudyIdToClipboard, this);
+ buttonsLayout.add(copyStudyIdButton);
+
+ const copyNodeIdButton = new qx.ui.form.Button(this.tr("Copy Service Id"), "@FontAwesome5Solid/copy/12").set({
+ toolTipText: qx.locale.Manager.tr("Copy to clipboard"),
+ });
+ copyNodeIdButton.addListener("execute", this.__copyNodeIdToClipboard, this);
+ buttonsLayout.add(copyNodeIdButton);
+ }
+        // Also add the copyMetadataButton for testers
if (osparc.data.Permissions.getInstance().isTester()) {
- // Also copyMetadataButton if tester
- vBox.add(copyMetadataButton);
+          buttonsLayout.add(copyMetadataButton);
         }
+        if (buttonsLayout.getChildren().length) {
+          vBox.add(buttonsLayout);
+        }
+        // Show the description below the buttons
+        vBox.add(description.getChildren()[1]);
} else {
+ const title = this.__createTitle();
+ const titleLayout = this.__createViewWithEdit(title, this.__openTitleEditor);
vBox.add(titleLayout);
+
+ const extraInfo = this.__extraInfo();
+ const extraInfoLayout = this.__createExtraInfo(extraInfo);
+ const bounds = this.getBounds();
+ const offset = 30;
+ const maxThumbnailHeight = extraInfo.length*20;
+ let widgetWidth = bounds ? bounds.width - offset : 500 - offset;
+ let thumbnailWidth = widgetWidth - 2 * osparc.info.CardLarge.PADDING - osparc.info.CardLarge.EXTRA_INFO_WIDTH;
+ thumbnailWidth = Math.min(thumbnailWidth - 20, osparc.info.CardLarge.THUMBNAIL_MAX_WIDTH);
+ const thumbnail = this.__createThumbnail(thumbnailWidth, maxThumbnailHeight);
+ const thumbnailLayout = this.__createViewWithEdit(thumbnail, this.__openThumbnailEditor);
+ thumbnailLayout.getLayout().set({
+ alignX: "center"
+ });
+ const infoAndThumbnail = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({
+ alignX: "center"
+ }));
+ infoAndThumbnail.add(extraInfoLayout);
+ infoAndThumbnail.add(thumbnailLayout, {
+ flex: 1
+ });
vBox.add(infoAndThumbnail);
- if (descriptionUi) {
- vBox.add(descriptionUi);
+
+ if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) {
+ const descriptionUi = this.__createDescriptionUi();
+ if (descriptionUi) {
+ vBox.add(descriptionUi);
+ }
}
vBox.add(description);
- if (resources) {
- vBox.add(resources);
+
+ if (!osparc.desktop.credits.Utils.areWalletsEnabled()) {
+ const resources = this.__createResources();
+ if (resources) {
+ vBox.add(resources);
+ }
}
vBox.add(copyMetadataButton);
}
@@ -429,6 +455,10 @@ qx.Class.define("osparc.info.ServiceLarge", {
titleEditor.open();
},
+ __copyStudyIdToClipboard: function() {
+ osparc.utils.Utils.copyTextToClipboard(this.getStudyId());
+ },
+
__copyNodeIdToClipboard: function() {
osparc.utils.Utils.copyTextToClipboard(this.getNodeId());
},
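
Note: the new popUpInWindow static centralizes the pop-up boilerplate that several call sites below (ServicesInStudy, BaseNodeView, ServiceListItem) previously duplicated. Usage reduces to two lines; title, width and height are derived inside the static from the card's metadata:

    const serviceLarge = new osparc.info.ServiceLarge(metadata);
    osparc.info.ServiceLarge.popUpInWindow(serviceLarge);
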
diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
index 3351ed0fc96..5709bfd70a2 100644
--- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
+++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
@@ -324,6 +324,10 @@ qx.Class.define("osparc.info.StudyLarge", {
studyData["resourceType"] = this.__isTemplate ? "template" : "study";
this.fireDataEvent("updateStudy", studyData);
qx.event.message.Bus.getInstance().dispatchByName("updateStudy", studyData);
+ if (this.__isTemplate) {
+ // reload templates
+        osparc.data.Resources.get("templates", {}, false);
+ }
})
.catch(err => {
console.error(err);
diff --git a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js
index 95ea7f20b7f..f1d2c3449e5 100644
--- a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js
+++ b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js
@@ -211,12 +211,12 @@ qx.Class.define("osparc.info.StudyUtils", {
tagsContainer.removeAll();
const noTagsLabel = new qx.ui.basic.Label(qx.locale.Manager.tr("Add tags"));
tagsContainer.add(noTagsLabel);
- osparc.store.Store.getInstance().getTags().filter(tag => model.getTags().includes(tag.id))
+ osparc.store.Tags.getInstance().getTags().filter(tag => model.getTags().includes(tag.getTagId()))
.forEach(selectedTag => {
if (tagsContainer.indexOf(noTagsLabel) > -1) {
tagsContainer.remove(noTagsLabel);
}
- tagsContainer.add(new osparc.ui.basic.Tag(selectedTag.name, selectedTag.color));
+ tagsContainer.add(new osparc.ui.basic.Tag(selectedTag));
});
};
study.addListener("changeTags", () => addTags(study), this);
diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js
index b51dc1c7515..eae2df3f1b9 100644
--- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js
+++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js
@@ -147,12 +147,7 @@ qx.Class.define("osparc.metadata.ServicesInStudy", {
studyId: this._studyData["uuid"],
label: node["label"]
});
- const title = this.tr("Service information");
- const width = osparc.info.CardLarge.WIDTH;
- const height = osparc.info.CardLarge.HEIGHT;
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
}, this);
this._servicesGrid.add(infoButton, {
row: i,
diff --git a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
index a6c38981dff..291c028422d 100644
--- a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
+++ b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.BootOptionsView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "bootModeChanged": "qx.event.type.Event"
- },
-
members: {
_applyNode: function(node) {
if (node.hasBootModes()) {
@@ -61,7 +57,6 @@ qx.Class.define("osparc.node.BootOptionsView", {
setTimeout(() => {
buttonsLayout.setEnabled(true);
node.requestStartNode();
- this.fireEvent("bootModeChanged");
}, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL);
}
}, this);
diff --git a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
index 2cdfb2c1f74..5f810b18799 100644
--- a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
+++ b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.LifeCycleView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "versionChanged": "qx.event.type.Event"
- },
-
members: {
_applyNode: function(node) {
if (node.isUpdatable() || node.isDeprecated() || node.isRetired()) {
@@ -125,7 +121,6 @@ qx.Class.define("osparc.node.LifeCycleView", {
setTimeout(() => {
updateButton.setFetching(false);
node.requestStartNode();
- this.fireEvent("versionChanged");
}, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL);
});
diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
index 34dfc397b37..ffa1431a00e 100644
--- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
+++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
@@ -105,7 +105,7 @@ qx.Class.define("osparc.node.TierSelectionView", {
if (selection.length) {
tierBox.setEnabled(false);
const selectedUnitId = selection[0].getModel();
- osparc.study.NodePricingUnits.pricingUnitSelected(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId)
+ osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId)
.finally(() => {
tierBox.setEnabled(true);
showSelectedTier(selectedUnitId);
diff --git a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
index f6770a7e675..3c75815c296 100644
--- a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
+++ b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.UpdateResourceLimitsView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "limitsChanged": "qx.event.type.Event"
- },
-
members: {
__resourceFields: null,
__saveBtn: null,
@@ -159,7 +155,6 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", {
osparc.data.Resources.fetch("nodesInStudyResources", "put", params)
.then(() => {
osparc.FlashMessenger.getInstance().logAs(this.tr("Limits successfully updated"));
- this.fireEvent("limitsChanged");
})
.catch(err => {
console.error(err);
diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js
index e7de026cd94..a2ee4daab00 100644
--- a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js
+++ b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js
@@ -217,17 +217,13 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", {
__openServiceDetails: function() {
const node = this.getNode();
- const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), {
+ const metadata = node.getMetaData();
+ const serviceDetails = new osparc.info.ServiceLarge(metadata, {
nodeId: node.getNodeId(),
label: node.getLabel(),
studyId: node.getStudy().getUuid()
});
- const title = this.tr("Service information");
- const width = osparc.info.CardLarge.WIDTH;
- const height = osparc.info.CardLarge.HEIGHT;
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
},
__openInstructions: function() {
diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js
index da49db7f0a4..67194c84418 100644
--- a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js
+++ b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js
@@ -22,6 +22,7 @@ qx.Class.define("osparc.notification.NotificationUI", {
this.base(arguments);
this.set({
+ margin: 4,
maxWidth: this.self().MAX_WIDTH,
padding: this.self().PADDING,
cursor: "pointer"
@@ -216,9 +217,14 @@ qx.Class.define("osparc.notification.NotificationUI", {
}
});
- notification.bind("read", this, "backgroundColor", {
- converter: read => read ? "background-main-3" : "background-main-4"
- });
+ const highlight = mouseOn => {
+ this.set({
+ backgroundColor: mouseOn ? "strong-main" : "transparent"
+      });
+ };
+ this.addListener("mouseover", () => highlight(true));
+ this.addListener("mouseout", () => highlight(false));
+ highlight(false);
},
__notificationTapped: function() {
diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js
index 34757474f64..c59a8a94a4c 100644
--- a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js
+++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js
@@ -27,9 +27,14 @@ qx.Class.define("osparc.notification.NotificationsContainer", {
zIndex: osparc.utils.Utils.FLOATING_Z_INDEX,
maxWidth: osparc.notification.NotificationUI.MAX_WIDTH,
maxHeight: 250,
- backgroundColor: "background-main-3",
+ backgroundColor: "background-main",
decorator: "rounded",
});
+ let color = qx.theme.manager.Color.getInstance().resolve("text");
+ color = qx.util.ColorUtil.stringToRgb(color);
+ color.push(0.3); // add transparency
+ color = qx.util.ColorUtil.rgbToRgbString(color);
+ osparc.utils.Utils.addBorder(this, 1, color);
osparc.utils.Utils.setIdToWidget(this, "notificationsContainer");
const root = qx.core.Init.getApplication().getRoot();
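
Note: the border color above is the theme's "text" color with 30% alpha. The same trick, wrapped in an illustrative helper (the helper name is not part of the codebase):

    const resolveWithAlpha = (colorName, alpha) => {
      const resolved = qx.theme.manager.Color.getInstance().resolve(colorName);
      const rgb = qx.util.ColorUtil.stringToRgb(resolved); // [r, g, b]
      rgb.push(alpha); // -> [r, g, b, a]
      return qx.util.ColorUtil.rgbToRgbString(rgb); // "rgba(r, g, b, a)"
    };
    osparc.utils.Utils.addBorder(widget, 1, resolveWithAlpha("text", 0.3));
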
diff --git a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
index 1cbe3b5f7ea..b2ea90b2b8e 100644
--- a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
+++ b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
@@ -97,15 +97,14 @@ qx.Class.define("osparc.notification.RibbonNotifications", {
if (notification.getType() === "announcement") {
const dontShowButton = new qx.ui.form.Button(this.tr("Don't show again")).set({
- backgroundColor: "transparent",
- textColor: "strong-text",
+ appearance: "strong-button",
alignY: "middle",
padding: 4,
allowGrowX: false,
allowGrowY: false,
marginLeft: 15
});
- osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("strong-text"));
+ osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("text"));
dontShowButton.addListener("tap", () => {
this.removeNotification(notification);
osparc.utils.Utils.localCache.setDontShowAnnouncement(notification.announcementId);
diff --git a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js
index 959859389ac..c970c2df3a9 100644
--- a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js
+++ b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js
@@ -161,12 +161,7 @@ qx.Class.define("osparc.service.ServiceListItem", {
osparc.store.Services.getService(key, version)
.then(serviceMetadata => {
const serviceDetails = new osparc.info.ServiceLarge(serviceMetadata);
- const title = this.tr("Service information");
- const width = osparc.info.CardLarge.WIDTH;
- const height = osparc.info.CardLarge.HEIGHT;
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
});
},
diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js
index 16385de935c..d6e83d8fb23 100644
--- a/services/static-webserver/client/source/class/osparc/store/Folders.js
+++ b/services/static-webserver/client/source/class/osparc/store/Folders.js
@@ -31,6 +31,17 @@ qx.Class.define("osparc.store.Folders", {
"folderMoved": "qx.event.type.Data",
},
+ statics: {
+ curateOrderBy: function(orderBy) {
+ const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy);
+ if (curatedOrderBy.field !== "name") {
+ // only "modified_at" and "name" supported
+ curatedOrderBy.field = "modified_at";
+ }
+ return curatedOrderBy;
+ },
+ },
+
members: {
foldersCached: null,
@@ -40,7 +51,7 @@ qx.Class.define("osparc.store.Folders", {
orderBy = {
field: "modified_at",
direction: "desc"
- }
+ },
) {
if (osparc.auth.Data.getInstance().isGuest()) {
return new Promise(resolve => {
@@ -48,12 +59,7 @@ qx.Class.define("osparc.store.Folders", {
});
}
- const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy);
- if (curatedOrderBy.field !== "name") {
- // only "modified_at" and "name" supported
- curatedOrderBy.field = "modified_at";
- }
-
+ const curatedOrderBy = this.self().curateOrderBy(orderBy);
const params = {
url: {
workspaceId,
@@ -72,6 +78,37 @@ qx.Class.define("osparc.store.Folders", {
});
},
+ searchFolders: function(
+ text,
+ orderBy = {
+ field: "modified_at",
+ direction: "desc"
+ },
+ ) {
+ if (osparc.auth.Data.getInstance().isGuest()) {
+ return new Promise(resolve => {
+ resolve([]);
+ });
+ }
+
+ const curatedOrderBy = this.self().curateOrderBy(orderBy);
+ const params = {
+ url: {
+ text,
+ orderBy: JSON.stringify(curatedOrderBy),
+ }
+ };
+ return osparc.data.Resources.getInstance().getAllPages("folders", params, "getPageSearch")
+ .then(foldersData => {
+ const folders = [];
+ foldersData.forEach(folderData => {
+ const folder = this.__addToCache(folderData);
+ folders.push(folder);
+ });
+ return folders;
+ });
+ },
+
postFolder: function(name, parentFolderId = null, workspaceId = null) {
const newFolderData = {
name,
@@ -135,13 +172,16 @@ qx.Class.define("osparc.store.Folders", {
__addToCache: function(folderData) {
let folder = this.foldersCached.find(f => f.getFolderId() === folderData["folderId"] && f.getWorkspaceId() === folderData["workspaceId"]);
if (folder) {
+ const props = Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Folder));
// put
Object.keys(folderData).forEach(key => {
if (key === "createdAt") {
folder.set("createdAt", new Date(folderData["createdAt"]));
} else if (key === "modifiedAt") {
folder.set("lastModified", new Date(folderData["modifiedAt"]));
- } else {
+ } else if (key === "trashedAt") {
+ folder.set("trashedAt", new Date(folderData["trashedAt"]));
+ } else if (props.includes(key)) {
folder.set(key, folderData[key]);
}
});
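
Note: searchFolders mirrors fetchFolders and funnels its orderBy argument through the new curateOrderBy static, so unsupported sort fields are coerced to "modified_at" before reaching the API. Sketch (the "prio" field is made up):

    osparc.store.Folders.curateOrderBy({field: "prio", direction: "asc"});
    // -> {field: "modified_at", direction: "asc"}
    osparc.store.Folders.getInstance().searchFolders("demo")
      .then(folders => console.log(folders.length, "folders found"));
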
diff --git a/services/static-webserver/client/source/class/osparc/store/Services.js b/services/static-webserver/client/source/class/osparc/store/Services.js
index f6851b3aa43..c2abeed32ec 100644
--- a/services/static-webserver/client/source/class/osparc/store/Services.js
+++ b/services/static-webserver/client/source/class/osparc/store/Services.js
@@ -44,7 +44,11 @@ qx.Class.define("osparc.store.Services", {
resolve(servicesObj);
})
- .catch(err => console.error("getServices failed", err));
+ .catch(err => {
+ const msg = err.message || qx.locale.Manager.tr("Unable to fetch Services");
+ osparc.FlashMessenger.getInstance().logAs(msg, "ERROR");
+ console.error(err);
+ });
});
},
diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js
index 0e015ed7811..89ccc5e51a0 100644
--- a/services/static-webserver/client/source/class/osparc/store/Store.js
+++ b/services/static-webserver/client/source/class/osparc/store/Store.js
@@ -66,6 +66,12 @@ qx.Class.define("osparc.store.Store", {
init: null,
nullable: true
},
+ studyBrowserContext: {
+ check: ["studiesAndFolders", "workspaces", "search"],
+ init: "studiesAndFolders",
+ nullable: false,
+ event: "changeStudyBrowserContext",
+ },
studies: {
check: "Array",
init: []
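
Note: studyBrowserContext is a checked property, so only the three listed values are accepted, and every switch fires "changeStudyBrowserContext". Minimal consumer sketch:

    const store = osparc.store.Store.getInstance();
    store.addListener("changeStudyBrowserContext", e => console.log("context:", e.getData()));
    store.setStudyBrowserContext("search"); // or "workspaces" / "studiesAndFolders"
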
diff --git a/services/static-webserver/client/source/class/osparc/store/Tags.js b/services/static-webserver/client/source/class/osparc/store/Tags.js
new file mode 100644
index 00000000000..4ffd9f5cd4f
--- /dev/null
+++ b/services/static-webserver/client/source/class/osparc/store/Tags.js
@@ -0,0 +1,132 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2024 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.store.Tags", {
+ extend: qx.core.Object,
+ type: "singleton",
+
+ construct: function() {
+ this.base(arguments);
+
+ this.tagsCached = [];
+ },
+
+ events: {
+ "tagAdded": "qx.event.type.Data",
+ "tagRemoved": "qx.event.type.Data",
+ },
+
+ members: {
+ tagsCached: null,
+
+ fetchTags: function() {
+ if (osparc.auth.Data.getInstance().isGuest()) {
+ return new Promise(resolve => {
+ resolve([]);
+ });
+ }
+
+ return osparc.data.Resources.get("tags")
+ .then(tagsData => {
+ const tags = [];
+ tagsData.forEach(tagData => {
+ const tag = this.__addToCache(tagData);
+ tags.push(tag);
+ });
+ return tags;
+ });
+ },
+
+ getTags: function() {
+ return this.tagsCached;
+ },
+
+ postTag: function(newTagData) {
+ const params = {
+ data: newTagData
+ };
+ return osparc.data.Resources.getInstance().fetch("tags", "post", params)
+ .then(tagData => {
+ const tag = this.__addToCache(tagData);
+ this.fireDataEvent("tagAdded", tag);
+ return tag;
+ });
+ },
+
+ deleteTag: function(tagId) {
+ const params = {
+ url: {
+ tagId
+ }
+ };
+ return osparc.data.Resources.getInstance().fetch("tags", "delete", params)
+ .then(() => {
+ const tag = this.getTag(tagId);
+ if (tag) {
+ this.__deleteFromCache(tagId);
+ this.fireDataEvent("tagRemoved", tag);
+ }
+ })
+ .catch(console.error);
+ },
+
+ putTag: function(tagId, updateData) {
+ const params = {
+ url: {
+ tagId
+ },
+ data: updateData
+ };
+ return osparc.data.Resources.getInstance().fetch("tags", "put", params)
+ .then(tagData => {
+ return this.__addToCache(tagData);
+ })
+ .catch(console.error);
+ },
+
+ getTag: function(tagId = null) {
+ return this.tagsCached.find(f => f.getTagId() === tagId);
+ },
+
+ __addToCache: function(tagData) {
+ let tag = this.tagsCached.find(f => f.getTagId() === tagData["id"]);
+ if (tag) {
+ const props = Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Tag));
+ // put
+ Object.keys(tagData).forEach(key => {
+ if (props.includes(key)) {
+ tag.set(key, tagData[key]);
+ }
+ });
+ } else {
+ // get and post
+ tag = new osparc.data.model.Tag(tagData);
+ this.tagsCached.unshift(tag);
+ }
+ return tag;
+ },
+
+ __deleteFromCache: function(tagId) {
+ const idx = this.tagsCached.findIndex(f => f.getTagId() === tagId);
+ if (idx > -1) {
+ this.tagsCached.splice(idx, 1);
+ return true;
+ }
+ return false;
+ }
+ }
+});
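
Note: the new store is cache-first: fetchTags() hits the API once and fills tagsCached, after which getTags() is synchronous; postTag/putTag/deleteTag go through the same cache, with postTag and deleteTag firing "tagAdded"/"tagRemoved". Usage sketch:

    const tagsStore = osparc.store.Tags.getInstance();
    tagsStore.addListener("tagAdded", e => console.log("added", e.getData().getName()));
    tagsStore.addListener("tagRemoved", e => console.log("removed", e.getData().getTagId()));
    tagsStore.fetchTags()
      .then(() => console.log(tagsStore.getTags().length, "tags cached"));
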
diff --git a/services/static-webserver/client/source/class/osparc/store/Workspaces.js b/services/static-webserver/client/source/class/osparc/store/Workspaces.js
index 8d803de0af5..253ac714a1d 100644
--- a/services/static-webserver/client/source/class/osparc/store/Workspaces.js
+++ b/services/static-webserver/client/source/class/osparc/store/Workspaces.js
@@ -197,6 +197,10 @@ qx.Class.define("osparc.store.Workspaces", {
return this.workspacesCached.find(w => w.getWorkspaceId() === workspaceId);
},
+ getWorkspaces: function() {
+ return this.workspacesCached;
+ },
+
__addToCache: function(workspace) {
const found = this.workspacesCached.find(w => w.getWorkspaceId() === workspace.getWorkspaceId());
if (!found) {
diff --git a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
index d8caa28b68f..76918e12b3e 100644
--- a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
+++ b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
@@ -30,8 +30,10 @@ qx.Class.define("osparc.study.NodePricingUnits", {
layout: new qx.ui.layout.VBox()
});
- this.__studyId = studyId;
- this.__nodeId = nodeId;
+ this.set({
+ studyId,
+ nodeId,
+ });
if (node instanceof osparc.data.model.Node) {
this.__nodeKey = node.getKey();
this.__nodeVersion = node.getVersion();
@@ -43,8 +45,35 @@ qx.Class.define("osparc.study.NodePricingUnits", {
}
},
+ properties: {
+ studyId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ },
+
+ nodeId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ },
+
+ pricingPlanId: {
+ check: "Number",
+ init: null,
+ nullable: false,
+ },
+
+ patchNode: {
+ check: "Boolean",
+ init: true,
+ nullable: false,
+ event: "changePatchNode",
+ },
+ },
+
statics: {
- pricingUnitSelected: function(studyId, nodeId, planId, selectedUnitId) {
+ patchPricingUnitSelection: function(studyId, nodeId, planId, selectedUnitId) {
const params = {
url: {
studyId,
@@ -58,19 +87,18 @@ qx.Class.define("osparc.study.NodePricingUnits", {
},
members: {
- __studyId: null,
- __nodeId: null,
__nodeKey: null,
__nodeVersion: null,
__nodeLabel: null,
+ __pricingUnits: null,
showPricingUnits: function(inGroupBox = true) {
return new Promise(resolve => {
const nodeKey = this.__nodeKey;
const nodeVersion = this.__nodeVersion;
const nodeLabel = this.__nodeLabel;
- const studyId = this.__studyId;
- const nodeId = this.__nodeId;
+ const studyId = this.getStudyId();
+ const nodeId = this.getNodeId();
const plansParams = {
url: osparc.data.Resources.getServiceUrl(
@@ -79,30 +107,36 @@ qx.Class.define("osparc.study.NodePricingUnits", {
)
};
osparc.data.Resources.fetch("services", "pricingPlans", plansParams)
- .then(pricingPlans => {
- if (pricingPlans) {
+ .then(pricingPlan => {
+ if (pricingPlan) {
const unitParams = {
url: {
studyId,
nodeId
}
};
+ this.set({
+ pricingPlanId: pricingPlan["pricingPlanId"]
+ });
osparc.data.Resources.fetch("studies", "getPricingUnit", unitParams)
.then(preselectedPricingUnit => {
- if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) {
- const unitButtons = new osparc.study.PricingUnits(pricingPlans["pricingUnits"], preselectedPricingUnit);
+ if (pricingPlan && "pricingUnits" in pricingPlan && pricingPlan["pricingUnits"].length) {
+ const pricingUnitButtons = this.__pricingUnits = new osparc.study.PricingUnits(pricingPlan["pricingUnits"], preselectedPricingUnit);
if (inGroupBox) {
const pricingUnitsLayout = osparc.study.StudyOptions.createGroupBox(nodeLabel);
- pricingUnitsLayout.add(unitButtons);
+ pricingUnitsLayout.add(pricingUnitButtons);
this._add(pricingUnitsLayout);
} else {
- this._add(unitButtons);
+ this._add(pricingUnitButtons);
}
- unitButtons.addListener("changeSelectedUnitId", e => {
- unitButtons.setEnabled(false);
- const selectedPricingUnitId = e.getData();
- this.self().pricingUnitSelected(this.__studyId, this.__nodeId, pricingPlans["pricingPlanId"], selectedPricingUnitId)
- .finally(() => unitButtons.setEnabled(true));
+ pricingUnitButtons.addListener("changeSelectedUnitId", e => {
+ if (this.isPatchNode()) {
+ pricingUnitButtons.setEnabled(false);
+ const pricingPlanId = this.getPricingPlanId();
+ const selectedPricingUnitId = e.getData();
+ this.self().patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)
+ .finally(() => pricingUnitButtons.setEnabled(true));
+ }
});
}
})
@@ -110,6 +144,10 @@ qx.Class.define("osparc.study.NodePricingUnits", {
}
});
});
- }
+ },
+
+ getPricingUnits: function() {
+ return this.__pricingUnits;
+ },
}
});
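
Note: the new patchNode property gates the PATCH request, so the same widget can be reused read-only. Sketch (studyId/nodeId/node assumed in scope):

    const nodePricingUnits = new osparc.study.NodePricingUnits(studyId, nodeId, node);
    nodePricingUnits.setPatchNode(false); // unit selection stays local, nothing is PATCHed
    nodePricingUnits.showPricingUnits()
      .then(() => console.log("units rendered"));
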
diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
index 54ba001d6d6..5b0fd30cadb 100644
--- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
+++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
@@ -22,36 +22,35 @@ qx.Class.define("osparc.study.StudyOptions", {
this.base(arguments);
this._setLayout(new qx.ui.layout.VBox(15));
+ this.__buildLayout();
- this.__studyId = studyId;
-
- const params = {
- url: {
- studyId
- }
- };
- Promise.all([
- osparc.data.Resources.getOne("studies", params),
- osparc.data.Resources.fetch("studies", "getWallet", params)
- ])
- .then(values => {
- const studyData = values[0];
- this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
- if (values[1] && "walletId" in values[1]) {
- this.__projectWalletId = values[1]["walletId"];
- }
- this.__buildLayout();
- });
+ if (studyId) {
+ this.setStudyId(studyId);
+ }
},
properties: {
+ studyId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ apply: "__fetchStudy"
+ },
+
wallet: {
check: "osparc.data.model.Wallet",
init: null,
nullable: true,
event: "changeWallet",
apply: "__applyWallet"
- }
+ },
+
+ patchStudy: {
+ check: "Boolean",
+ init: true,
+ nullable: false,
+ event: "changePatchStudy",
+ },
},
events: {
@@ -89,13 +88,36 @@ qx.Class.define("osparc.study.StudyOptions", {
});
box.setLayout(new qx.ui.layout.VBox(5));
return box;
- }
+ },
+
+ updateName: function(studyData, name) {
+ return osparc.info.StudyUtils.patchStudyData(studyData, "name", name)
+ .catch(err => {
+ console.error(err);
+          const msg = err.message || qx.locale.Manager.tr("Something went wrong while renaming");
+ osparc.FlashMessenger.logAs(msg, "ERROR");
+ });
+ },
+
+ updateWallet: function(studyId, walletId) {
+ const params = {
+ url: {
+ studyId,
+ walletId,
+ }
+ };
+ return osparc.data.Resources.fetch("studies", "selectWallet", params)
+ .catch(err => {
+ console.error(err);
+ const msg = err.message || qx.locale.Manager.tr("Error selecting Credit Account");
+ osparc.FlashMessenger.getInstance().logAs(msg, "ERROR");
+ });
+ },
},
members: {
- __studyId: null,
__studyData: null,
- __projectWalletId: null,
+ __studyWalletId: null,
_createChildControlImpl: function(id) {
let control;
@@ -105,7 +127,7 @@ qx.Class.define("osparc.study.StudyOptions", {
this._addAt(control, 0);
break;
case "title-field":
- control = new qx.ui.form.TextField(this.__studyData["name"]).set({
+ control = new qx.ui.form.TextField().set({
maxWidth: 220
});
this.getChildControl("title-layout").add(control);
@@ -159,6 +181,27 @@ qx.Class.define("osparc.study.StudyOptions", {
control = this.self().createGroupBox(this.tr("Tiers"));
this.getChildControl("options-layout").add(control);
break;
+ case "study-pricing-units": {
+ control = new osparc.study.StudyPricingUnits();
+ const loadingImage = this.getChildControl("loading-units-spinner");
+ const unitsBoxesLayout = this.getChildControl("services-resources-layout");
+ const unitsLoading = () => {
+ loadingImage.show();
+ unitsBoxesLayout.exclude();
+ };
+ const unitsReady = () => {
+ loadingImage.exclude();
+ unitsBoxesLayout.show();
+ control.getNodePricingUnits().forEach(nodePricingUnits => {
+ this.bind("patchStudy", nodePricingUnits, "patchNode");
+ });
+ };
+ unitsLoading();
+ control.addListener("loadingUnits", () => unitsLoading());
+ control.addListener("unitsReady", () => unitsReady());
+ unitsBoxesLayout.add(control);
+ break;
+ }
case "buttons-layout":
control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({
alignX: "right"
@@ -192,6 +235,37 @@ qx.Class.define("osparc.study.StudyOptions", {
return control || this.base(arguments, id);
},
+ __fetchStudy: function(studyId) {
+ const params = {
+ url: {
+ studyId
+ }
+ };
+ Promise.all([
+ osparc.data.Resources.getOne("studies", params),
+ osparc.data.Resources.fetch("studies", "getWallet", params)
+ ])
+ .then(values => {
+ const studyData = values[0];
+ this.setStudyData(studyData);
+
+ if (values[1] && "walletId" in values[1]) {
+ this.__studyWalletId = values[1]["walletId"];
+ }
+ this.__buildLayout();
+ });
+ },
+
+ setStudyData: function(studyData) {
+ this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
+
+ const titleField = this.getChildControl("title-field");
+ titleField.setValue(this.__studyData["name"]);
+
+ const studyPricingUnits = this.getChildControl("study-pricing-units");
+ studyPricingUnits.setStudyData(this.__studyData);
+ },
+
__applyWallet: function(wallet) {
if (wallet) {
const walletSelector = this.getChildControl("wallet-selector");
@@ -214,7 +288,6 @@ qx.Class.define("osparc.study.StudyOptions", {
__buildTopSummaryLayout: function() {
const store = osparc.store.Store.getInstance();
- this._createChildControlImpl("title-label");
const titleField = this.getChildControl("title-field");
titleField.addListener("appear", () => {
titleField.focus();
@@ -222,7 +295,6 @@ qx.Class.define("osparc.study.StudyOptions", {
});
// Wallet Selector
- this._createChildControlImpl("wallet-selector-label");
const walletSelector = this.getChildControl("wallet-selector");
const wallets = store.getWallets();
@@ -241,8 +313,8 @@ qx.Class.define("osparc.study.StudyOptions", {
}
});
const preferredWallet = store.getPreferredWallet();
- if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__projectWalletId))) {
- selectWallet(this.__projectWalletId);
+ if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__studyWalletId))) {
+ selectWallet(this.__studyWalletId);
} else if (preferredWallet) {
selectWallet(preferredWallet.getWalletId());
} else if (!osparc.desktop.credits.Utils.autoSelectActiveWallet(walletSelector)) {
@@ -251,21 +323,7 @@ qx.Class.define("osparc.study.StudyOptions", {
},
__buildOptionsLayout: function() {
- const loadingImage = this.getChildControl("loading-units-spinner");
- const unitsBoxesLayout = this.getChildControl("services-resources-layout");
- const unitsLoading = () => {
- loadingImage.show();
- unitsBoxesLayout.exclude();
- };
- const unitsReady = () => {
- loadingImage.exclude();
- unitsBoxesLayout.show();
- };
- unitsLoading();
- const studyPricingUnits = new osparc.study.StudyPricingUnits(this.__studyData);
- studyPricingUnits.addListener("loadingUnits", () => unitsLoading());
- studyPricingUnits.addListener("unitsReady", () => unitsReady());
- unitsBoxesLayout.add(studyPricingUnits);
+ this.getChildControl("study-pricing-units");
},
__buildButtons: function() {
@@ -281,47 +339,34 @@ qx.Class.define("osparc.study.StudyOptions", {
const openButton = this.getChildControl("open-button");
openButton.setFetching(true);
- // first, update the name if necessary
- const titleSelection = this.getChildControl("title-field").getValue();
- if (this.__studyData["name"] !== titleSelection) {
- await this.__updateName(this.__studyData, titleSelection);
- }
+ if (this.isPatchStudy()) {
+ // first, update the name if necessary
+ const titleSelection = this.getChildControl("title-field").getValue();
+ if (this.__studyData["name"] !== titleSelection) {
+ await this.self().updateName(this.__studyData, titleSelection);
+ }
- // second, update the wallet if necessary
- const store = osparc.store.Store.getInstance();
- const walletSelection = this.getChildControl("wallet-selector").getSelection();
- if (walletSelection.length && walletSelection[0]["walletId"]) {
- const params = {
- url: {
- "studyId": this.__studyData["uuid"],
- "walletId": walletSelection[0]["walletId"]
- }
- };
- osparc.data.Resources.fetch("studies", "selectWallet", params)
- .then(() => {
- store.setActiveWallet(this.getWallet());
- this.fireEvent("startStudy");
- })
- .catch(err => {
- console.error(err);
- const msg = err.message || this.tr("Error selecting Credit Account");
- osparc.FlashMessenger.getInstance().logAs(msg, "ERROR");
- })
- .finally(() => openButton.setFetching(false));
+ // second, update the wallet if necessary
+ const store = osparc.store.Store.getInstance();
+ const walletSelection = this.getChildControl("wallet-selector").getSelection();
+ if (walletSelection.length && walletSelection[0]["walletId"]) {
+ const studyId = this.getStudyId();
+ const walletId = walletSelection[0]["walletId"];
+ this.self().updateWallet(studyId, walletId)
+ .then(() => {
+ store.setActiveWallet(this.getWallet());
+ this.fireEvent("startStudy");
+ })
+ .finally(() => openButton.setFetching(false));
+ } else {
+ store.setActiveWallet(this.getWallet());
+ this.fireEvent("startStudy");
+ openButton.setFetching(false);
+ }
} else {
- store.setActiveWallet(this.getWallet());
this.fireEvent("startStudy");
openButton.setFetching(false);
}
},
-
- __updateName: function(studyData, name) {
- return osparc.info.StudyUtils.patchStudyData(studyData, "name", name)
- .catch(err => {
- console.error(err);
- const msg = this.tr("Something went wrong Renaming");
- osparc.FlashMessenger.logAs(msg, "ERROR");
- });
- }
}
});
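
Note: StudyOptions no longer fetches in the constructor; setting studyId triggers __fetchStudy via the property's apply, and patchStudy decides whether "Open" persists the name/wallet choices first. Sketch (the uuid is made up):

    const studyOptions = new osparc.study.StudyOptions(); // builds the layout only
    studyOptions.setStudyId("some-study-uuid"); // apply handler fetches study + wallet
    studyOptions.setPatchStudy(false); // start without PATCHing name/wallet
    studyOptions.addListener("startStudy", () => console.log("starting"));
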
diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
index 793fee5cb34..e3e8514fbaf 100644
--- a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
+++ b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
@@ -25,9 +25,11 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
layout: new qx.ui.layout.VBox(5)
});
- this.__studyData = studyData;
+ this.__nodePricingUnits = [];
- this.__showPricingUnits();
+ if (studyData) {
+ this.setStudyData(studyData);
+ }
},
events: {
@@ -35,8 +37,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
"unitsReady": "qx.event.type.Event"
},
+ statics: {
+ includeInList: function(node) {
+ return !osparc.data.model.Node.isFrontend(node);
+ },
+ },
+
members: {
__studyData: null,
+ __nodePricingUnits: null,
+
+ setStudyData: function(studyData) {
+ this.__studyData = studyData;
+ this.__showPricingUnits();
+ },
__showPricingUnits: function() {
const unitsLoading = () => this.fireEvent("loadingUnits");
@@ -48,16 +62,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
const workbench = this.__studyData["workbench"];
Object.keys(workbench).forEach(nodeId => {
const node = workbench[nodeId];
- if (osparc.data.model.Node.isFrontend(node)) {
- return;
+ if (this.self().includeInList(node)) {
+ const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node);
+ this.__nodePricingUnits.push(nodePricingUnits);
+ this._add(nodePricingUnits);
+ promises.push(nodePricingUnits.showPricingUnits());
}
- const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node);
- this._add(nodePricingUnits);
- promises.push(nodePricingUnits.showPricingUnits());
});
}
Promise.all(promises)
.then(() => unitsAdded());
- }
+ },
+
+ getNodePricingUnits: function() {
+ return this.__nodePricingUnits;
+ },
}
});
diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js
index dab2bd53bd8..66ed40201f4 100644
--- a/services/static-webserver/client/source/class/osparc/study/Utils.js
+++ b/services/static-webserver/client/source/class/osparc/study/Utils.js
@@ -116,7 +116,8 @@ qx.Class.define("osparc.study.Utils", {
newStudyLabel = metadata["name"];
}
if (existingStudies) {
- const title = osparc.utils.Utils.getUniqueStudyName(newStudyLabel, existingStudies);
+ const existingNames = existingStudies.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(newStudyLabel, existingNames);
minStudyData["name"] = title;
} else {
minStudyData["name"] = newStudyLabel;
@@ -234,7 +235,7 @@ qx.Class.define("osparc.study.Utils", {
// update task
osparc.widget.ProgressSequence.updateTaskProgress(existingTask, {
value: percent,
- progressLabel: percent*100 + "%"
+ progressLabel: parseFloat((percent*100).toFixed(2)) + "%"
});
} else {
// new task
@@ -254,7 +255,7 @@ qx.Class.define("osparc.study.Utils", {
}, this);
task.addListener("resultReceived", e => {
const studyData = e.getData();
- resolve(studyData["uuid"]);
+ resolve(studyData);
}, this);
task.addListener("pollingError", e => {
const err = e.getData();
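
Note: getUniqueStudyName(label, studies) becomes the more general getUniqueName(label, names), which takes a plain list of strings. Sketch, assuming the helper appends a numeric suffix on collision:

    const existingNames = existingStudies.map(study => study["name"]);
    osparc.utils.Utils.getUniqueName("New Study", existingNames);
    // e.g. "New Study (1)" if "New Study" is already taken
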
diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
index ca275a2371d..fda2ccd25a4 100644
--- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
+++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
@@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorDark", {
include: osparc.theme.mixin.Color,
colors: {
+ // 105-0
"c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105),
- "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100),
- "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95),
- "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85),
- "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80),
- "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70),
- "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55),
- "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45),
- "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35),
- "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 30),
- "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25),
- "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 20),
- "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15),
- "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8),
- "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0),
+ "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-5),
+ "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-10),
+ "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-20),
+ "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-25),
+ "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-35),
+ "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-50),
+ "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-60),
+ "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-70),
+ "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-75),
+ "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-80),
+ "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-90),
+ "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-105),
"product-color": "rgba(0, 144, 208, 1)", // override in product
"strong-main": "product-color",
- "strong-text": "rgba(255, 255, 255, 1)",
"a-bit-transparent": "rgba(0, 0, 0, 0.4)",
// main
- "background-main": "#222",
+ "background-main": "c01",
"background-main-1": "c02",
"background-main-2": "c03",
"background-main-3": "c04",
@@ -34,10 +32,7 @@ qx.Theme.define("osparc.theme.ColorDark", {
"background-card-overlay": "rgba(25, 33, 37, 0.8)",
"background-workspace-card-overlay": "rgb(35, 93, 122)",
- "primary-background-color": "rgba(0, 20, 46, 1)",
"navigation_bar_background_color": "rgba(1, 18, 26, 0.8)",
- "tab_navigation_bar_background_color": "c00",
- "modal-backdrop": "rgba(8, 9, 13, 1)",
"fab_text": "contrasted-text-dark",
"fab-background": "rgba(255, 255, 255, 0.2)",
"input_background": "#213248",
@@ -58,18 +53,18 @@ qx.Theme.define("osparc.theme.ColorDark", {
"link": "rgba(10, 182, 255, 1)",
// shadows
- "bg-shadow": "background-main-5",
- "box-shadow": "rgba(0,0,0, 0.15)",
+ "bg-shadow": "background-main-5",
+ "box-shadow": "rgba(0, 0, 0, 0.15)",
"shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow",
// window
"window-popup-background": "rgba(66, 66, 66, 1)",
"window-caption-background": "background-main",
- "window-caption-background-active": "background-main-3",
+ "window-caption-background-active": "background-main-3",
"window-caption-text": "text",
"window-caption-text-active": "c12",
- "window-border": "background-main-2",
- "window-border-inner": "background-main-1",
+ "window-border": "background-main-2",
+ "window-border-inner": "background-main-1",
// material-button
"material-button-background": "fab-background",
@@ -88,10 +83,10 @@ qx.Theme.define("osparc.theme.ColorDark", {
// backgrounds
"background-selected": "default-button-background",
- "background-selected-disabled": "default-button-disabled",
- "background-selected-dark": "product-color",
+ "background-selected-disabled": "default-button-disabled",
+ "background-selected-dark": "product-color",
"background-disabled": "background-main",
- "background-disabled-checked": "background-main-1",
+ "background-disabled-checked": "background-main-1",
"background-pane": "background-main",
// tabview
@@ -102,23 +97,23 @@ qx.Theme.define("osparc.theme.ColorDark", {
"tabview-button-background": "transparent",
// scrollbar
- "scrollbar-passive": "background-main-4",
- "scrollbar-active": "background-main-5",
+ "scrollbar-passive": "background-main-4",
+ "scrollbar-active": "background-main-5",
// form
"button": "background-main-4",
- "button-border": "background-main-5",
+ "button-border": "background-main-5",
"button-border-hovered": "c07",
- "button-box": "background-main-3",
- "button-box-pressed": "background-main-4",
+ "button-box": "background-main-3",
+ "button-box-pressed": "background-main-4",
"border-lead": "c07",
// group box
- "white-box-border": "background-main-2",
+ "white-box-border": "background-main-2",
// borders
// 'border-main' is an alias of 'background-selected' (compatibility reasons)
- "border": "background-main-3",
+ "border": "background-main-3",
"border-focused": "c09",
"border-invalid": "failed-red",
"border-disabled": "background-main",
@@ -134,13 +129,13 @@ qx.Theme.define("osparc.theme.ColorDark", {
"table-header": "background-main",
"table-header-foreground": "c09",
"table-header-border": "c07",
- "table-focus-indicator": "background-main-5",
+ "table-focus-indicator": "background-main-5",
// used in table code
"table-header-cell": "background-main",
- "table-row-background-focused-selected": "background-main-4",
- "table-row-background-focused": "background-main-3",
- "table-row-background-selected": "background-main-4",
+ "table-row-background-focused-selected": "background-main-4",
+ "table-row-background-focused": "background-main-3",
+ "table-row-background-selected": "background-main-4",
"table-row-background-even": "background-main",
"table-row-background-odd": "background-main",
@@ -156,11 +151,11 @@ qx.Theme.define("osparc.theme.ColorDark", {
"progressive-table-header": "c08",
"progressive-table-row-background-even": "background-main",
"progressive-table-row-background-odd": "background-main",
- "progressive-progressbar-background": "background-main",
+ "progressive-progressbar-background": "background-main",
"progressive-progressbar-indicator-done": "background-main",
- "progressive-progressbar-indicator-undone": "background-main-1",
- "progressive-progressbar-percent-background": "background-main",
- "progressive-progressbar-percent-text": "background-main-1",
+ "progressive-progressbar-indicator-undone": "background-main-1",
+ "progressive-progressbar-percent-background": "background-main",
+ "progressive-progressbar-percent-text": "background-main-1",
@@ -168,6 +163,8 @@ qx.Theme.define("osparc.theme.ColorDark", {
"workbench-edge-comp-active": "#777777",
"workbench-edge-api-active": "#BBBBBB",
"workbench-start-hint": "#505050",
+ "workbench-view-navbar": "c00",
+ "workbench-view-splitter": "#000000",
"node-background": "rgba(113, 157, 181, 0.5)",
"node-selected-background": "strong-main",
diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
index 54f1e83d0ea..c1a6bfb5783 100644
--- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
+++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
@@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorLight", {
include: osparc.theme.mixin.Color,
colors: {
+ // 0-105
"c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0),
- "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8),
- "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15),
- "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25),
- "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35),
- "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45),
- "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55),
- "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 60),
- "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 65),
- "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70),
- "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80),
- "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85),
- "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95),
- "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100),
- "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105),
+ "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+5),
+ "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+10),
+ "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+20),
+ "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+25),
+ "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+35),
+ "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+50),
+ "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+60),
+ "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+70),
+ "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+75),
+ "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+80),
+ "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+90),
+ "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+105),
"product-color": "rgba(0, 144, 208, 1)", // override in product
"strong-main": "product-color",
- "strong-text": "background-main-1",
"a-bit-transparent": "rgba(255, 255, 255, 0.4)",
// main
- "background-main": "rgba(250,250,250, 1)", // Is manipulated
+ "background-main": "c01",
"background-main-1": "c02",
"background-main-2": "c03",
"background-main-3": "c04",
@@ -34,12 +32,9 @@ qx.Theme.define("osparc.theme.ColorLight", {
"background-card-overlay": "rgba(229, 229, 229, 0.8)",
"background-workspace-card-overlay": "rgb(165, 223, 252)",
- "primary-background-color": "rgba(255, 255, 255, 1)",
"navigation_bar_background_color": "rgba(229, 229, 229, 0.8)",
- "tab_navigation_bar_background_color": "c00",
- "modal-backdrop": "rgba(247, 248, 252, 0.4)",
"fab_text": "contrasted-text-dark",
- "fab-background": "rgba(255, 255, 255, 1)",
+ "fab-background": "rgba(255, 255, 255, 0.2)",
"input_background": "rgba(209, 214, 218, 1)",
"input_background_disable": "rgba(113, 157, 181, 0.04)",
"hint-background": "rgba(201, 201, 201, 1)",
@@ -58,24 +53,25 @@ qx.Theme.define("osparc.theme.ColorLight", {
"link": "rgba(10, 182, 255, 1)",
// shadows
- "bg-shadow": "background-main-5",
- "box-shadow": "rgba(0,0,0, 0.15)",
+ "bg-shadow": "background-main-5",
+ "box-shadow": "rgba(0, 0, 0, 0.15)",
"shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow",
// window
- "window-popup-background": "rgba(255, 255, 255, 1)",
+ "window-popup-background": "rgba(225, 225, 225, 1)",
"window-caption-background": "background-main",
- "window-caption-background-active": "background-main-3",
+ "window-caption-background-active": "background-main-3",
"window-caption-text": "text",
"window-caption-text-active": "c12",
- "window-border": "background-main-2",
- "window-border-inner": "background-main-1",
+ "window-border": "background-main-2",
+ "window-border-inner": "background-main-1",
// material-button
- "material-button-background": "fab-background",
- "material-button-background-disabled": "default-button-disabled-background",
- "material-button-background-hovered": "default-button-hover-background",
- "material-button-background-pressed": "default-button-active-background",
+ "material-button-background": "fab-background",
+ "material-button-background-disabled": "default-button-disabled-background",
+ "material-button-background-hovered": "default-button-hover-background",
+ "material-button-background-pressed": "default-button-active-background",
"material-button-text-disabled": "default-button-disabled-background",
"material-button-text": "default-button-text-outline",
@@ -88,43 +84,43 @@ qx.Theme.define("osparc.theme.ColorLight", {
// backgrounds
"background-selected": "default-button-background",
- "background-selected-disabled": "default-button-disabled",
- "background-selected-dark": "product-color",
+ "background-selected-disabled": "default-button-disabled",
+ "background-selected-dark": "product-color",
"background-disabled": "background-main",
- "background-disabled-checked": "background-main-1",
+ "background-disabled-checked": "background-main-1",
"background-pane": "background-main",
// tabview
"tabview-unselected": "c14",
- "tabview-button-border": "c14",
+ "tabview-button-border": "product-color",
"tabview-label-active-disabled": "c10",
"tabview-pane-background": "transparent",
"tabview-button-background": "transparent",
// scrollbar
- "scrollbar-passive": "background-main-4",
- "scrollbar-active": "background-main-5",
+ "scrollbar-passive": "background-main-4",
+ "scrollbar-active": "background-main-5",
// form
- "button": "background-main-4",
- "button-border": "background-main-5",
+ "button": "background-main-4",
+ "button-border": "background-main-5",
"button-border-hovered": "c07",
- "button-box": "background-main-3",
- "button-box-pressed": "background-main-4",
+ "button-box": "background-main-3",
+ "button-box-pressed": "background-main-4",
"border-lead": "c07",
// group box
- "white-box-border": "background-main-2",
+ "white-box-border": "background-main-2",
// borders
// 'border-main' is an alias of 'background-selected' (compatibility reasons)
- "border": "background-main-3",
+ "border": "background-main-3",
"border-focused": "c09",
"border-invalid": "failed-red",
"border-disabled": "background-main",
// separator
- "border-separator": "fab-background",
+ "border-separator": "background-main-3",
// tooltip
"tooltip": "flash_message_bg",
@@ -135,13 +131,13 @@ qx.Theme.define("osparc.theme.ColorLight", {
"table-header": "background-main",
"table-header-foreground": "c09",
"table-header-border": "c07",
- "table-focus-indicator": "background-main-5",
+ "table-focus-indicator": "background-main-5",
// used in table code
"table-header-cell": "background-main",
- "table-row-background-focused-selected": "background-main-4",
- "table-row-background-focused": "background-main-3",
- "table-row-background-selected": "background-main-4",
+ "table-row-background-focused-selected": "background-main-4",
+ "table-row-background-focused": "background-main-3",
+ "table-row-background-selected": "background-main-4",
"table-row-background-even": "background-main",
"table-row-background-odd": "background-main",
@@ -157,17 +153,19 @@ qx.Theme.define("osparc.theme.ColorLight", {
"progressive-table-header": "c08",
"progressive-table-row-background-even": "background-main",
"progressive-table-row-background-odd": "background-main",
- "progressive-progressbar-background": "background-main",
+ "progressive-progressbar-background": "background-main",
"progressive-progressbar-indicator-done": "background-main",
- "progressive-progressbar-indicator-undone": "background-main-1",
- "progressive-progressbar-percent-background": "background-main",
- "progressive-progressbar-percent-text": "background-main-1",
+ "progressive-progressbar-indicator-undone": "background-main-1",
+ "progressive-progressbar-percent-background": "background-main",
+ "progressive-progressbar-percent-text": "background-main-1",
// OSPARC
"workbench-edge-comp-active": "#888888",
"workbench-edge-api-active": "#444444",
"workbench-start-hint": "#AFAFAF",
+ "workbench-view-navbar": "c02",
+ "workbench-view-splitter": "background-main-3",
"node-background": "rgba(113, 157, 181, 0.35)",
"node-selected-background": "strong-main",
diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js b/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
index 4b23fc0efde..64930674e25 100644
--- a/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
+++ b/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
@@ -13,17 +13,19 @@ qx.Class.define("osparc.ui.basic.Tag", {
extend: qx.ui.basic.Label,
/**
* Constructor for the Tag element.
- * @param {String} value Short text to be shown on the tag
- * @param {String} color Color for the background, must be in hex3 or hex6 form
+ * @param {osparc.data.model.Tag} tag Tag model whose name and color are bound to this element
* @param {String} [filterGroupId] If present, clicking on the tab will dispatch a bus message with the
* id ``GroupIdTagsTrigger`` to be subscribed by a filter.
*/
- construct: function(value, color, filterGroupId) {
- this.base(arguments, value);
- this.setFont("text-11");
- if (color) {
- this.setColor(color);
+ construct: function(tag, filterGroupId) {
+ this.base(arguments);
+
+ if (tag) {
+ tag.bind("name", this, "value");
+ tag.bind("color", this, "color");
}
+ this.setFont("text-11");
+
if (filterGroupId) {
this.setCursor("pointer");
this.addListener("tap", e => {
diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js
index 5c751c2ee8f..b095d95eee2 100644
--- a/services/static-webserver/client/source/class/osparc/utils/Utils.js
+++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js
@@ -277,12 +277,11 @@ qx.Class.define("osparc.utils.Utils", {
return reloadButton;
},
- getUniqueStudyName: function(preferredName, list) {
+ getUniqueName: function(preferredName, existingNames) {
let title = preferredName;
- const existingTitles = list.map(study => study.name);
- if (existingTitles.includes(title)) {
+ if (existingNames.includes(title)) {
let cont = 1;
- while (existingTitles.includes(`${title} (${cont})`)) {
+ while (existingNames.includes(`${title} (${cont})`)) {
cont++;
}
title += ` (${cont})`;
diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
index 180de5bb2cb..7cf74384589 100644
--- a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
+++ b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
@@ -33,12 +33,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
this.setNode(node);
},
- events: {
- "versionChanged": "qx.event.type.Event",
- "bootModeChanged": "qx.event.type.Event",
- "limitsChanged": "qx.event.type.Event"
- },
-
properties: {
node: {
check: "osparc.data.model.Node",
@@ -74,7 +68,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
(node.isUpdatable() || node.isDeprecated() || node.isRetired())
) {
const lifeCycleView = new osparc.node.LifeCycleView(node);
- node.addListener("versionChanged", () => this.fireEvent("versionChanged"));
sections.push(lifeCycleView);
showStartStopButton = true;
@@ -83,7 +76,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
// Boot Options
if (node.hasBootModes()) {
const bootOptionsView = new osparc.node.BootOptionsView(node);
- node.addListener("bootModeChanged", () => this.fireEvent("bootModeChanged"));
sections.push(bootOptionsView);
showStartStopButton = true;
@@ -95,7 +87,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
(node.isComputational() || node.isDynamic())
) {
const updateResourceLimitsView = new osparc.node.UpdateResourceLimitsView(node);
- node.addListener("limitsChanged", () => this.fireEvent("limitsChanged"));
sections.push(updateResourceLimitsView);
showStartStopButton |= node.isDynamic();
diff --git a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js
index 0b543b6a158..e88930b09c5 100644
--- a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js
+++ b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js
@@ -290,15 +290,13 @@ qx.Class.define("osparc.widget.NodesTree", {
});
} else {
const node = study.getWorkbench().getNode(nodeId);
- const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), {
+ const metadata = node.getMetaData();
+ const serviceDetails = new osparc.info.ServiceLarge(metadata, {
nodeId,
label: node.getLabel(),
studyId: study.getUuid()
});
- const title = this.tr("Service information");
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
}
}
},
diff --git a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
index 173b3689524..e733be3b6bc 100644
--- a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
+++ b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
@@ -172,7 +172,7 @@ qx.Class.define("osparc.workbench.DiskUsageIndicator", {
toolTipText += this.tr("Data storage: ") + osparc.utils.Utils.bytesToSize(diskVolsUsage.free) + "
";
toolTipText += this.tr("I/O storage: ") + osparc.utils.Utils.bytesToSize(diskHostUsage.free) + "
";
}
- const bgColor = qx.theme.manager.Color.getInstance().resolve("tab_navigation_bar_background_color");
+ const bgColor = qx.theme.manager.Color.getInstance().resolve("workbench-view-navbar");
const color2 = qx.theme.manager.Color.getInstance().resolve("progressive-progressbar-background");
indicator.getContentElement().setStyles({
"background-color": bgColor,
diff --git a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js
index faf60dd0034..b9dd0867a4c 100644
--- a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js
+++ b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js
@@ -300,12 +300,7 @@ qx.Class.define("osparc.workbench.ServiceCatalog", {
__showServiceDetails: async function() {
const serviceMetadata = await this.__getSelectedService();
const serviceDetails = new osparc.info.ServiceLarge(serviceMetadata);
- const title = this.tr("Service information");
- const width = osparc.info.CardLarge.WIDTH;
- const height = osparc.info.CardLarge.HEIGHT;
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height,
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
},
__onCancel: function() {
diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js
index 21c55e487d1..504faf3c33f 100644
--- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js
+++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js
@@ -1665,17 +1665,13 @@ qx.Class.define("osparc.workbench.WorkbenchUI", {
__openNodeInfo: function(nodeId) {
if (nodeId) {
const node = this.getStudy().getWorkbench().getNode(nodeId);
- const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), {
+ const metadata = node.getMetaData();
+ const serviceDetails = new osparc.info.ServiceLarge(metadata, {
nodeId,
label: node.getLabel(),
studyId: this.getStudy().getUuid()
});
- const title = this.tr("Service information");
- const width = osparc.info.CardLarge.WIDTH;
- const height = osparc.info.CardLarge.HEIGHT;
- osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({
- maxHeight: height
- });
+ osparc.info.ServiceLarge.popUpInWindow(serviceDetails);
}
},
diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
index cacb9ffb83d..492544fa598 100644
--- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
+++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
@@ -7,7 +7,7 @@
"steps": [{
"anchorEl": "osparc-test-id=dashboardTabs",
"title": "Dashboard Menu",
- "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials, Services and Data.",
+ "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials and Services.",
"placement": "bottom"
}, {
"beforeClick": {
@@ -28,7 +28,7 @@
"selector": "osparc-test-id=servicesTabBtn"
},
"anchorEl": "osparc-test-id=servicesTabBtn",
- "text": "Every Project in Sim4Life is composed of at lease one so-called Service.
Services are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
+ "text": "Every Project in Sim4Life is composed of at lease one so-called Service.
Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
"placement": "bottom"
}]
},
diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt
index 2537e2b3b3b..3c341f221f9 100644
--- a/services/storage/requirements/_base.txt
+++ b/services/storage/requirements/_base.txt
@@ -93,7 +93,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt
index 2b21bae3380..6157f084c4d 100644
--- a/services/storage/requirements/_test.txt
+++ b/services/storage/requirements/_test.txt
@@ -17,10 +17,6 @@ annotated-types==0.7.0
# pydantic
antlr4-python3-runtime==4.13.2
# via moto
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/storage/src/simcore_service_storage/db_access_layer.py b/services/storage/src/simcore_service_storage/db_access_layer.py
index 19452862de5..b77504088f1 100644
--- a/services/storage/src/simcore_service_storage/db_access_layer.py
+++ b/services/storage/src/simcore_service_storage/db_access_layer.py
@@ -51,6 +51,7 @@
workspaces_access_rights,
)
from simcore_postgres_database.storage_models import file_meta_data, user_to_groups
+from simcore_postgres_database.utils_sql import assemble_array_groups
logger = logging.getLogger(__name__)
@@ -117,14 +118,6 @@ def _aggregate_access_rights(
return AccessRights.none()
-def assemble_array_groups(user_group_ids: list[GroupID]) -> str:
- return (
- "array[]::text[]"
- if len(user_group_ids) == 0
- else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]"""
- )
-
-
access_rights_subquery = (
sa.select(
project_to_groups.c.project_uuid,
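
Note: assemble_array_groups is behavior-preserving here; it only moves from this
module into simcore_postgres_database.utils_sql. A minimal sketch of the expected
output, inferred from the removed in-module implementation above (not from the
library source):

    from simcore_postgres_database.utils_sql import assemble_array_groups

    # empty input -> empty postgres text-array literal
    assert assemble_array_groups([]) == "array[]::text[]"
    # group ids are rendered as quoted text elements
    assert assemble_array_groups([3, 7]) == "array['3', '7']"
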
diff --git a/services/storage/src/simcore_service_storage/redis.py b/services/storage/src/simcore_service_storage/redis.py
index 2380bd332dc..f18f891ec19 100644
--- a/services/storage/src/simcore_service_storage/redis.py
+++ b/services/storage/src/simcore_service_storage/redis.py
@@ -5,6 +5,7 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ._meta import APP_NAME
from .constants import APP_CONFIG_KEY
from .settings import Settings
@@ -20,7 +21,9 @@ async def _setup(app: web.Application):
assert settings.STORAGE_REDIS # nosec
redis_settings: RedisSettings = settings.STORAGE_REDIS
redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS)
- app[_APP_REDIS_KEY] = client = RedisClientSDK(redis_locks_dsn)
+ app[_APP_REDIS_KEY] = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
yield
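
Note: passing client_name=APP_NAME tags every Redis connection opened by this
service with its application name, so individual services can be told apart on
the Redis side (e.g. in the output of redis-cli CLIENT LIST). A minimal sketch,
assuming RedisClientSDK forwards the name to the underlying client as the call
above suggests; the DSN and name below are illustrative:

    from servicelib.redis import RedisClientSDK

    client = RedisClientSDK(
        "redis://redis:6379/1",
        client_name="simcore-service-storage",  # illustrative value of APP_NAME
    )
    # await client.setup() before use, as in _setup() above
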
diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py
index ca6483c75ae..1fc8719cfa7 100644
--- a/services/storage/tests/conftest.py
+++ b/services/storage/tests/conftest.py
@@ -168,19 +168,21 @@ def app_settings(
datcore_adapter_service_mock: aioresponses.aioresponses,
monkeypatch: pytest.MonkeyPatch,
) -> Settings:
+ s3_settings_dict = {}
if external_envfile_dict:
s3_settings = S3Settings.create_from_envs(**external_envfile_dict)
if s3_settings.S3_ENDPOINT is None:
monkeypatch.delenv("S3_ENDPOINT")
- setenvs_from_dict(
- monkeypatch,
- s3_settings.model_dump(exclude={"S3_ENDPOINT"}),
- )
+ s3_settings_dict = s3_settings.model_dump(exclude={"S3_ENDPOINT"})
else:
- setenvs_from_dict(
- monkeypatch,
- s3_settings.model_dump(),
- )
+ s3_settings_dict = s3_settings.model_dump()
+ setenvs_from_dict(
+ monkeypatch,
+ {
+ **s3_settings_dict,
+ "STORAGE_TRACING": "null",
+ },
+ )
test_app_settings = Settings.create_from_envs()
print(f"{test_app_settings.model_dump_json(indent=2)=}")
return test_app_settings
diff --git a/services/web/server/VERSION b/services/web/server/VERSION
index a8ab6c9666a..bcce5d06b8a 100644
--- a/services/web/server/VERSION
+++ b/services/web/server/VERSION
@@ -1 +1 @@
-0.44.0
+0.45.0
diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt
index 5b1c3de7397..bacb3f9dced 100644
--- a/services/web/server/requirements/_base.txt
+++ b/services/web/server/requirements/_base.txt
@@ -105,7 +105,6 @@ async-timeout==4.0.3
# via
# aiohttp
# aiopg
- # redis
asyncpg==0.27.0
# via
# -r requirements/_base.in
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index 6dfe257d7a8..54ecb02f5e4 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -18,7 +18,6 @@ async-timeout==4.0.3
# via
# -c requirements/_base.txt
# aiohttp
- # redis
asyncpg==0.27.0
# via
# -c requirements/_base.txt
diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg
index ab412830c97..2b54478220b 100644
--- a/services/web/server/setup.cfg
+++ b/services/web/server/setup.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.44.0
+current_version = 0.45.0
commit = True
message = services/webserver api version: {current_version} → {new_version}
tag = False
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index a49c71acf17..860d9869218 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2,7 +2,7 @@ openapi: 3.0.2
info:
title: simcore-service-webserver
description: Main service with an interface (http-API & websockets) to the web front-end
- version: 0.44.0
+ version: 0.45.0
servers:
- url: ''
description: webserver
@@ -2626,6 +2626,27 @@ paths:
example: '{"field": "name", "direction": "desc"}'
name: order_by
in: query
+ - description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\
+ : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\
+ \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\
+ \ \"type\": \"boolean\"\n }\n }\n}"
+ required: false
+ schema:
+ title: Filters
+ type: string
+ description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\
+ title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\
+ \ false to list non-trashed (default), None to list all\",\n \"default\"\
+ : false,\n \"type\": \"boolean\"\n }\n }\n}"
+ format: json-string
+ name: filters
+ in: query
- required: false
schema:
title: Limit
@@ -2669,6 +2690,76 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/Envelope_FolderGet_'
+ /v0/folders:search:
+ get:
+ tags:
+ - folders
+ summary: List Folders Full Search
+ operationId: list_folders_full_search
+ parameters:
+ - required: false
+ schema:
+ title: Text
+ type: string
+ name: text
+ in: query
+ - description: Order by field (modified_at|name|description) and direction (asc|desc).
+ The default sorting order is ascending.
+ required: false
+ schema:
+ title: Order By
+ description: Order by field (modified_at|name|description) and direction
+ (asc|desc). The default sorting order is ascending.
+ default: '{"field": "modified_at", "direction": "desc"}'
+ example: '{"field": "name", "direction": "desc"}'
+ name: order_by
+ in: query
+ - description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\
+ : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\
+ \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\
+ \ \"type\": \"boolean\"\n }\n }\n}"
+ required: false
+ schema:
+ title: Filters
+ type: string
+ description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\
+ title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\
+ \ false to list non-trashed (default), None to list all\",\n \"default\"\
+ : false,\n \"type\": \"boolean\"\n }\n }\n}"
+ format: json-string
+ name: filters
+ in: query
+ - required: false
+ schema:
+ title: Limit
+ exclusiveMaximum: true
+ minimum: 1
+ type: integer
+ default: 20
+ maximum: 50
+ name: limit
+ in: query
+ - required: false
+ schema:
+ title: Offset
+ minimum: 0
+ type: integer
+ default: 0
+ name: offset
+ in: query
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Envelope_list_models_library.api_schemas_webserver.folders_v2.FolderGet__'
/v0/folders/{folder_id}:
get:
tags:
@@ -3056,10 +3147,24 @@ paths:
example: '{"field": "last_change_date", "direction": "desc"}'
name: order_by
in: query
- - required: false
+ - description: "{\n \"title\": \"ProjectFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\
+ : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\
+ \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\
+ \ \"type\": \"boolean\"\n }\n }\n}"
+ required: false
schema:
title: Filters
type: string
+ description: "{\n \"title\": \"ProjectFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\
+ title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\
+ \ false to list non-trashed (default), None to list all\",\n \"default\"\
+ : false,\n \"type\": \"boolean\"\n }\n }\n}"
format: json-string
name: filters
in: query
@@ -4328,7 +4433,7 @@ paths:
'403':
description: ProjectInvalidRightsError
'404':
- description: ProjectNotFoundError, UserDefaultWalletNotFoundError
+ description: UserDefaultWalletNotFoundError, ProjectNotFoundError
'409':
description: ProjectTooManyProjectOpenedError
'422':
@@ -5410,6 +5515,57 @@ paths:
responses:
'204':
description: Successful Response
+ /v0/folders/{folder_id}:trash:
+ post:
+ tags:
+ - trash
+ - folders
+ summary: Trash Folder
+ operationId: trash_folder
+ parameters:
+ - required: true
+ schema:
+ title: Folder Id
+ exclusiveMinimum: true
+ type: integer
+ minimum: 0
+ name: folder_id
+ in: path
+ - required: false
+ schema:
+ title: Force
+ type: boolean
+ default: false
+ name: force
+ in: query
+ responses:
+ '204':
+ description: Successful Response
+ '404':
+ description: No such folder
+ '409':
+ description: One or more projects are in use and cannot be trashed
+ '503':
+ description: Trash service error
+ /v0/folders/{folder_id}:untrash:
+ post:
+ tags:
+ - trash
+ - folders
+ summary: Untrash Folder
+ operationId: untrash_folder
+ parameters:
+ - required: true
+ schema:
+ title: Folder Id
+ exclusiveMinimum: true
+ type: integer
+ minimum: 0
+ name: folder_id
+ in: path
+ responses:
+ '204':
+ description: Successful Response
/v0/repos/projects:
get:
tags:
@@ -8427,6 +8583,10 @@ components:
title: Modifiedat
type: string
format: date-time
+ trashedAt:
+ title: Trashedat
+ type: string
+ format: date-time
owner:
title: Owner
exclusiveMinimum: true
@@ -12583,28 +12743,25 @@ components:
type: string
- type: string
default: UNDEFINED
- id:
- title: Id
- type: string
- read:
- title: Read
- type: boolean
resource_id:
- title: Resource ID
+ title: Resource Id
anyOf:
- enum:
- - ""
+ - ''
type: string
- type: string
- default: ""
+ default: ''
user_from_id:
- title: User ID of the one creating it
- anyOf:
- - enum:
- - None
- type: integer
- - type: integer
- default: None
+ title: User From Id
+ exclusiveMinimum: true
+ type: integer
+ minimum: 0
+ id:
+ title: Id
+ type: string
+ read:
+ title: Read
+ type: boolean
UserNotificationCreate:
title: UserNotificationCreate
required:
@@ -12645,21 +12802,18 @@ components:
- type: string
default: UNDEFINED
resource_id:
- title: Resource ID
+ title: Resource Id
anyOf:
- enum:
- - ""
+ - ''
type: string
- type: string
- default: ""
+ default: ''
user_from_id:
- title: User ID of the one creating it
- anyOf:
- - enum:
- - None
- type: integer
- - type: integer
- default: None
+ title: User From Id
+ exclusiveMinimum: true
+ type: integer
+ minimum: 0
UserNotificationPatch:
title: UserNotificationPatch
required:
diff --git a/services/web/server/src/simcore_service_webserver/errors.py b/services/web/server/src/simcore_service_webserver/errors.py
index ac21f882297..2ed7dc634a9 100644
--- a/services/web/server/src/simcore_service_webserver/errors.py
+++ b/services/web/server/src/simcore_service_webserver/errors.py
@@ -2,4 +2,4 @@
class WebServerBaseError(OsparcErrorMixin, Exception):
- """WebServer base error."""
+ msg_template = "Error in web-server service"
diff --git a/services/web/server/src/simcore_service_webserver/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/exceptions_handlers.py
new file mode 100644
index 00000000000..7e1ae0bd3e0
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/exceptions_handlers.py
@@ -0,0 +1,90 @@
+import functools
+import logging
+from collections.abc import Iterable
+from typing import NamedTuple, TypeAlias
+
+from aiohttp import web
+from servicelib.aiohttp.typing_extension import Handler
+from servicelib.aiohttp.web_exceptions_extension import get_http_error_class_or_none
+from servicelib.logging_errors import create_troubleshotting_log_kwargs
+from servicelib.status_codes_utils import is_5xx_server_error
+
+_logger = logging.getLogger(__name__)
+
+
+class HttpErrorInfo(NamedTuple):
+ status_code: int
+ msg_template: str
+
+
+ExceptionToHttpErrorMap: TypeAlias = dict[type[BaseException], HttpErrorInfo]
+
+
+class _DefaultDict(dict):
+ def __missing__(self, key):
+ return f"'{key}=?'"
+
+
+def _sort_exceptions_by_specificity(
+ exceptions: Iterable[type[BaseException]], *, concrete_first: bool = True
+) -> list[type[BaseException]]:
+ return sorted(
+ exceptions,
+ key=lambda exc: sum(issubclass(e, exc) for e in exceptions if e is not exc),
+ reverse=not concrete_first,
+ )
+
+
+def create_exception_handlers_decorator(
+ exceptions_catch: type[BaseException] | tuple[type[BaseException], ...],
+ exc_to_status_map: ExceptionToHttpErrorMap,
+):
+ mapped_classes: tuple[type[BaseException], ...] = tuple(
+ _sort_exceptions_by_specificity(exc_to_status_map.keys())
+ )
+
+ assert all( # nosec
+ issubclass(cls, exceptions_catch) for cls in mapped_classes
+ ), f"Every {mapped_classes=} must inherit by one or more of {exceptions_catch=}"
+
+ def _decorator(handler: Handler):
+ @functools.wraps(handler)
+ async def _wrapper(request: web.Request) -> web.StreamResponse:
+ try:
+ return await handler(request)
+
+ except exceptions_catch as exc:
+ if exc_cls := next(
+ (cls for cls in mapped_classes if isinstance(exc, cls)), None
+ ):
+ http_error_info = exc_to_status_map[exc_cls]
+
+ # safe formatting, i.e. does not raise
+ user_msg = http_error_info.msg_template.format_map(
+ _DefaultDict(getattr(exc, "__dict__", {}))
+ )
+
+ http_error_cls = get_http_error_class_or_none(
+ http_error_info.status_code
+ )
+ assert http_error_cls # nosec
+
+ if is_5xx_server_error(http_error_info.status_code):
+ _logger.exception(
+ **create_troubleshotting_log_kwargs(
+ user_msg,
+ error=exc,
+ error_context={
+ "request": request,
+ "request.remote": f"{request.remote}",
+ "request.method": f"{request.method}",
+ "request.path": f"{request.path}",
+ },
+ )
+ )
+ raise http_error_cls(reason=user_msg) from exc
+ raise # reraise
+
+ return _wrapper
+
+ return _decorator
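
Note: _sort_exceptions_by_specificity puts the most derived exception classes
first, so the isinstance() scan inside _wrapper always picks the most specific
mapping; _DefaultDict keeps msg_template.format_map from raising when a template
field is missing on the exception. A small sketch with hypothetical classes:

    class PluginError(Exception):
        ...

    class ItemNotFoundError(PluginError):
        ...

    # concrete subclasses are ordered before their bases
    assert _sort_exceptions_by_specificity([PluginError, ItemNotFoundError]) == [
        ItemNotFoundError,
        PluginError,
    ]

    # safe formatting: a missing field renders as 'name=?' instead of raising
    assert "Not found: {name}".format_map(_DefaultDict()) == "Not found: 'name=?'"
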
diff --git a/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py
new file mode 100644
index 00000000000..4f83b5e1872
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py
@@ -0,0 +1,74 @@
+import logging
+
+from servicelib.aiohttp import status
+
+from ..exceptions_handlers import (
+ ExceptionToHttpErrorMap,
+ HttpErrorInfo,
+ create_exception_handlers_decorator,
+)
+from ..projects.exceptions import (
+ BaseProjectError,
+ ProjectRunningConflictError,
+ ProjectStoppingError,
+)
+from ..workspaces.errors import (
+ WorkspaceAccessForbiddenError,
+ WorkspaceFolderInconsistencyError,
+ WorkspaceNotFoundError,
+ WorkspacesValueError,
+)
+from .errors import (
+ FolderAccessForbiddenError,
+ FolderNotFoundError,
+ FoldersValueError,
+ FolderValueNotPermittedError,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = {
+ FolderNotFoundError: HttpErrorInfo(
+ status.HTTP_404_NOT_FOUND,
+ "Folder was not found",
+ ),
+ WorkspaceNotFoundError: HttpErrorInfo(
+ status.HTTP_404_NOT_FOUND,
+ "Workspace was not found",
+ ),
+ FolderAccessForbiddenError: HttpErrorInfo(
+ status.HTTP_403_FORBIDDEN,
+ "Does not have access to this folder",
+ ),
+ WorkspaceAccessForbiddenError: HttpErrorInfo(
+ status.HTTP_403_FORBIDDEN,
+ "Does not have access to this workspace",
+ ),
+ WorkspaceFolderInconsistencyError: HttpErrorInfo(
+ status.HTTP_403_FORBIDDEN,
+ "This folder does not exist in this workspace",
+ ),
+ FolderValueNotPermittedError: HttpErrorInfo(
+ status.HTTP_409_CONFLICT,
+ "Provided folder value is not permitted: {reason}",
+ ),
+ FoldersValueError: HttpErrorInfo(
+ status.HTTP_409_CONFLICT,
+ "Invalid folder value set: {reason}",
+ ),
+ ProjectRunningConflictError: HttpErrorInfo(
+ status.HTTP_409_CONFLICT,
+ "One or more studies in this folder are in use and cannot be trashed. Please stop all services first and try again",
+ ),
+ ProjectStoppingError: HttpErrorInfo(
+ status.HTTP_503_SERVICE_UNAVAILABLE,
+ "Something went wrong while stopping services before trashing. Aborting trash.",
+ ),
+}
+
+
+handle_plugin_requests_exceptions = create_exception_handlers_decorator(
+ exceptions_catch=(BaseProjectError, FoldersValueError, WorkspacesValueError),
+ exc_to_status_map=_TO_HTTP_ERROR_MAP,
+)
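
Note: a hedged sketch of how a folders REST handler is expected to use this
decorator (the handler name and the kwargs passed to the error are illustrative,
not part of this patch):

    from aiohttp import web

    @handle_plugin_requests_exceptions
    async def get_folder_handler(request: web.Request) -> web.Response:
        folder_id = int(request.match_info["folder_id"])
        # a FolderNotFoundError raised anywhere below is translated into an
        # HTTP 404 with the mapped message, instead of surfacing as a bare 500
        raise FolderNotFoundError(folder_id=folder_id)
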
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
index a6de1ce842d..2ef9818f431 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
@@ -1,335 +1,373 @@
-# pylint: disable=unused-argument
-
-import logging
-
-from aiohttp import web
-from models_library.access_rights import AccessRights
-from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage
-from models_library.folders import FolderID
-from models_library.products import ProductName
-from models_library.projects import ProjectID
-from models_library.rest_ordering import OrderBy
-from models_library.users import UserID
-from models_library.workspaces import WorkspaceID
-from pydantic import NonNegativeInt
-from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
-from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
-from servicelib.utils import fire_and_forget_task
-
-from ..folders.errors import FolderValueNotPermittedError
-from ..projects.projects_api import submit_delete_project_task
-from ..users.api import get_user
-from ..workspaces._workspaces_api import check_user_workspace_access
-from ..workspaces.errors import (
- WorkspaceAccessForbiddenError,
- WorkspaceFolderInconsistencyError,
-)
-from . import _folders_db as folders_db
-
-_logger = logging.getLogger(__name__)
-
-
-async def create_folder(
- app: web.Application,
- user_id: UserID,
- name: str,
- parent_folder_id: FolderID | None,
- product_name: ProductName,
- workspace_id: WorkspaceID | None,
-) -> FolderGet:
- user = await get_user(app, user_id=user_id)
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=workspace_id,
- product_name=product_name,
- permission="write",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- # Check parent_folder_id lives in the workspace
- if parent_folder_id:
- parent_folder_db = await folders_db.get(
- app, folder_id=parent_folder_id, product_name=product_name
- )
- if parent_folder_db.workspace_id != workspace_id:
- raise WorkspaceFolderInconsistencyError(
- folder_id=parent_folder_id, workspace_id=workspace_id
- )
-
- if parent_folder_id:
- # Check user has access to the parent folder
- parent_folder_db = await folders_db.get_for_user_or_workspace(
- app,
- folder_id=parent_folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
- if workspace_id and parent_folder_db.workspace_id != workspace_id:
- # Check parent folder id exists inside the same workspace
- raise WorkspaceAccessForbiddenError(
- reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}."
- )
-
- folder_db = await folders_db.create(
- app,
- product_name=product_name,
- created_by_gid=user["primary_gid"],
- folder_name=name,
- parent_folder_id=parent_folder_id,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- owner=folder_db.created_by_gid,
- workspace_id=workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def get_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- product_name: ProductName,
-) -> FolderGet:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if folder_db.workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="read",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- folder_db = await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- owner=folder_db.created_by_gid,
- workspace_id=folder_db.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def list_folders(
- app: web.Application,
- user_id: UserID,
- product_name: ProductName,
- folder_id: FolderID | None,
- workspace_id: WorkspaceID | None,
- offset: NonNegativeInt,
- limit: int,
- order_by: OrderBy,
-) -> FolderGetPage:
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
-
- if workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=workspace_id,
- product_name=product_name,
- permission="read",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- if folder_id:
- # Check user access to folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
-
- total_count, folders = await folders_db.list_(
- app,
- content_of_folder_id=folder_id,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- product_name=product_name,
- offset=offset,
- limit=limit,
- order_by=order_by,
- )
- return FolderGetPage(
- items=[
- FolderGet(
- folder_id=folder.folder_id,
- parent_folder_id=folder.parent_folder_id,
- name=folder.name,
- created_at=folder.created,
- modified_at=folder.modified,
- owner=folder.created_by_gid,
- workspace_id=folder.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
- for folder in folders
- ],
- total=total_count,
- )
-
-
-async def update_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- *,
- name: str,
- parent_folder_id: FolderID | None,
- product_name: ProductName,
-) -> FolderGet:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if folder_db.workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="write",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- # Check user has access to the folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
-
- if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None:
- # Check user has access to the parent folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=parent_folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
- # Do not allow to move to a child folder id
- _child_folders = await folders_db.get_folders_recursively(
- app, folder_id=folder_id, product_name=product_name
- )
- if parent_folder_id in _child_folders:
- raise FolderValueNotPermittedError(
- reason="Parent folder id should not be one of children"
- )
-
- folder_db = await folders_db.update(
- app,
- folder_id=folder_id,
- name=name,
- parent_folder_id=parent_folder_id,
- product_name=product_name,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- owner=folder_db.created_by_gid,
- workspace_id=folder_db.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def delete_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- product_name: ProductName,
-) -> None:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- if folder_db.workspace_id:
- await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="delete",
- )
- workspace_is_private = False
-
- # Check user has access to the folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
-
- # 1. Delete folder content
- # 1.1 Delete all child projects that I am an owner
- project_id_list: list[
- ProjectID
- ] = await folders_db.get_projects_recursively_only_if_user_is_owner(
- app,
- folder_id=folder_id,
- private_workspace_user_id_or_none=user_id if workspace_is_private else None,
- user_id=user_id,
- product_name=product_name,
- )
-
- # fire and forget task for project deletion
- for project_id in project_id_list:
- fire_and_forget_task(
- submit_delete_project_task(
- app,
- project_uuid=project_id,
- user_id=user_id,
- simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE,
- ),
- task_suffix_name=f"delete_project_task_{project_id}",
- fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY],
- )
-
- # 1.2 Delete all child folders
- await folders_db.delete_recursively(
- app, folder_id=folder_id, product_name=product_name
- )
+# pylint: disable=unused-argument
+
+import logging
+
+from aiohttp import web
+from models_library.access_rights import AccessRights
+from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage
+from models_library.folders import FolderID, FolderQuery, FolderScope
+from models_library.products import ProductName
+from models_library.projects import ProjectID
+from models_library.rest_ordering import OrderBy
+from models_library.users import UserID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
+from pydantic import NonNegativeInt
+from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
+from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
+from servicelib.utils import fire_and_forget_task
+
+from ..folders.errors import FolderValueNotPermittedError
+from ..projects.projects_api import submit_delete_project_task
+from ..users.api import get_user
+from ..workspaces.api import check_user_workspace_access
+from ..workspaces.errors import (
+ WorkspaceAccessForbiddenError,
+ WorkspaceFolderInconsistencyError,
+)
+from . import _folders_db as folders_db
+
+_logger = logging.getLogger(__name__)
+
+
+async def create_folder(
+ app: web.Application,
+ user_id: UserID,
+ name: str,
+ parent_folder_id: FolderID | None,
+ product_name: ProductName,
+ workspace_id: WorkspaceID | None,
+) -> FolderGet:
+ user = await get_user(app, user_id=user_id)
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=workspace_id,
+ product_name=product_name,
+ permission="write",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ # Check parent_folder_id lives in the workspace
+ if parent_folder_id:
+ parent_folder_db = await folders_db.get(
+ app, folder_id=parent_folder_id, product_name=product_name
+ )
+ if parent_folder_db.workspace_id != workspace_id:
+ raise WorkspaceFolderInconsistencyError(
+ folder_id=parent_folder_id, workspace_id=workspace_id
+ )
+
+ if parent_folder_id:
+ # Check user has access to the parent folder
+ parent_folder_db = await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=parent_folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=workspace_id,
+ )
+ if workspace_id and parent_folder_db.workspace_id != workspace_id:
+ # Check parent folder id exists inside the same workspace
+ raise WorkspaceAccessForbiddenError(
+ reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}."
+ )
+
+ folder_db = await folders_db.create(
+ app,
+ product_name=product_name,
+ created_by_gid=user["primary_gid"],
+ folder_name=name,
+ parent_folder_id=parent_folder_id,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=workspace_id,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def get_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ product_name: ProductName,
+) -> FolderGet:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if folder_db.workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="read",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ folder_db = await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=folder_db.workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def list_folders(
+ app: web.Application,
+ user_id: UserID,
+ product_name: ProductName,
+ folder_id: FolderID | None,
+ workspace_id: WorkspaceID | None,
+ trashed: bool | None,
+ offset: NonNegativeInt,
+ limit: int,
+ order_by: OrderBy,
+) -> FolderGetPage:
+ # NOTE: Folder access rights for listing are checked within the listing DB function.
+
+ total_count, folders = await folders_db.list_(
+ app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_query=(
+ FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id)
+ if folder_id
+ else FolderQuery(folder_scope=FolderScope.ROOT)
+ ),
+ workspace_query=(
+ WorkspaceQuery(
+ workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id
+ )
+ if workspace_id
+ else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)
+ ),
+ filter_trashed=trashed,
+ filter_by_text=None,
+ offset=offset,
+ limit=limit,
+ order_by=order_by,
+ )
+ return FolderGetPage(
+ items=[
+ FolderGet(
+ folder_id=folder.folder_id,
+ parent_folder_id=folder.parent_folder_id,
+ name=folder.name,
+ created_at=folder.created,
+ modified_at=folder.modified,
+ trashed_at=folder.trashed_at,
+ owner=folder.created_by_gid,
+ workspace_id=folder.workspace_id,
+ my_access_rights=folder.my_access_rights,
+ )
+ for folder in folders
+ ],
+ total=total_count,
+ )
+
+
+async def list_folders_full_search(
+ app: web.Application,
+ user_id: UserID,
+ product_name: ProductName,
+ text: str | None,
+ trashed: bool | None,
+ offset: NonNegativeInt,
+ limit: int,
+ order_by: OrderBy,
+) -> FolderGetPage:
+ # NOTE: Folder access rights for listing are checked within the listing DB function.
+
+ total_count, folders = await folders_db.list_(
+ app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_query=FolderQuery(folder_scope=FolderScope.ALL),
+ workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
+ filter_trashed=trashed,
+ filter_by_text=text,
+ offset=offset,
+ limit=limit,
+ order_by=order_by,
+ )
+ return FolderGetPage(
+ items=[
+ FolderGet(
+ folder_id=folder.folder_id,
+ parent_folder_id=folder.parent_folder_id,
+ name=folder.name,
+ created_at=folder.created,
+ modified_at=folder.modified,
+ trashed_at=folder.trashed_at,
+ owner=folder.created_by_gid,
+ workspace_id=folder.workspace_id,
+ my_access_rights=folder.my_access_rights,
+ )
+ for folder in folders
+ ],
+ total=total_count,
+ )
+
+
+async def update_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ *,
+ name: str,
+ parent_folder_id: FolderID | None,
+ product_name: ProductName,
+) -> FolderGet:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if folder_db.workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="write",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ # Check user has access to the folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+
+ if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None:
+ # Check user has access to the parent folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=parent_folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+ # Do not allow moving a folder into one of its own child folders
+ _child_folders = await folders_db.get_folders_recursively(
+ app, folder_id=folder_id, product_name=product_name
+ )
+ if parent_folder_id in _child_folders:
+ raise FolderValueNotPermittedError(
+ reason="Parent folder id should not be one of children"
+ )
+
+ folder_db = await folders_db.update(
+ app,
+ folders_id_or_ids=folder_id,
+ name=name,
+ parent_folder_id=parent_folder_id,
+ product_name=product_name,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=folder_db.workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def delete_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ product_name: ProductName,
+) -> None:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ if folder_db.workspace_id:
+ await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="delete",
+ )
+ workspace_is_private = False
+
+ # Check user has access to the folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+
+ # 1. Delete folder content
+ # 1.1 Delete all child projects that I am an owner
+ project_id_list: list[
+ ProjectID
+ ] = await folders_db.get_projects_recursively_only_if_user_is_owner(
+ app,
+ folder_id=folder_id,
+ private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+ user_id=user_id,
+ product_name=product_name,
+ )
+
+ # fire and forget task for project deletion
+ for project_id in project_id_list:
+ fire_and_forget_task(
+ submit_delete_project_task(
+ app,
+ project_uuid=project_id,
+ user_id=user_id,
+ simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE,
+ ),
+ task_suffix_name=f"delete_project_task_{project_id}",
+ fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY],
+ )
+
+ # 1.2 Delete all child folders
+ await folders_db.delete_recursively(
+ app, folder_id=folder_id, product_name=product_name
+ )
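
Note: listing no longer branches on raw user_id/workspace_id pairs; scoping is
delegated to FolderQuery/WorkspaceQuery and resolved inside folders_db.list_().
A sketch of the two combinations used above (names as imported in this file):

    from models_library.folders import FolderQuery, FolderScope
    from models_library.workspaces import WorkspaceQuery, WorkspaceScope

    # list_folders: root level of the user's private workspace
    folder_q = FolderQuery(folder_scope=FolderScope.ROOT)
    workspace_q = WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)

    # list_folders_full_search: no hierarchy constraint at all
    search_folder_q = FolderQuery(folder_scope=FolderScope.ALL)
    search_workspace_q = WorkspaceQuery(workspace_scope=WorkspaceScope.ALL)
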
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index d2992ed30d5..7e3a54d0bb5 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -5,29 +5,56 @@
"""
import logging
-from typing import cast
+from datetime import datetime
+from typing import Any, Final, cast
+import sqlalchemy as sa
from aiohttp import web
-from models_library.folders import FolderDB, FolderID
+from models_library.folders import (
+ FolderDB,
+ FolderID,
+ FolderQuery,
+ FolderScope,
+ UserFolderAccessRightsDB,
+)
from models_library.products import ProductName
from models_library.projects import ProjectID
from models_library.rest_ordering import OrderBy, OrderDirection
from models_library.users import GroupID, UserID
-from models_library.workspaces import WorkspaceID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
from pydantic import NonNegativeInt
from simcore_postgres_database.models.folders_v2 import folders_v2
from simcore_postgres_database.models.projects import projects
from simcore_postgres_database.models.projects_to_folders import projects_to_folders
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
+from simcore_postgres_database.utils_workspaces_sql import (
+ create_my_workspace_access_rights_subquery,
+)
from sqlalchemy import func
+from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.orm import aliased
-from sqlalchemy.sql import asc, desc, select
+from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, asc, desc, select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import FolderAccessForbiddenError, FolderNotFoundError
_logger = logging.getLogger(__name__)
+class UnSet:
+ ...
+
+
+_unset: Final = UnSet()
+
+
+def as_dict_exclude_unset(**params) -> dict[str, Any]:
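+    # e.g. as_dict_exclude_unset(name="x", parent_folder_id=_unset) == {"name": "x"};
+    # an explicit None ("set the column to NULL") stays distinct from "not provided"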
+ return {k: v for k, v in params.items() if not isinstance(v, UnSet)}
+
+
_SELECTION_ARGS = (
folders_v2.c.folder_id,
folders_v2.c.name,
@@ -35,6 +62,7 @@
folders_v2.c.created_by_gid,
folders_v2.c.created,
folders_v2.c.modified,
+ folders_v2.c.trashed_at,
folders_v2.c.user_id,
folders_v2.c.workspace_id,
)
@@ -42,6 +70,7 @@
async def create(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
created_by_gid: GroupID,
folder_name: str,
@@ -54,8 +83,8 @@ async def create(
user_id is not None and workspace_id is not None
), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one."
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
folders_v2.insert()
.values(
name=folder_name,
@@ -73,62 +102,157 @@ async def create(
return FolderDB.model_validate(row)
-async def list_(
+async def list_( # pylint: disable=too-many-arguments,too-many-branches
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
- content_of_folder_id: FolderID | None,
- user_id: UserID | None,
- workspace_id: WorkspaceID | None,
product_name: ProductName,
+ user_id: UserID,
+ # hierarchy filters
+ folder_query: FolderQuery,
+ workspace_query: WorkspaceQuery,
+ # attribute filters
+ filter_trashed: bool | None,
+ filter_by_text: str | None,
+ # pagination
offset: NonNegativeInt,
limit: int,
+ # order
order_by: OrderBy,
-) -> tuple[int, list[FolderDB]]:
+) -> tuple[int, list[UserFolderAccessRightsDB]]:
"""
- content_of_folder_id - Used to filter in which folder we want to list folders. None means root folder.
+    folder_query - Scopes which folder's content is listed (root, a specific folder, or all).
+    filter_trashed - If True, returns only folders **explicitly** trashed; if False, only non-trashed folders; if None, both.
"""
- assert not (
- user_id is not None and workspace_id is not None
- ), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one."
- base_query = (
- select(*_SELECTION_ARGS)
- .select_from(folders_v2)
- .where(
- (folders_v2.c.product_name == product_name)
- & (folders_v2.c.parent_folder_id == content_of_folder_id)
- )
+ workspace_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
)
- if user_id:
- base_query = base_query.where(folders_v2.c.user_id == user_id)
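+    # NOTE: with WorkspaceScope.ALL both the private- and the shared-workspace
+    # queries below are built and merged with a UNION ALL; PRIVATE or SHARED
+    # keeps only the corresponding branch (the other stays None)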
+ if workspace_query.workspace_scope is not WorkspaceScope.SHARED:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.PRIVATE,
+ WorkspaceScope.ALL,
+ )
+
+ private_workspace_query = (
+ select(
+ *_SELECTION_ARGS,
+ func.json_build_object(
+ "read",
+ sa.text("true"),
+ "write",
+ sa.text("true"),
+ "delete",
+ sa.text("true"),
+ ).label("my_access_rights"),
+ )
+ .select_from(folders_v2)
+ .where(
+ (folders_v2.c.product_name == product_name)
+ & (folders_v2.c.user_id == user_id)
+ )
+ )
else:
- assert workspace_id # nosec
- base_query = base_query.where(folders_v2.c.workspace_id == workspace_id)
+ private_workspace_query = None
+
+ if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.SHARED,
+ WorkspaceScope.ALL,
+ )
+
+ shared_workspace_query = (
+ select(
+ *_SELECTION_ARGS, workspace_access_rights_subquery.c.my_access_rights
+ )
+ .select_from(
+ folders_v2.join(
+ workspace_access_rights_subquery,
+ folders_v2.c.workspace_id
+ == workspace_access_rights_subquery.c.workspace_id,
+ )
+ )
+ .where(
+ (folders_v2.c.product_name == product_name)
+ & (folders_v2.c.user_id.is_(None))
+ )
+ )
+
+ if workspace_query.workspace_scope == WorkspaceScope.SHARED:
+ shared_workspace_query = shared_workspace_query.where(
+ folders_v2.c.workspace_id == workspace_query.workspace_id
+ )
+
+ else:
+ shared_workspace_query = None
+
+ attributes_filters: list[ColumnElement] = []
+
+ if filter_trashed is not None:
+ attributes_filters.append(
+ (
+ (folders_v2.c.trashed_at.is_not(None))
+ & (folders_v2.c.trashed_explicitly.is_(True))
+ )
+ if filter_trashed
+ else folders_v2.c.trashed_at.is_(None)
+ )
+ if folder_query.folder_scope is not FolderScope.ALL:
+ if folder_query.folder_scope == FolderScope.SPECIFIC:
+ attributes_filters.append(
+ folders_v2.c.parent_folder_id == folder_query.folder_id
+ )
+ else:
+ assert folder_query.folder_scope == FolderScope.ROOT # nosec
+ attributes_filters.append(folders_v2.c.parent_folder_id.is_(None))
+ if filter_by_text:
+ attributes_filters.append(folders_v2.c.name.ilike(f"%{filter_by_text}%"))
+
+ ###
+ # Combined
+ ###
+
+ combined_query: CompoundSelect | Select | None = None
+ if private_workspace_query is not None and shared_workspace_query is not None:
+ combined_query = sa.union_all(
+ private_workspace_query.where(sa.and_(*attributes_filters)),
+ shared_workspace_query.where(sa.and_(*attributes_filters)),
+ )
+ elif private_workspace_query is not None:
+ combined_query = private_workspace_query.where(sa.and_(*attributes_filters))
+ elif shared_workspace_query is not None:
+ combined_query = shared_workspace_query.where(sa.and_(*attributes_filters))
+
+ if combined_query is None:
+ msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}"
+ raise ValueError(msg)
-    # Select total count from base_query
+    # Select total count from combined_query
- subquery = base_query.subquery()
- count_query = select(func.count()).select_from(subquery)
+ count_query = select(func.count()).select_from(combined_query.subquery())
# Ordering and pagination
if order_by.direction == OrderDirection.ASC:
- list_query = base_query.order_by(asc(getattr(folders_v2.c, order_by.field)))
+ list_query = combined_query.order_by(asc(getattr(folders_v2.c, order_by.field)))
else:
- list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field)))
+ list_query = combined_query.order_by(
+ desc(getattr(folders_v2.c, order_by.field))
+ )
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
- count_result = await conn.execute(count_query)
- total_count = await count_result.scalar()
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ total_count = await conn.scalar(count_query)
- result = await conn.execute(list_query)
- rows = await result.fetchall() or []
- results: list[FolderDB] = [FolderDB.model_validate(row) for row in rows]
- return cast(int, total_count), results
+ result = await conn.stream(list_query)
+ folders: list[UserFolderAccessRightsDB] = [
+ UserFolderAccessRightsDB.model_validate(row) async for row in result
+ ]
+ return cast(int, total_count), folders
async def get(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
@@ -142,8 +266,8 @@ async def get(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
raise FolderAccessForbiddenError(
@@ -154,6 +278,7 @@ async def get(
async def get_for_user_or_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
@@ -178,8 +303,8 @@ async def get_for_user_or_workspace(
else:
query = query.where(folders_v2.c.workspace_id == workspace_id)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
raise FolderAccessForbiddenError(
@@ -190,39 +315,56 @@ async def get_for_user_or_workspace(
async def update(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
- folder_id: FolderID,
- name: str,
- parent_folder_id: FolderID | None,
+ folders_id_or_ids: FolderID | set[FolderID],
product_name: ProductName,
+ # updatable columns
+ name: str | UnSet = _unset,
+ parent_folder_id: FolderID | None | UnSet = _unset,
+ trashed_at: datetime | None | UnSet = _unset,
+ trashed_explicitly: bool | UnSet = _unset,
) -> FolderDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
- folders_v2.update()
- .values(
- name=name,
- parent_folder_id=parent_folder_id,
- modified=func.now(),
- )
- .where(
- (folders_v2.c.folder_id == folder_id)
- & (folders_v2.c.product_name == product_name)
- )
- .returning(*_SELECTION_ARGS)
- )
+ """
+    Batch or single update of folder(s)
+ """
+ # NOTE: exclude unset can also be done using a pydantic model and dict(exclude_unset=True)
+ updated = as_dict_exclude_unset(
+ name=name,
+ parent_folder_id=parent_folder_id,
+ trashed_at=trashed_at,
+ trashed_explicitly=trashed_explicitly,
+ )
+
+ query = (
+ (folders_v2.update().values(modified=func.now(), **updated))
+ .where(folders_v2.c.product_name == product_name)
+ .returning(*_SELECTION_ARGS)
+ )
+
+ if isinstance(folders_id_or_ids, set):
+ # batch-update
+ query = query.where(folders_v2.c.folder_id.in_(list(folders_id_or_ids)))
+ else:
+ # single-update
+ query = query.where(folders_v2.c.folder_id == folders_id_or_ids)
+
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
- raise FolderNotFoundError(reason=f"Folder {folder_id} not found.")
+ raise FolderNotFoundError(reason=f"Folder {folders_id_or_ids} not found.")
return FolderDB.model_validate(row)
async def delete_recursively(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -231,6 +373,7 @@ async def delete_recursively(
& (folders_v2.c.product_name == product_name)
)
folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True)
+
# Step 2: Define the recursive case
folder_alias = aliased(folders_v2)
recursive_query = select(
@@ -241,14 +384,15 @@ async def delete_recursively(
folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id,
)
)
+
# Step 3: Combine base and recursive cases into a CTE
folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query)
+
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
+ result = await conn.stream(final_query)
+            # list of tuples [(folder_id, parent_folder_id), ...] e.g. [(1, None), (2, 1)]
+ rows = [row async for row in result]
# Sort folders so that child folders come first
sorted_folders = sorted(
@@ -262,6 +406,7 @@ async def delete_recursively(
async def get_projects_recursively_only_if_user_is_owner(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
private_workspace_user_id_or_none: UserID | None,
@@ -276,7 +421,8 @@ async def get_projects_recursively_only_if_user_is_owner(
or the `users_to_groups` table for private workspace projects.
"""
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -285,6 +431,7 @@ async def get_projects_recursively_only_if_user_is_owner(
& (folders_v2.c.product_name == product_name)
)
folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True)
+
# Step 2: Define the recursive case
folder_alias = aliased(folders_v2)
recursive_query = select(
@@ -295,16 +442,15 @@ async def get_projects_recursively_only_if_user_is_owner(
folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id,
)
)
+
# Step 3: Combine base and recursive cases into a CTE
folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query)
+
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
-
- folder_ids = [item[0] for item in rows]
+ result = await conn.stream(final_query)
+            # list of tuples [(folder_id, parent_folder_id), ...] e.g. [(1, None), (2, 1)]
+ folder_ids = [item[0] async for item in result]
query = (
select(projects_to_folders.c.project_uuid)
@@ -317,20 +463,19 @@ async def get_projects_recursively_only_if_user_is_owner(
if private_workspace_user_id_or_none is not None:
query = query.where(projects.c.prj_owner == user_id)
- result = await conn.execute(query)
-
- rows = await result.fetchall() or []
- results = [ProjectID(row[0]) for row in rows]
- return results
+ result = await conn.stream(query)
+ return [ProjectID(row[0]) async for row in result]
async def get_folders_recursively(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
) -> list[FolderID]:
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -339,6 +484,7 @@ async def get_folders_recursively(
& (folders_v2.c.product_name == product_name)
)
folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True)
+
# Step 2: Define the recursive case
folder_alias = aliased(folders_v2)
recursive_query = select(
@@ -349,13 +495,11 @@ async def get_folders_recursively(
folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id,
)
)
+
# Step 3: Combine base and recursive cases into a CTE
folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query)
+
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
-
- return [FolderID(row[0]) for row in rows]
+ result = await conn.stream(final_query)
+ return [FolderID(row[0]) async for row in result]
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
index 857eb56ef57..64b69994f51 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
@@ -1,4 +1,3 @@
-import functools
import logging
from aiohttp import web
@@ -8,134 +7,43 @@
FolderGetPage,
PutFolderBodyParams,
)
-from models_library.basic_types import IDStr
-from models_library.folders import FolderID
-from models_library.rest_ordering import OrderBy, OrderDirection
-from models_library.rest_pagination import Page, PageQueryParameters
+from models_library.rest_ordering import OrderBy
+from models_library.rest_pagination import Page
from models_library.rest_pagination_utils import paginate_data
-from models_library.users import UserID
-from models_library.utils.common_validators import null_or_none_str_to_none_validator
-from models_library.workspaces import WorkspaceID
-from pydantic import ConfigDict, Field, Json, TypeAdapter, field_validator
+from pydantic import TypeAdapter
from servicelib.aiohttp import status
from servicelib.aiohttp.requests_validation import (
- RequestParams,
- StrictRequestParams,
parse_request_body_as,
parse_request_path_parameters_as,
parse_request_query_parameters_as,
)
-from servicelib.aiohttp.typing_extension import Handler
from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON
-from servicelib.request_keys import RQT_USERID_KEY
from servicelib.rest_constants import RESPONSE_MODEL_POLICY
-from .._constants import RQ_PRODUCT_KEY
from .._meta import API_VTAG as VTAG
from ..login.decorators import login_required
from ..security.decorators import permission_required
from ..utils_aiohttp import envelope_json_response
-from ..workspaces.errors import (
- WorkspaceAccessForbiddenError,
- WorkspaceFolderInconsistencyError,
- WorkspaceNotFoundError,
-)
from . import _folders_api
-from .errors import (
- FolderAccessForbiddenError,
- FolderNotFoundError,
- FoldersValueError,
- FolderValueNotPermittedError,
+from ._exceptions_handlers import handle_plugin_requests_exceptions
+from ._models import (
+ FolderFilters,
+ FolderListFullSearchWithJsonStrQueryParams,
+ FolderListWithJsonStrQueryParams,
+ FoldersPathParams,
+ FoldersRequestContext,
)
_logger = logging.getLogger(__name__)
-def handle_folders_exceptions(handler: Handler):
- @functools.wraps(handler)
- async def wrapper(request: web.Request) -> web.StreamResponse:
- try:
- return await handler(request)
-
- except (FolderNotFoundError, WorkspaceNotFoundError) as exc:
- raise web.HTTPNotFound(reason=f"{exc}") from exc
-
- except (
- FolderAccessForbiddenError,
- WorkspaceAccessForbiddenError,
- WorkspaceFolderInconsistencyError,
- ) as exc:
- raise web.HTTPForbidden(reason=f"{exc}") from exc
-
- except (FolderValueNotPermittedError, FoldersValueError) as exc:
- raise web.HTTPBadRequest(reason=f"{exc}") from exc
-
- return wrapper
-
-
-#
-# folders COLLECTION -------------------------
-#
-
routes = web.RouteTableDef()
-class FoldersRequestContext(RequestParams):
- user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required]
- product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required]
-
-
-class FoldersPathParams(StrictRequestParams):
- folder_id: FolderID
-
-
-class FolderListWithJsonStrQueryParams(PageQueryParameters):
- # pylint: disable=unsubscriptable-object
- order_by: Json[OrderBy] = Field(
- default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC),
- description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
- examples=['{"field": "name", "direction": "desc"}'],
- alias="order_by",
- )
- folder_id: FolderID | None = Field(
- default=None,
- description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).",
- )
- workspace_id: WorkspaceID | None = Field(
- default=None,
- description="List folders in specific workspace. By default, list in the user private workspace",
- )
-
- @field_validator("order_by", check_fields=False)
- @classmethod
- def validate_order_by_field(cls, v):
- if v.field not in {
- "modified_at",
- "name",
- "description",
- }:
- msg = f"We do not support ordering by provided field {v.field}"
- raise ValueError(msg)
- if v.field == "modified_at":
- v.field = "modified"
- return v
-
- model_config = ConfigDict(extra="forbid")
-
- # validators
- _null_or_none_str_to_none_validator = field_validator("folder_id", mode="before")(
- null_or_none_str_to_none_validator
- )
-
- _null_or_none_str_to_none_validator2 = field_validator(
- "workspace_id", mode="before"
- )(null_or_none_str_to_none_validator)
-
-
@routes.post(f"/{VTAG}/folders", name="create_folder")
@login_required
@permission_required("folder.create")
-@handle_folders_exceptions
+@handle_plugin_requests_exceptions
async def create_folder(request: web.Request):
req_ctx = FoldersRequestContext.model_validate(request)
body_params = await parse_request_body_as(CreateFolderBodyParams, request)
@@ -155,19 +63,64 @@ async def create_folder(request: web.Request):
@routes.get(f"/{VTAG}/folders", name="list_folders")
@login_required
@permission_required("folder.read")
-@handle_folders_exceptions
+@handle_plugin_requests_exceptions
async def list_folders(request: web.Request):
req_ctx = FoldersRequestContext.model_validate(request)
query_params: FolderListWithJsonStrQueryParams = parse_request_query_parameters_as(
FolderListWithJsonStrQueryParams, request
)
+ if not query_params.filters:
+ query_params.filters = FolderFilters()
+
folders: FolderGetPage = await _folders_api.list_folders(
app=request.app,
user_id=req_ctx.user_id,
product_name=req_ctx.product_name,
folder_id=query_params.folder_id,
workspace_id=query_params.workspace_id,
+ trashed=query_params.filters.trashed,
+ offset=query_params.offset,
+ limit=query_params.limit,
+        order_by=TypeAdapter(OrderBy).validate_python(query_params.order_by),
+ )
+
+    page = Page[FolderGet].model_validate(
+ paginate_data(
+ chunk=folders.items,
+ request_url=request.url,
+ total=folders.total,
+ limit=query_params.limit,
+ offset=query_params.offset,
+ )
+ )
+ return web.Response(
+        text=page.model_dump_json(**RESPONSE_MODEL_POLICY),
+ content_type=MIMETYPE_APPLICATION_JSON,
+ )
+
+
+@routes.get(f"/{VTAG}/folders:search", name="list_folders_full_search")
+@login_required
+@permission_required("folder.read")
+@handle_plugin_requests_exceptions
+async def list_folders_full_search(request: web.Request):
+    req_ctx = FoldersRequestContext.model_validate(request)
+ query_params: FolderListFullSearchWithJsonStrQueryParams = (
+ parse_request_query_parameters_as(
+ FolderListFullSearchWithJsonStrQueryParams, request
+ )
+ )
+
+ if not query_params.filters:
+ query_params.filters = FolderFilters()
+
+ folders: FolderGetPage = await _folders_api.list_folders_full_search(
+ app=request.app,
+ user_id=req_ctx.user_id,
+ product_name=req_ctx.product_name,
+ text=query_params.text,
+ trashed=query_params.filters.trashed,
offset=query_params.offset,
limit=query_params.limit,
order_by=TypeAdapter(OrderBy).validate_python(query_params.order_by),
@@ -191,7 +144,7 @@ async def list_folders(request: web.Request):
@routes.get(f"/{VTAG}/folders/{{folder_id}}", name="get_folder")
@login_required
@permission_required("folder.read")
-@handle_folders_exceptions
+@handle_plugin_requests_exceptions
async def get_folder(request: web.Request):
req_ctx = FoldersRequestContext.model_validate(request)
path_params = parse_request_path_parameters_as(FoldersPathParams, request)
@@ -212,7 +165,7 @@ async def get_folder(request: web.Request):
)
@login_required
@permission_required("folder.update")
-@handle_folders_exceptions
+@handle_plugin_requests_exceptions
async def replace_folder(request: web.Request):
req_ctx = FoldersRequestContext.model_validate(request)
path_params = parse_request_path_parameters_as(FoldersPathParams, request)
@@ -235,7 +188,7 @@ async def replace_folder(request: web.Request):
)
@login_required
@permission_required("folder.delete")
-@handle_folders_exceptions
+@handle_plugin_requests_exceptions
async def delete_folder_group(request: web.Request):
req_ctx = FoldersRequestContext.model_validate(request)
path_params = parse_request_path_parameters_as(FoldersPathParams, request)
diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py
new file mode 100644
index 00000000000..899514a271b
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_models.py
@@ -0,0 +1,112 @@
+import logging
+
+from models_library.basic_types import IDStr
+from models_library.folders import FolderID
+from models_library.rest_filters import Filters, FiltersQueryParameters
+from models_library.rest_ordering import OrderBy, OrderDirection
+from models_library.rest_pagination import PageQueryParameters
+from models_library.users import UserID
+from models_library.utils.common_validators import (
+ empty_str_to_none_pre_validator,
+ null_or_none_str_to_none_validator,
+)
+from models_library.workspaces import WorkspaceID
+from pydantic import BaseModel, ConfigDict, Field, Json, field_validator
+from servicelib.aiohttp.requests_validation import RequestParams, StrictRequestParams
+from servicelib.request_keys import RQT_USERID_KEY
+
+from .._constants import RQ_PRODUCT_KEY
+
+_logger = logging.getLogger(__name__)
+
+
+class FoldersRequestContext(RequestParams):
+ user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required]
+ product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required]
+
+
+class FoldersPathParams(StrictRequestParams):
+ folder_id: FolderID
+
+
+class FolderFilters(Filters):
+ trashed: bool | None = Field(
+ default=False,
+ description="Set to true to list trashed, false to list non-trashed (default), None to list all",
+ )
+
+
+class FolderListSortParams(BaseModel):
+ # pylint: disable=unsubscriptable-object
+ order_by: Json[OrderBy] = Field(
+ default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC),
+ description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
+ example='{"field": "name", "direction": "desc"}',
+ alias="order_by",
+ )
+
+ @validator("order_by", check_fields=False)
+ @classmethod
+ def _validate_order_by_field(cls, v):
+ if v.field not in {
+ "modified_at",
+ "name",
+ "description",
+ }:
+ msg = f"We do not support ordering by provided field {v.field}"
+ raise ValueError(msg)
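+        # the API exposes "modified_at" while the DB column is named "modified"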
+ if v.field == "modified_at":
+ v.field = "modified"
+ return v
+
+    model_config = ConfigDict(extra="forbid")
+
+
+class FolderListWithJsonStrQueryParams(
+ PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters]
+):
+ folder_id: FolderID | None = Field(
+ default=None,
+ description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).",
+ )
+ workspace_id: WorkspaceID | None = Field(
+ default=None,
+ description="List folders in specific workspace. By default, list in the user private workspace",
+ )
+
+    model_config = ConfigDict(extra="forbid")
+
+    # validators
+    _null_or_none_str_to_none_validator = field_validator(
+        "folder_id", mode="before"
+    )(null_or_none_str_to_none_validator)
+
+    _null_or_none_str_to_none_validator2 = field_validator(
+        "workspace_id", mode="before"
+    )(null_or_none_str_to_none_validator)
+
+
+class FolderListFullSearchWithJsonStrQueryParams(
+ PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters]
+):
+ text: str | None = Field(
+ default=None,
+ description="Multi column full text search, across all folders and workspaces",
+ max_length=100,
+ example="My Project",
+ )
+
+ _empty_is_none = validator("text", allow_reuse=True, pre=True)(
+ empty_str_to_none_pre_validator
+ )
+
+    model_config = ConfigDict(extra="forbid")
+
+
+class RemoveQueryParams(BaseModel):
+ force: bool = Field(
+ default=False, description="Force removal (even if resource is active)"
+ )
diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
new file mode 100644
index 00000000000..b3e1823369a
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
@@ -0,0 +1,183 @@
+import logging
+from datetime import datetime
+
+import arrow
+from aiohttp import web
+from models_library.folders import FolderID
+from models_library.products import ProductName
+from models_library.projects import ProjectID
+from models_library.users import UserID
+from simcore_postgres_database.utils_repos import transaction_context
+from sqlalchemy.ext.asyncio import AsyncConnection
+
+from ..db.plugin import get_asyncpg_engine
+from ..projects._trash_api import trash_project, untrash_project
+from ..workspaces.api import check_user_workspace_access
+from . import _folders_db
+
+_logger = logging.getLogger(__name__)
+
+
+async def _check_exists_and_access(
+ app: web.Application,
+ *,
+ product_name: ProductName,
+ user_id: UserID,
+ folder_id: FolderID,
+) -> bool:
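+    # Returns True when the folder lives in the user's private workspace,
+    # False when it belongs to a shared workspace (access is checked either way)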
+ # exists?
+ # check whether this folder exists
+ # otherwise raise not-found error
+ folder_db = await _folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ # can?
+ # check whether user in product has enough permissions to delete this folder
+ # otherwise raise forbidden error
+ workspace_is_private = True
+ if folder_db.workspace_id:
+ await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="delete",
+ )
+ workspace_is_private = False
+
+ await _folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+ return workspace_is_private
+
+
+async def _folders_db_update(
+ app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
+ product_name: ProductName,
+ folder_id: FolderID,
+ trashed_at: datetime | None,
+):
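+    # Sets/clears `trashed_at` on `folder_id` (explicitly trashed) and on all
+    # of its children (implicitly), so the whole subtree follows the parent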
+ # EXPLICIT un/trash
+ await _folders_db.update(
+ app,
+ connection,
+ folders_id_or_ids=folder_id,
+ product_name=product_name,
+ trashed_at=trashed_at,
+ trashed_explicitly=trashed_at is not None,
+ )
+
+ # IMPLICIT un/trash
+ child_folders: set[FolderID] = {
+ f
+ for f in await _folders_db.get_folders_recursively(
+ app, connection, folder_id=folder_id, product_name=product_name
+ )
+ if f != folder_id
+ }
+
+ if child_folders:
+ await _folders_db.update(
+ app,
+ connection,
+ folders_id_or_ids=child_folders,
+ product_name=product_name,
+ trashed_at=trashed_at,
+ trashed_explicitly=False,
+ )
+
+
+async def trash_folder(
+ app: web.Application,
+ *,
+ product_name: ProductName,
+ user_id: UserID,
+ folder_id: FolderID,
+ force_stop_first: bool,
+):
+
+ workspace_is_private = await _check_exists_and_access(
+ app, product_name=product_name, user_id=user_id, folder_id=folder_id
+ )
+
+ # Trash
+ trashed_at = arrow.utcnow().datetime
+
+ async with transaction_context(get_asyncpg_engine(app)) as connection:
+
+ # 1. Trash folder and children
+ await _folders_db_update(
+ app,
+ connection,
+ folder_id=folder_id,
+ product_name=product_name,
+ trashed_at=trashed_at,
+ )
+
+        # 2. Trash all child projects owned by the user
+ child_projects: list[
+ ProjectID
+ ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
+ app,
+ connection,
+ folder_id=folder_id,
+ private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+ user_id=user_id,
+ product_name=product_name,
+ )
+
+ for project_id in child_projects:
+ await trash_project(
+ app,
+                # NOTE: this should also be included in the unit-of-work, i.e. receive `connection`
+ product_name=product_name,
+ user_id=user_id,
+ project_id=project_id,
+ force_stop_first=force_stop_first,
+ explicit=False,
+ )
+
+
+async def untrash_folder(
+ app: web.Application,
+ *,
+ product_name: ProductName,
+ user_id: UserID,
+ folder_id: FolderID,
+):
+ workspace_is_private = await _check_exists_and_access(
+ app, product_name=product_name, user_id=user_id, folder_id=folder_id
+ )
+
+    # 1. Untrash folder and children
+ await _folders_db_update(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ trashed_at=None,
+ )
+
+    # 2. Untrash all child projects owned by the user
+ child_projects: list[
+ ProjectID
+ ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
+ app,
+ folder_id=folder_id,
+ private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+ user_id=user_id,
+ product_name=product_name,
+ )
+
+ for project_id in child_projects:
+ await untrash_project(
+ app, product_name=product_name, user_id=user_id, project_id=project_id
+ )
diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py
new file mode 100644
index 00000000000..55b53fcd4ee
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py
@@ -0,0 +1,66 @@
+import logging
+
+from aiohttp import web
+from servicelib.aiohttp import status
+from servicelib.aiohttp.requests_validation import (
+ parse_request_path_parameters_as,
+ parse_request_query_parameters_as,
+)
+
+from .._meta import API_VTAG as VTAG
+from ..application_settings_utils import requires_dev_feature_enabled
+from ..login.decorators import get_user_id, login_required
+from ..products.api import get_product_name
+from ..security.decorators import permission_required
+from . import _trash_api
+from ._exceptions_handlers import handle_plugin_requests_exceptions
+from ._models import FoldersPathParams, RemoveQueryParams
+
+_logger = logging.getLogger(__name__)
+
+
+routes = web.RouteTableDef()
+
+
+@routes.post(f"/{VTAG}/folders/{{folder_id}}:trash", name="trash_folder")
+@requires_dev_feature_enabled
+@login_required
+@permission_required("folder.delete")
+@handle_plugin_requests_exceptions
+async def trash_folder(request: web.Request):
+ user_id = get_user_id(request)
+ product_name = get_product_name(request)
+ path_params = parse_request_path_parameters_as(FoldersPathParams, request)
+ query_params: RemoveQueryParams = parse_request_query_parameters_as(
+ RemoveQueryParams, request
+ )
+
+ await _trash_api.trash_folder(
+ request.app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_id=path_params.folder_id,
+ force_stop_first=query_params.force,
+ )
+
+ return web.json_response(status=status.HTTP_204_NO_CONTENT)
+
+
+@routes.post(f"/{VTAG}/folders/{{folder_id}}:untrash", name="untrash_folder")
+@requires_dev_feature_enabled
+@login_required
+@permission_required("folder.delete")
+@handle_plugin_requests_exceptions
+async def untrash_folder(request: web.Request):
+ user_id = get_user_id(request)
+ product_name = get_product_name(request)
+ path_params = parse_request_path_parameters_as(FoldersPathParams, request)
+
+ await _trash_api.untrash_folder(
+ request.app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_id=path_params.folder_id,
+ )
+
+ return web.json_response(status=status.HTTP_204_NO_CONTENT)
diff --git a/services/web/server/src/simcore_service_webserver/folders/plugin.py b/services/web/server/src/simcore_service_webserver/folders/plugin.py
index bfc0fafb351..8ddef03ec1f 100644
--- a/services/web/server/src/simcore_service_webserver/folders/plugin.py
+++ b/services/web/server/src/simcore_service_webserver/folders/plugin.py
@@ -7,7 +7,7 @@
from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY
from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup
-from . import _folders_handlers
+from . import _folders_handlers, _trash_handlers
_logger = logging.getLogger(__name__)
@@ -24,3 +24,4 @@ def setup_folders(app: web.Application):
# routes
app.router.add_routes(_folders_handlers.routes)
+ app.router.add_routes(_trash_handlers.routes)
diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
index 81a615730d0..a18157242ad 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
@@ -6,24 +6,21 @@
"""
from aiohttp import web
-from models_library.access_rights import AccessRights
from models_library.api_schemas_webserver._base import OutputSchema
from models_library.api_schemas_webserver.projects import ProjectListItem
-from models_library.folders import FolderID
+from models_library.folders import FolderID, FolderQuery, FolderScope
from models_library.projects import ProjectID
from models_library.rest_ordering import OrderBy
-from models_library.users import GroupID, UserID
-from models_library.workspaces import WorkspaceID
+from models_library.users import UserID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
from pydantic import NonNegativeInt
from servicelib.utils import logged_gather
+from simcore_postgres_database.models.projects import ProjectType
from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB
-from simcore_service_webserver.workspaces._workspaces_api import (
- check_user_workspace_access,
-)
from ..catalog.client import get_services_for_user_in_product
from ..folders import _folders_db as folders_db
-from ..workspaces import _workspaces_db as workspaces_db
+from ..workspaces._workspaces_api import check_user_workspace_access
from . import projects_api
from ._permalink_api import update_or_pop_permalink_in_project
from .db import ProjectDBAPI
@@ -36,7 +33,6 @@ async def _append_fields(
user_id: UserID,
project: ProjectDict,
is_template: bool,
- workspace_access_rights: dict[GroupID, AccessRights] | None,
model_schema_cls: type[OutputSchema],
):
# state
@@ -50,13 +46,6 @@ async def _append_fields(
# permalink
await update_or_pop_permalink_in_project(request, project)
- # replace project access rights (if project is in workspace)
- if workspace_access_rights:
- project["accessRights"] = {
- f"{gid}": access.model_dump()
- for gid, access in workspace_access_rights.items()
- }
-
# validate
return model_schema_cls.model_validate(project).data(exclude_unset=True)
@@ -111,15 +100,25 @@ async def list_projects( # pylint: disable=too-many-arguments
db_projects, db_project_types, total_number_projects = await db.list_projects(
product_name=product_name,
user_id=user_id,
- workspace_id=workspace_id,
- folder_id=folder_id,
+ workspace_query=(
+ WorkspaceQuery(
+ workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id
+ )
+ if workspace_id
+ else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)
+ ),
+ folder_query=(
+ FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id)
+ if folder_id
+ else FolderQuery(folder_scope=FolderScope.ROOT)
+ ),
# attrs
filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type),
filter_by_services=user_available_services,
- trashed=trashed,
- hidden=show_hidden,
+ filter_trashed=trashed,
+ filter_hidden=show_hidden,
# composed attrs
- search=search,
+ filter_by_text=search,
# pagination
offset=offset,
limit=limit,
@@ -127,14 +126,6 @@ async def list_projects( # pylint: disable=too-many-arguments
order_by=order_by,
)
- # If workspace, override project access rights
- workspace_access_rights = None
- if workspace_id:
- workspace_db = await workspaces_db.get_workspace_for_user(
- app, user_id=user_id, workspace_id=workspace_id, product_name=product_name
- )
- workspace_access_rights = workspace_db.access_rights
-
projects: list[ProjectDict] = await logged_gather(
*(
_append_fields(
@@ -142,10 +133,9 @@ async def list_projects( # pylint: disable=too-many-arguments
user_id=user_id,
project=prj,
is_template=prj_type == ProjectTypeDB.TEMPLATE,
- workspace_access_rights=workspace_access_rights,
model_schema_cls=ProjectListItem,
)
- for prj, prj_type in zip(db_projects, db_project_types)
+ for prj, prj_type in zip(db_projects, db_project_types, strict=False)
),
reraise=True,
max_concurrency=100,
@@ -171,19 +161,18 @@ async def list_projects_full_search(
request.app, user_id, product_name, only_key_versions=True
)
- (
- db_projects,
- db_project_types,
- total_number_projects,
- ) = await db.list_projects_full_search(
- user_id=user_id,
+    db_projects, db_project_types, total_number_projects = await db.list_projects(
product_name=product_name,
+ user_id=user_id,
+ workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
+ folder_query=FolderQuery(folder_scope=FolderScope.ALL),
filter_by_services=user_available_services,
- text=text,
+ filter_by_text=text,
+ filter_tag_ids_list=tag_ids_list,
+ filter_by_project_type=ProjectType.STANDARD,
offset=offset,
limit=limit,
order_by=order_by,
- tag_ids_list=tag_ids_list,
)
projects: list[ProjectDict] = await logged_gather(
@@ -193,10 +182,9 @@ async def list_projects_full_search(
user_id=user_id,
project=prj,
is_template=prj_type == ProjectTypeDB.TEMPLATE,
- workspace_access_rights=None,
model_schema_cls=ProjectListItem,
)
- for prj, prj_type in zip(db_projects, db_project_types)
+ for prj, prj_type in zip(db_projects, db_project_types, strict=False)
),
reraise=True,
max_concurrency=100,
diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_api.py b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
index 6469375c853..d3bc6092aaf 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
@@ -69,6 +69,7 @@ async def trash_project(
user_id: UserID,
project_id: ProjectID,
force_stop_first: bool,
+ explicit: bool,
):
"""
@@ -113,13 +114,14 @@ async def _schedule():
product_name=product_name,
)
- # mark as trash
await projects_api.patch_project(
app,
user_id=user_id,
product_name=product_name,
project_uuid=project_id,
- project_patch=ProjectPatchExtended(trashed_at=arrow.utcnow().datetime),
+ project_patch=ProjectPatchExtended(
+ trashed_at=arrow.utcnow().datetime, trashed_explicitly=explicit
+ ),
)
@@ -136,5 +138,5 @@ async def untrash_project(
user_id=user_id,
product_name=product_name,
project_uuid=project_id,
- project_patch=ProjectPatchExtended(trashed_at=None),
+ project_patch=ProjectPatchExtended(trashed_at=None, trashed_explicitly=False),
)
diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
index 2995488c562..4593779e735 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
@@ -1,6 +1,4 @@
-import functools
import logging
-from typing import NamedTuple
from aiohttp import web
from servicelib.aiohttp import status
@@ -8,13 +6,14 @@
parse_request_path_parameters_as,
parse_request_query_parameters_as,
)
-from servicelib.aiohttp.typing_extension import Handler
-from servicelib.aiohttp.web_exceptions_extension import get_http_error_class_or_none
-from servicelib.logging_errors import create_troubleshotting_log_kwargs
-from servicelib.status_codes_utils import is_5xx_server_error
from .._meta import API_VTAG as VTAG
from ..application_settings_utils import requires_dev_feature_enabled
+from ..exceptions_handlers import (
+ ExceptionToHttpErrorMap,
+ HttpErrorInfo,
+ create_exception_handlers_decorator,
+)
from ..login.decorators import get_user_id, login_required
from ..products.api import get_product_name
from ..projects._common_models import ProjectPathParams
@@ -34,12 +33,7 @@
#
-class HttpErrorInfo(NamedTuple):
- status_code: int
- msg_template: str
-
-
-_TO_HTTP_ERROR_MAP: dict[type[Exception], HttpErrorInfo] = {
+_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = {
ProjectRunningConflictError: HttpErrorInfo(
status.HTTP_409_CONFLICT,
"Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again",
@@ -51,49 +45,9 @@ class HttpErrorInfo(NamedTuple):
}
-class _DefaultDict(dict):
- def __missing__(self, key):
- return f"'{key}=?'"
-
-
-def _handle_request_exceptions(handler: Handler):
- @functools.wraps(handler)
- async def _wrapper(request: web.Request) -> web.StreamResponse:
- try:
- return await handler(request)
-
- except ProjectTrashError as exc:
- for exc_cls, http_error_info in _TO_HTTP_ERROR_MAP.items():
- if isinstance(exc, exc_cls):
-
- # safe formatting, i.e. does not raise
- user_msg = http_error_info.msg_template.format_map(
- _DefaultDict(getattr(exc, "__dict__", {}))
- )
-
- http_error_cls = get_http_error_class_or_none(
- http_error_info.status_code
- )
- assert http_error_cls # nosec
-
- if is_5xx_server_error(http_error_info.status_code):
- _logger.exception(
- **create_troubleshotting_log_kwargs(
- user_msg,
- error=exc,
- error_context={
- "request": request,
- "request.remote": f"{request.remote}",
- "request.method": f"{request.method}",
- "request.path": f"{request.path}",
- },
- )
- )
- raise http_error_cls(reason=user_msg) from exc
- raise
-
- return _wrapper
-
+_handle_exceptions = create_exception_handlers_decorator(
+ exceptions_catch=ProjectTrashError, exc_to_status_map=_TO_HTTP_ERROR_MAP
+)
#
# ROUTES
@@ -106,7 +60,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse:
@requires_dev_feature_enabled
@login_required
@permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
async def empty_trash(request: web.Request):
user_id = get_user_id(request)
product_name = get_product_name(request)
@@ -122,7 +76,7 @@ async def empty_trash(request: web.Request):
@requires_dev_feature_enabled
@login_required
@permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
async def trash_project(request: web.Request):
user_id = get_user_id(request)
product_name = get_product_name(request)
@@ -137,6 +91,7 @@ async def trash_project(request: web.Request):
user_id=user_id,
project_id=path_params.project_id,
force_stop_first=query_params.force,
+ explicit=True,
)
return web.json_response(status=status.HTTP_204_NO_CONTENT)
@@ -146,7 +101,7 @@ async def trash_project(request: web.Request):
@requires_dev_feature_enabled
@login_required
@permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
async def untrash_project(request: web.Request):
user_id = get_user_id(request)
product_name = get_product_name(request)
diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py
index a74375640fd..cb5579b421f 100644
--- a/services/web/server/src/simcore_service_webserver/projects/db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/db.py
@@ -16,7 +16,7 @@
from aiopg.sa.connection import SAConnection
from aiopg.sa.result import ResultProxy, RowProxy
from models_library.basic_types import IDStr
-from models_library.folders import FolderID
+from models_library.folders import FolderQuery, FolderScope
from models_library.products import ProductName
from models_library.projects import ProjectID, ProjectIDStr
from models_library.projects_comments import CommentID, ProjectsCommentsDB
@@ -31,7 +31,7 @@
from models_library.users import UserID
from models_library.utils.fastapi_encoders import jsonable_encoder
from models_library.wallets import WalletDB, WalletID
-from models_library.workspaces import WorkspaceID
+from models_library.workspaces import WorkspaceQuery, WorkspaceScope
from pydantic import TypeAdapter
from pydantic.types import PositiveInt
from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
@@ -59,7 +59,7 @@
from sqlalchemy import func, literal_column
from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
from sqlalchemy.dialects.postgresql import insert as pg_insert
-from sqlalchemy.sql import and_
+from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, and_
from tenacity import TryAgain
from tenacity.asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
@@ -352,21 +352,22 @@ async def upsert_project_linked_product(
).group_by(project_to_groups.c.project_uuid)
).subquery("access_rights_subquery")
- async def list_projects( # pylint: disable=too-many-arguments
+ async def list_projects( # pylint: disable=too-many-arguments,too-many-statements,too-many-branches
self,
*,
- # hierarchy filters
- product_name: str,
+ product_name: ProductName,
user_id: PositiveInt,
- workspace_id: WorkspaceID | None,
- folder_id: FolderID | None = None,
+ # hierarchy filters
+ workspace_query: WorkspaceQuery,
+ folder_query: FolderQuery,
# attribute filters
- search: str | None = None,
filter_by_project_type: ProjectType | None = None,
filter_by_services: list[dict] | None = None,
- published: bool | None = False,
- hidden: bool | None = False,
- trashed: bool | None = False,
+ filter_published: bool | None = False,
+ filter_hidden: bool | None = False,
+ filter_trashed: bool | None = False,
+ filter_by_text: str | None = None,
+ filter_tag_ids_list: list[int] | None = None,
# pagination
offset: int | None = 0,
limit: int | None = None,
@@ -375,151 +376,9 @@ async def list_projects( # pylint: disable=too-many-arguments
field=IDStr("last_change_date"), direction=OrderDirection.DESC
),
) -> tuple[list[dict[str, Any]], list[ProjectType], int]:
- """
- If workspace_id is provided, then listing in workspace is considered/preffered
- """
- assert (
- order_by.field in projects.columns
- ), "Guaranteed by ProjectListWithJsonStrParams" # nosec
-
- # helper
- private_workspace_user_id_or_none: UserID | None = (
- None if workspace_id else user_id
- )
-
- async with self.engine.acquire() as conn:
-
- _join_query = (
- projects.join(projects_to_products, isouter=True)
- .join(self.access_rights_subquery, isouter=True)
- .join(
- projects_to_folders,
- (
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (
- projects_to_folders.c.user_id
- == private_workspace_user_id_or_none
- )
- ),
- isouter=True,
- )
- )
-
- query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- self.access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- )
- .select_from(_join_query)
- .where(
- (
- (projects_to_products.c.product_name == product_name)
- # This was added for backward compatibility, including old projects not in the projects_to_products table.
- | (projects_to_products.c.product_name.is_(None))
- )
- & (
- projects_to_folders.c.folder_id == folder_id
- if folder_id
- else projects_to_folders.c.folder_id.is_(None)
- )
- & (
- projects.c.workspace_id == workspace_id # <-- Shared workspace
- if workspace_id
- else projects.c.workspace_id.is_(None) # <-- Private workspace
- )
- )
- )
-
- # attributes filters
- # None, true, false = all, attribute, !attribute
- attributes_filters = []
- if filter_by_project_type is not None:
- attributes_filters.append(
- projects.c.type == filter_by_project_type.value
- )
-
- if hidden is not None:
- attributes_filters.append(projects.c.hidden.is_(hidden))
-
- if published is not None:
- attributes_filters.append(projects.c.published.is_(published))
-
- if trashed is not None:
- attributes_filters.append(
- projects.c.trashed_at.is_not(None)
- if trashed
- else projects.c.trashed_at.is_(None)
- )
- query = query.where(sa.and_(*attributes_filters))
+ if filter_tag_ids_list is None:
+ filter_tag_ids_list = []
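+        # NOTE: a None default (instead of []) avoids a mutable default argument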
- if private_workspace_user_id_or_none:
- # If Private workspace we check to which projects user has access
- user_groups: list[RowProxy] = await self._list_user_groups(
- conn, user_id
- )
- query = query.where(
- (projects.c.prj_owner == user_id)
- | sa.text(
- f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
- )
- )
-
- if search:
- query = query.join(
- users, users.c.id == projects.c.prj_owner, isouter=True
- )
- query = query.where(
- (projects.c.name.ilike(f"%{search}%"))
- | (projects.c.description.ilike(f"%{search}%"))
- | (projects.c.uuid.ilike(f"%{search}%"))
- | (users.c.name.ilike(f"%{search}%"))
- )
-
- if order_by.direction == OrderDirection.ASC:
- query = query.order_by(sa.asc(getattr(projects.c, order_by.field)))
- else:
- query = query.order_by(sa.desc(getattr(projects.c, order_by.field)))
-
- # page meta
- total_number_of_projects = await conn.scalar(
- query.with_only_columns(func.count()).order_by(None)
- )
- assert total_number_of_projects is not None # nosec
-
- # page data
- prjs, prj_types = await self._execute_without_permission_check(
- conn,
- user_id=user_id,
- select_projects_query=query.offset(offset).limit(limit),
- filter_by_services=filter_by_services,
- )
-
- return (
- prjs,
- prj_types,
- total_number_of_projects,
- )
-
- async def list_projects_full_search(
- self,
- *,
- user_id: PositiveInt,
- product_name: ProductName,
- filter_by_services: list[dict] | None = None,
- text: str | None = None,
- offset: int | None = 0,
- limit: int | None = None,
- tag_ids_list: list[int],
- order_by: OrderBy = OrderBy(
- field=IDStr("last_change_date"), direction=OrderDirection.DESC
- ),
- ) -> tuple[list[dict[str, Any]], list[ProjectType], int]:
async with self.engine.acquire() as conn:
user_groups: list[RowProxy] = await self._list_user_groups(conn, user_id)
@@ -549,124 +408,212 @@ async def list_projects_full_search(
).group_by(projects_tags.c.project_id)
).subquery("project_tags_subquery")
- private_workspace_query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- self.access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).label("tags"),
+ ###
+ # Private workspace query
+ ###
+
+ if workspace_query.workspace_scope is not WorkspaceScope.SHARED:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.PRIVATE,
+ WorkspaceScope.ALL,
)
- .select_from(
- projects.join(self.access_rights_subquery, isouter=True)
- .join(projects_to_products)
- .join(
- projects_to_folders,
+
+ private_workspace_query = (
+ sa.select(
+ *[
+ col
+ for col in projects.columns
+ if col.name not in ["access_rights"]
+ ],
+ self.access_rights_subquery.c.access_rights,
+ projects_to_products.c.product_name,
+ projects_to_folders.c.folder_id,
+ sa.func.coalesce(
+ project_tags_subquery.c.tags,
+ sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
+ ).label("tags"),
+ )
+ .select_from(
+ projects.join(self.access_rights_subquery, isouter=True)
+ .join(projects_to_products)
+ .join(
+ projects_to_folders,
+ (
+ (projects_to_folders.c.project_uuid == projects.c.uuid)
+ & (projects_to_folders.c.user_id == user_id)
+ ),
+ isouter=True,
+ )
+ .join(project_tags_subquery, isouter=True)
+ )
+ .where(
(
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (projects_to_folders.c.user_id == user_id)
- ),
- isouter=True,
+ (projects.c.prj_owner == user_id)
+ | sa.text(
+ f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ )
+ )
+ & (projects.c.workspace_id.is_(None)) # <-- Private workspace
+ & (projects_to_products.c.product_name == product_name)
)
- .join(project_tags_subquery, isouter=True)
)
- .where(
- (
- (projects.c.prj_owner == user_id)
- | sa.text(
- f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ if filter_by_text is not None:
+ private_workspace_query = private_workspace_query.join(
+ users, users.c.id == projects.c.prj_owner, isouter=True
+ )
+ else:
+ private_workspace_query = None
+
+ ###
+ # Shared workspace query
+ ###
+
+ if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE:
+ assert workspace_query.workspace_scope in (
+ WorkspaceScope.SHARED,
+ WorkspaceScope.ALL,
+ ) # nosec
+
+ shared_workspace_query = (
+ sa.select(
+ *[
+ col
+ for col in projects.columns
+ if col.name not in ["access_rights"]
+ ],
+ workspace_access_rights_subquery.c.access_rights,
+ projects_to_products.c.product_name,
+ projects_to_folders.c.folder_id,
+ sa.func.coalesce(
+ project_tags_subquery.c.tags,
+ sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
+ ).label("tags"),
+ )
+ .select_from(
+ projects.join(
+ workspace_access_rights_subquery,
+ projects.c.workspace_id
+ == workspace_access_rights_subquery.c.workspace_id,
+ )
+ .join(projects_to_products)
+ .join(
+ projects_to_folders,
+ (
+ (projects_to_folders.c.project_uuid == projects.c.uuid)
+ & (projects_to_folders.c.user_id.is_(None))
+ ),
+ isouter=True,
)
+ .join(project_tags_subquery, isouter=True)
)
- & (projects.c.workspace_id.is_(None))
- & (projects_to_products.c.product_name == product_name)
- & (projects.c.hidden.is_(False))
- & (projects.c.type == ProjectType.STANDARD)
- & (
- (projects.c.name.ilike(f"%{text}%"))
- | (projects.c.description.ilike(f"%{text}%"))
- | (projects.c.uuid.ilike(f"%{text}%"))
+ .where(
+ (
+ sa.text(
+ f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ )
+ )
+ & (projects_to_products.c.product_name == product_name)
)
)
- )
-
- if tag_ids_list:
- private_workspace_query = private_workspace_query.where(
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).op("@>")(tag_ids_list)
- )
+ if workspace_query.workspace_scope == WorkspaceScope.ALL:
+ shared_workspace_query = shared_workspace_query.where(
+ projects.c.workspace_id.is_not(
+ None
+ ) # <-- All shared workspaces
+ )
+ if filter_by_text is not None:
+ shared_workspace_query = shared_workspace_query.join(
+ users, users.c.id == projects.c.prj_owner, isouter=True
+ )
- shared_workspace_query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- workspace_access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).label("tags"),
- )
- .select_from(
- projects.join(
- workspace_access_rights_subquery,
+ else:
+ assert (
+ workspace_query.workspace_scope == WorkspaceScope.SHARED
+ ) # nosec
+ shared_workspace_query = shared_workspace_query.where(
projects.c.workspace_id
- == workspace_access_rights_subquery.c.workspace_id,
- )
- .join(projects_to_products)
- .join(
- projects_to_folders,
- (
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (projects_to_folders.c.user_id.is_(None))
- ),
- isouter=True,
+ == workspace_query.workspace_id # <-- Specific shared workspace
)
- .join(project_tags_subquery, isouter=True)
+
+ else:
+ shared_workspace_query = None
+
+ ###
+ # Attributes Filters
+ ###
+
+ attributes_filters: list[ColumnElement] = []
+ if filter_by_project_type is not None:
+ attributes_filters.append(
+ projects.c.type == filter_by_project_type.value
)
- .where(
+
+ if filter_hidden is not None:
+ attributes_filters.append(projects.c.hidden.is_(filter_hidden))
+
+ if filter_published is not None:
+ attributes_filters.append(projects.c.published.is_(filter_published))
+
+ if filter_trashed is not None:
+ attributes_filters.append(
+ # marked explicitly as trashed
(
- sa.text(
- f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
- )
- )
- & (projects.c.workspace_id.is_not(None))
- & (projects_to_products.c.product_name == product_name)
- & (projects.c.hidden.is_(False))
- & (projects.c.type == ProjectType.STANDARD)
- & (
- (projects.c.name.ilike(f"%{text}%"))
- | (projects.c.description.ilike(f"%{text}%"))
- | (projects.c.uuid.ilike(f"%{text}%"))
+ projects.c.trashed_at.is_not(None)
+ & projects.c.trashed_explicitly.is_(True)
)
+ if filter_trashed
+ # not marked as trashed
+ else projects.c.trashed_at.is_(None)
)
- )
-
- if tag_ids_list:
- shared_workspace_query = shared_workspace_query.where(
+ if filter_by_text is not None:
+ attributes_filters.append(
+ (projects.c.name.ilike(f"%{filter_by_text}%"))
+ | (projects.c.description.ilike(f"%{filter_by_text}%"))
+ | (projects.c.uuid.ilike(f"%{filter_by_text}%"))
+ | (users.c.name.ilike(f"%{filter_by_text}%"))
+ )
+ if filter_tag_ids_list:
+ attributes_filters.append(
sa.func.coalesce(
project_tags_subquery.c.tags,
sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).op("@>")(tag_ids_list)
+ ).op("@>")(filter_tag_ids_list)
+ )
+ if folder_query.folder_scope is not FolderScope.ALL:
+ if folder_query.folder_scope == FolderScope.SPECIFIC:
+ attributes_filters.append(
+ projects_to_folders.c.folder_id == folder_query.folder_id
+ )
+ else:
+ assert folder_query.folder_scope == FolderScope.ROOT # nosec
+ attributes_filters.append(projects_to_folders.c.folder_id.is_(None))
+
+ ###
+ # Combined
+ ###
+
+ combined_query: CompoundSelect | Select | None = None
+ if (
+ private_workspace_query is not None
+ and shared_workspace_query is not None
+ ):
+ combined_query = sa.union_all(
+ private_workspace_query.where(sa.and_(*attributes_filters)),
+ shared_workspace_query.where(sa.and_(*attributes_filters)),
+ )
+ elif private_workspace_query is not None:
+ combined_query = private_workspace_query.where(
+ sa.and_(*attributes_filters)
+ )
+ elif shared_workspace_query is not None:
+ combined_query = shared_workspace_query.where(
+ sa.and_(*attributes_filters)
)
- combined_query = sa.union_all(
- private_workspace_query, shared_workspace_query
- )
-
- count_query = sa.select(func.count()).select_from(combined_query)
+ if combined_query is None:
+ msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}"
+ raise ValueError(msg)
+ count_query = sa.select(func.count()).select_from(combined_query.subquery())
total_count = await conn.scalar(count_query)
if order_by.direction == OrderDirection.ASC:
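
> Note: the hunk above reduces to a small composition pattern: each workspace scope contributes an optional SELECT, the same attribute filters are applied to every branch, and the surviving branches are merged with UNION ALL. A minimal sketch of that combination step, with illustrative names (not the production code):

```python
import sqlalchemy as sa
from sqlalchemy.sql import Select
from sqlalchemy.sql.elements import ColumnElement
from sqlalchemy.sql.expression import CompoundSelect


def combine_workspace_queries(
    private_q: Select | None,
    shared_q: Select | None,
    attributes_filters: list[ColumnElement],
) -> Select | CompoundSelect:
    # apply the same attribute filters to every branch that was built
    branches = [
        q.where(sa.and_(sa.true(), *attributes_filters))
        for q in (private_q, shared_q)
        if q is not None
    ]
    if not branches:
        msg = "No valid queries were provided to combine"
        raise ValueError(msg)
    # a single branch stays a plain SELECT; two branches become a UNION ALL
    return branches[0] if len(branches) == 1 else sa.union_all(*branches)
```

> Since a CompoundSelect cannot be used directly as a FROM clause, the count query above wraps the combined query with `.subquery()` first.
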
diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py
index 5b3e900b531..dca631ba39a 100644
--- a/services/web/server/src/simcore_service_webserver/projects/models.py
+++ b/services/web/server/src/simcore_service_webserver/projects/models.py
@@ -52,6 +52,7 @@ class ProjectDB(BaseModel):
hidden: bool
workspace_id: WorkspaceID | None
trashed_at: datetime | None
+ trashed_explicitly: bool = False
model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True)
@@ -94,7 +95,8 @@ class UserProjectAccessRightsWithWorkspace(BaseModel):
class ProjectPatchExtended(ProjectPatch):
# Only used internally
- trashed_at: datetime | None = None
+ trashed_at: datetime | None
+ trashed_explicitly: bool
model_config = ConfigDict(populate_by_name=True, extra="forbid")
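
> Note: `trashed_explicitly` complements `trashed_at`. A plausible reading, consistent with the `filter_trashed` branch in the listing query above: `trashed_at` records *when* an item entered the trash, while `trashed_explicitly` distinguishes items trashed directly from items trashed only because an ancestor folder was. A sketch of the resulting visibility rules (assumed, not taken verbatim from the code):

```python
from datetime import datetime


def is_listed_in_trash(trashed_at: datetime | None, trashed_explicitly: bool) -> bool:
    # only explicitly trashed items show up at the top level of the trash view
    return trashed_at is not None and trashed_explicitly


def is_listed_outside_trash(trashed_at: datetime | None) -> bool:
    # anything with a trashed_at timestamp (explicit or inherited) is hidden
    return trashed_at is None
```
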
diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py
index deee93f1fbd..1a1427cc09c 100644
--- a/services/web/server/src/simcore_service_webserver/redis.py
+++ b/services/web/server/src/simcore_service_webserver/redis.py
@@ -7,6 +7,7 @@
from settings_library.redis import RedisDatabase, RedisSettings
from ._constants import APP_SETTINGS_KEY
+from ._meta import APP_NAME
_logger = logging.getLogger(__name__)
@@ -44,6 +45,7 @@ async def setup_redis_client(app: web.Application):
)
},
settings=redis_settings,
+ client_name=APP_NAME,
)
await manager.setup()
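
> Note: setting `client_name` makes Redis tag each connection via CLIENT SETNAME, so server-side `CLIENT LIST` output shows which service owns which connection. A hedged sketch with redis-py's asyncio client, assuming RedisClientsManager forwards the name roughly like this:

```python
import redis.asyncio as aioredis


async def make_client() -> aioredis.Redis:
    # client_name shows up in CLIENT LIST on the Redis server
    return aioredis.Redis.from_url(
        "redis://localhost:6379/0",  # illustrative DSN
        client_name="simcore_service_webserver",
    )
```
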
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
index c186786f603..b5b969f0db4 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
@@ -3,20 +3,26 @@
- Adds a layer to the postgres API with a focus on the projects comments
"""
+
import logging
from datetime import datetime
from aiohttp import web
from models_library.users import GroupID
from models_library.workspaces import WorkspaceID
-from pydantic import ConfigDict, BaseModel
+from pydantic import BaseModel, ConfigDict
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
)
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
from sqlalchemy import func, literal_column
+from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import WorkspaceGroupNotFoundError
_logger = logging.getLogger(__name__)
@@ -39,15 +45,16 @@ class WorkspaceGroupGetDB(BaseModel):
async def create_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
- *,
read: bool,
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces_access_rights.insert()
.values(
workspace_id=workspace_id,
@@ -66,6 +73,8 @@ async def create_workspace_group(
async def list_workspace_groups(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
) -> list[WorkspaceGroupGetDB]:
stmt = (
@@ -81,14 +90,15 @@ async def list_workspace_groups(
.where(workspaces_access_rights.c.workspace_id == workspace_id)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(stmt)
- rows = await result.fetchall() or []
- return [WorkspaceGroupGetDB.model_validate(row) for row in rows]
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(stmt)
+ return [WorkspaceGroupGetDB.model_validate(row) async for row in result]
async def get_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
) -> WorkspaceGroupGetDB:
@@ -108,8 +118,8 @@ async def get_workspace_group(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(stmt)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(stmt)
row = await result.first()
if row is None:
raise WorkspaceGroupNotFoundError(
@@ -120,15 +130,16 @@ async def get_workspace_group(
async def update_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
- *,
read: bool,
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces_access_rights.update()
.values(
read=read,
@@ -151,10 +162,12 @@ async def update_workspace_group(
async def delete_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
await conn.execute(
workspaces_access_rights.delete().where(
(workspaces_access_rights.c.workspace_id == workspace_id)
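
> Note: the migration above follows one pattern throughout: every repo function gains an optional AsyncConnection so callers can compose several calls inside a single transaction; when it is None, the helper acquires and releases its own connection. A simplified sketch of what the two helpers presumably do (not the actual utils_repos implementation):

```python
from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine


@asynccontextmanager
async def pass_or_acquire_connection(
    engine: AsyncEngine, connection: AsyncConnection | None = None
):
    if connection is not None:
        yield connection  # caller manages the connection's lifetime
    else:
        async with engine.connect() as conn:
            yield conn


@asynccontextmanager
async def transaction_context(
    engine: AsyncEngine, connection: AsyncConnection | None = None
):
    async with pass_or_acquire_connection(engine, connection) as conn:
        if conn.in_transaction():
            yield conn  # join the caller's ongoing transaction
        else:
            async with conn.begin():  # open, then commit/rollback on exit
                yield conn
```

> Read paths use `pass_or_acquire_connection`; write paths use `transaction_context` so they commit atomically.
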
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
index 256b50de114..a645037f5a4 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
@@ -12,11 +12,11 @@
from models_library.users import UserID
from models_library.workspaces import UserWorkspaceAccessRightsDB, WorkspaceID
from pydantic import NonNegativeInt
-from simcore_service_webserver.projects._db_utils import PermissionStr
-from simcore_service_webserver.workspaces.errors import WorkspaceAccessForbiddenError
+from ..projects._db_utils import PermissionStr
from ..users.api import get_user
from . import _workspaces_db as db
+from .errors import WorkspaceAccessForbiddenError
_logger = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
index 4f007bc7552..5f80868a27f 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
@@ -17,16 +17,22 @@
WorkspaceID,
)
from pydantic import NonNegativeInt
-from simcore_postgres_database.models.groups import user_to_groups
from simcore_postgres_database.models.workspaces import workspaces
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
)
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
+from simcore_postgres_database.utils_workspaces_sql import (
+ create_my_workspace_access_rights_subquery,
+)
from sqlalchemy import asc, desc, func
-from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
-from sqlalchemy.sql import Subquery, select
+from sqlalchemy.ext.asyncio import AsyncConnection
+from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError
_logger = logging.getLogger(__name__)
@@ -45,14 +51,16 @@
async def create_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
product_name: ProductName,
owner_primary_gid: GroupID,
name: str,
description: str | None,
thumbnail: str | None,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces.insert()
.values(
name=name,
@@ -69,7 +77,7 @@ async def create_workspace(
return WorkspaceDB.model_validate(row)
-access_rights_subquery = (
+_access_rights_subquery = (
select(
workspaces_access_rights.c.workspace_id,
func.jsonb_object_agg(
@@ -91,31 +99,9 @@ async def create_workspace(
).subquery("access_rights_subquery")
-def _create_my_access_rights_subquery(user_id: UserID) -> Subquery:
- return (
- select(
- workspaces_access_rights.c.workspace_id,
- func.json_build_object(
- "read",
- func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN),
- "write",
- func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN),
- "delete",
- func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN),
- ).label("my_access_rights"),
- )
- .select_from(
- workspaces_access_rights.join(
- user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid
- )
- )
- .where(user_to_groups.c.uid == user_id)
- .group_by(workspaces_access_rights.c.workspace_id)
- ).subquery("my_access_rights_subquery")
-
-
async def list_workspaces_for_user(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
user_id: UserID,
product_name: ProductName,
@@ -123,16 +109,18 @@ async def list_workspaces_for_user(
limit: NonNegativeInt,
order_by: OrderBy,
) -> tuple[int, list[UserWorkspaceAccessRightsDB]]:
- my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id)
+ my_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
+ )
base_query = (
select(
*_SELECTION_ARGS,
- access_rights_subquery.c.access_rights,
+ _access_rights_subquery.c.access_rights,
my_access_rights_subquery.c.my_access_rights,
)
.select_from(
- workspaces.join(access_rights_subquery).join(my_access_rights_subquery)
+ workspaces.join(_access_rights_subquery).join(my_access_rights_subquery)
)
.where(workspaces.c.product_name == product_name)
)
@@ -148,35 +136,37 @@ async def list_workspaces_for_user(
list_query = base_query.order_by(desc(getattr(workspaces.c, order_by.field)))
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
- count_result = await conn.execute(count_query)
- total_count = await count_result.scalar()
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ total_count = await conn.scalar(count_query)
- result = await conn.execute(list_query)
- rows = await result.fetchall() or []
- results: list[UserWorkspaceAccessRightsDB] = [
- UserWorkspaceAccessRightsDB.model_validate(row) for row in rows
+ result = await conn.stream(list_query)
+ items: list[UserWorkspaceAccessRightsDB] = [
+ UserWorkspaceAccessRightsDB.model_validate(row) async for row in result
]
- return cast(int, total_count), results
+ return cast(int, total_count), items
async def get_workspace_for_user(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
user_id: UserID,
workspace_id: WorkspaceID,
product_name: ProductName,
) -> UserWorkspaceAccessRightsDB:
- my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id)
+ my_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
+ )
base_query = (
select(
*_SELECTION_ARGS,
- access_rights_subquery.c.access_rights,
+ _access_rights_subquery.c.access_rights,
my_access_rights_subquery.c.my_access_rights,
)
.select_from(
- workspaces.join(access_rights_subquery).join(my_access_rights_subquery)
+ workspaces.join(_access_rights_subquery).join(my_access_rights_subquery)
)
.where(
(workspaces.c.workspace_id == workspace_id)
@@ -184,8 +174,8 @@ async def get_workspace_for_user(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(base_query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(base_query)
row = await result.first()
if row is None:
raise WorkspaceAccessForbiddenError(
@@ -196,14 +186,16 @@ async def get_workspace_for_user(
async def update_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
name: str,
description: str | None,
thumbnail: str | None,
product_name: ProductName,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces.update()
.values(
name=name,
@@ -225,10 +217,12 @@ async def update_workspace(
async def delete_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
await conn.execute(
workspaces.delete().where(
(workspaces.c.workspace_id == workspace_id)
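
> Note: the same file also swaps the aiopg `execute()` + `fetchall()` idiom for SQLAlchemy's async streaming API, consuming rows incrementally instead of buffering the whole result set. A minimal sketch with a hypothetical row model:

```python
from pydantic import BaseModel, ConfigDict
from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.sql import Select


class WorkspaceRow(BaseModel):  # hypothetical row model, for illustration only
    model_config = ConfigDict(from_attributes=True)
    workspace_id: int


async def fetch_streamed(conn: AsyncConnection, stmt: Select) -> list[WorkspaceRow]:
    # conn.stream() yields rows as they arrive, instead of buffering them all
    # the way the old execute()+fetchall() did
    result = await conn.stream(stmt)
    return [WorkspaceRow.model_validate(row) async for row in result]
```
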
diff --git a/services/web/server/tests/unit/isolated/test_exceptions_handlers.py b/services/web/server/tests/unit/isolated/test_exceptions_handlers.py
new file mode 100644
index 00000000000..27cde72283b
--- /dev/null
+++ b/services/web/server/tests/unit/isolated/test_exceptions_handlers.py
@@ -0,0 +1,117 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
+
+import logging
+
+import pytest
+from aiohttp import web
+from aiohttp.test_utils import make_mocked_request
+from servicelib.aiohttp import status
+from simcore_service_webserver.errors import WebServerBaseError
+from simcore_service_webserver.exceptions_handlers import (
+ HttpErrorInfo,
+ _sort_exceptions_by_specificity,
+ create_exception_handlers_decorator,
+)
+
+
+class BasePluginError(WebServerBaseError):
+ ...
+
+
+class OneError(BasePluginError):
+ ...
+
+
+class OtherError(BasePluginError):
+ ...
+
+
+def test_sort_concrete_first():
+ assert _sort_exceptions_by_specificity([Exception, BasePluginError]) == [
+ BasePluginError,
+ Exception,
+ ]
+
+ assert _sort_exceptions_by_specificity(
+ [Exception, BasePluginError], concrete_first=False
+ ) == [
+ Exception,
+ BasePluginError,
+ ]
+
+
+def test_sort_exceptions_by_specificity():
+
+ got_exceptions_cls = _sort_exceptions_by_specificity(
+ [
+ Exception,
+ OtherError,
+ OneError,
+ BasePluginError,
+ ValueError,
+ ArithmeticError,
+ ZeroDivisionError,
+ ]
+ )
+
+ for from_, exc in enumerate(got_exceptions_cls, start=1):
+ for exc_after in got_exceptions_cls[from_:]:
+ assert not issubclass(exc_after, exc), f"{got_exceptions_cls=}"
+
+
+async def test_exception_handlers_decorator(
+ caplog: pytest.LogCaptureFixture,
+):
+
+ _handle_exceptions = create_exception_handlers_decorator(
+ exceptions_catch=BasePluginError,
+ exc_to_status_map={
+ OneError: HttpErrorInfo(
+ status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+ msg_template="This is one error for front-end",
+ )
+ },
+ )
+
+ @_handle_exceptions
+ async def _rest_handler(request: web.Request) -> web.Response:
+ if request.query.get("raise") == "OneError":
+ raise OneError
+ if request.query.get("raise") == "ArithmeticError":
+ raise ArithmeticError
+
+ return web.Response(reason="all good")
+
+ with caplog.at_level(logging.ERROR):
+
+ # emulates successful call
+ resp = await _rest_handler(make_mocked_request("GET", "/foo"))
+ assert resp.status == status.HTTP_200_OK
+ assert resp.reason == "all good"
+
+ assert not caplog.records
+
+        # this will propagate and be caught by the outermost error middleware
+ with pytest.raises(ArithmeticError):
+ await _rest_handler(
+ make_mocked_request("GET", "/foo?raise=ArithmeticError")
+ )
+
+ assert not caplog.records
+
+        # this is a 5XX: it is converted to a response but also logged as an error
+ with pytest.raises(web.HTTPException) as exc_info:
+ await _rest_handler(make_mocked_request("GET", "/foo?raise=OneError"))
+
+ resp = exc_info.value
+ assert resp.status == status.HTTP_503_SERVICE_UNAVAILABLE
+ assert "front-end" in resp.reason
+
+ assert caplog.records
+ assert caplog.records[0].levelno == logging.ERROR
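
> Note: the tests above pin down the contract of `_sort_exceptions_by_specificity` without showing its implementation: with `concrete_first=True`, no class may appear after one of its subclasses. One implementation that satisfies the contract — an assumption, not necessarily the webserver's — sorts by MRO length, since a subclass always has a strictly longer MRO than any of its bases:

```python
def sort_exceptions_by_specificity(
    exceptions: list[type[BaseException]], *, concrete_first: bool = True
) -> list[type[BaseException]]:
    # longer MRO == more derived; reverse=True puts concrete classes first,
    # while sorted() keeps unrelated classes in their input order
    return sorted(exceptions, key=lambda exc: len(exc.__mro__), reverse=concrete_first)
```
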
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
index 74c932ca600..960d97969ca 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
@@ -4,7 +4,8 @@
# pylint: disable=unused-variable
import asyncio
-from typing import Any, Awaitable, Callable
+from collections.abc import Awaitable, Callable
+from typing import Any
from urllib.parse import urlparse
import pytest
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index a606d2c61c2..26d6f0cfb0e 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -165,7 +165,7 @@ async def _assert_get_same_project(
project_permalink = data.pop("permalink", None)
folder_id = data.pop("folderId", None)
- assert data == project
+ assert data == {k: project[k] for k in data}
if project_state:
assert ProjectState.model_validate(project_state)
@@ -201,22 +201,24 @@ async def test_list_projects(
assert len(data) == 2
# template project
- project_state = data[0].pop("state")
- project_permalink = data[0].pop("permalink")
- folder_id = data[0].pop("folderId")
+ got = data[0]
+ project_state = got.pop("state")
+ project_permalink = got.pop("permalink")
+ folder_id = got.pop("folderId")
- assert data[0] == template_project
+ assert got == {k: template_project[k] for k in got}
assert not ProjectState(
**project_state
).locked.value, "Templates are not locked"
assert ProjectPermalink.model_validate(project_permalink)
# standard project
- project_state = data[1].pop("state")
- project_permalink = data[1].pop("permalink", None)
- folder_id = data[1].pop("folderId")
+ got = data[1]
+ project_state = got.pop("state")
+ project_permalink = got.pop("permalink", None)
+ folder_id = got.pop("folderId")
- assert data[1] == user_project
+ assert got == {k: user_project[k] for k in got}
assert ProjectState(**project_state)
assert project_permalink is None
assert folder_id is None
@@ -227,11 +229,12 @@ async def test_list_projects(
assert len(data) == 1
    # standard project
- project_state = data[0].pop("state")
- project_permalink = data[0].pop("permalink", None)
- folder_id = data[0].pop("folderId")
+ got = data[0]
+ project_state = got.pop("state")
+ project_permalink = got.pop("permalink", None)
+ folder_id = got.pop("folderId")
- assert data[0] == user_project
+ assert got == {k: user_project[k] for k in got}
assert not ProjectState(
**project_state
).locked.value, "Single user does not lock"
@@ -244,11 +247,12 @@ async def test_list_projects(
assert len(data) == 1
# template project
- project_state = data[0].pop("state")
- project_permalink = data[0].pop("permalink")
- folder_id = data[0].pop("folderId")
+ got = data[0]
+ project_state = got.pop("state")
+ project_permalink = got.pop("permalink")
+ folder_id = got.pop("folderId")
- assert data[0] == template_project
+ assert got == {k: template_project[k] for k in got}
assert not ProjectState(
**project_state
).locked.value, "Templates are not locked"
@@ -360,9 +364,14 @@ async def test_list_projects_with_innaccessible_services(
data, *_ = await _list_and_assert_projects(
client, expected, headers=s4l_product_headers
)
- assert len(data) == 2
+    # UPDATE (use-case 4): 11.11.2024 - This test used to check backwards compatibility for listing
+    # projects that were not in the projects_to_products table. After the refactoring of the project
+    # listing, this is no longer supported. MD double-checked the last_modified_timestamp on projects
+    # that do not have any product assigned: all of them predate 01-11-2022, except for two
+    # (`4b001ad2-8450-11ec-b105-02420a0b02c7` and `d952cbf4-d838-11ec-af92-02420a0bdad4`) which were added to the osparc product.
+ assert len(data) == 0
data, *_ = await _list_and_assert_projects(client, expected)
- assert len(data) == 2
+ assert len(data) == 0
@pytest.mark.parametrize(
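
> Note: the relaxed assertions in this file (`data == {k: project[k] for k in data}`) compare only the keys the API actually returned against the expected fixture, so the fixture may carry fields the listing no longer exposes; a key returned but missing from the fixture still fails with a KeyError. Equivalent helper, for illustration:

```python
def assert_subset_equal(got: dict, expected: dict) -> None:
    # compare `got` against the projection of `expected` onto got's keys
    assert got == {key: expected[key] for key in got}
```
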
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index 77950f4c0be..901a597da40 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -931,7 +931,7 @@ async def test_get_active_project(
data_last_change_date = data.pop("lastChangeDate")
assert user_project_last_change_date < data_last_change_date
- assert data == user_project
+ assert data == {k: user_project[k] for k in data}
else:
mocked_notifications_plugin["subscribe"].assert_not_called()
diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
index 89a67734b60..fadfe561267 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
@@ -98,6 +98,7 @@ def _assert_added_project(
"lastChangeDate",
"accessRights", # NOTE: access rights were moved away from the projects table
"trashedAt",
+ "trashedExplicitly",
]
assert {k: v for k, v in expected_prj.items() if k in _DIFFERENT_KEYS} != {
k: v for k, v in added_prj.items() if k in _DIFFERENT_KEYS
diff --git a/services/web/server/tests/unit/with_dbs/03/test_trash.py b/services/web/server/tests/unit/with_dbs/03/test_trash.py
index 5a760d5f9fd..7d6c701c522 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_trash.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_trash.py
@@ -14,6 +14,7 @@
import pytest
from aiohttp.test_utils import TestClient
from aioresponses import aioresponses
+from models_library.api_schemas_webserver.folders_v2 import FolderGet
from models_library.api_schemas_webserver.projects import ProjectGet, ProjectListItem
from models_library.rest_pagination import Page
from pytest_mock import MockerFixture
@@ -24,6 +25,7 @@
from servicelib.aiohttp import status
from simcore_service_webserver.db.models import UserRole
from simcore_service_webserver.projects.models import ProjectDict
+from yarl import URL
@pytest.fixture
@@ -174,3 +176,222 @@ async def test_trash_projects( # noqa: PLR0915
await asyncio.sleep(0.1)
mock_stop_pipeline.assert_awaited()
mock_remove_dynamic_services.assert_awaited()
+
+
+@pytest.mark.acceptance_test(
+ "For https://github.com/ITISFoundation/osparc-simcore/pull/6642"
+)
+async def test_trash_single_folder(client: TestClient, logged_user: UserInfoDict):
+ assert client.app
+
+ # CREATE a folder
+ resp = await client.post("/v0/folders", json={"name": "My first folder"})
+ data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+ folder = FolderGet.parse_obj(data)
+
+ # ---------------------------------------------------------------------
+
+ # LIST NOT trashed
+ resp = await client.get("/v0/folders")
+ await assert_status(resp, status.HTTP_200_OK)
+
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 1
+
+ assert page.data[0] == folder
+
+ # LIST trashed
+ resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+ await assert_status(resp, status.HTTP_200_OK)
+
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+ # TRASH
+ assert client.app.router["trash_folder"].url_for(folder_id="folder_id") == URL(
+ "/v0/folders/folder_id:trash"
+ )
+
+ trashing_at = arrow.utcnow().datetime
+ resp = await client.post(f"/v0/folders/{folder.folder_id}:trash")
+ await assert_status(
+ resp,
+ status.HTTP_204_NO_CONTENT,
+ )
+
+ # GET
+ resp = await client.get(f"/v0/folders/{folder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+ assert got.folder_id == folder.folder_id
+
+ assert got.trashed_at
+ assert trashing_at < got.trashed_at
+ assert got.trashed_at < arrow.utcnow().datetime
+
+ # LIST trashed
+ resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+ await assert_status(resp, status.HTTP_200_OK)
+
+ page = Page[FolderGet].parse_obj(await resp.json())
+
+ assert page.meta.total == 1
+ assert page.data[0].folder_id == folder.folder_id
+
+ # UNTRASH
+ assert client.app.router["untrash_folder"].url_for(folder_id="folder_id") == URL(
+ "/v0/folders/folder_id:untrash"
+ )
+
+ resp = await client.post(f"/v0/folders/{folder.folder_id}:untrash")
+ data, _ = await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+ # GET
+ resp = await client.get(f"/v0/folders/{folder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+
+ assert got.folder_id == folder.folder_id
+ assert got.trashed_at is None
+
+
+@pytest.mark.acceptance_test(
+ "For https://github.com/ITISFoundation/osparc-simcore/pull/6642"
+)
+async def test_trash_folder_with_content(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ mocked_catalog: None,
+ mocked_director_v2: None,
+):
+ assert client.app
+ project_uuid = UUID(user_project["uuid"])
+
+ # CREATE a folder
+ resp = await client.post("/v0/folders", json={"name": "My first folder"})
+ data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+ folder = FolderGet.parse_obj(data)
+
+ # CREATE a SUB-folder
+ resp = await client.post(
+ "/v0/folders",
+ json={"name": "My subfolder 1", "parentFolderId": folder.folder_id},
+ )
+ data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+ subfolder = FolderGet.parse_obj(data)
+
+ # MOVE project to SUB-folder
+ resp = await client.put(
+ f"/v0/projects/{project_uuid}/folders/{subfolder.folder_id}"
+ )
+ await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+ # CHECK created
+ resp = await client.get("/v0/folders")
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 1
+ assert page.data[0] == folder
+
+ resp = await client.get("/v0/folders", params={"folder_id": f"{folder.folder_id}"})
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 1
+ assert page.data[0] == subfolder
+
+ resp = await client.get(
+ "/v0/projects", params={"folder_id": f"{subfolder.folder_id}"}
+ )
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[ProjectListItem].parse_obj(await resp.json())
+ assert page.meta.total == 1
+ assert page.data[0].uuid == project_uuid
+ assert page.data[0].folder_id == subfolder.folder_id
+
+ # ---------------------------------------------------------------------
+
+ # TRASH folder
+ resp = await client.post(f"/v0/folders/{folder.folder_id}:trash")
+ await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # ONLY the folder itself is listed in trash. Its content is not listed anymore!
+ resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 1
+ assert page.data[0].folder_id == folder.folder_id
+
+ resp = await client.get(
+ "/v0/folders",
+ params={"filters": '{"trashed": true}', "folder_id": f"{folder.folder_id}"},
+ )
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+ resp = await client.get(
+ "/v0/projects",
+ params={"filters": '{"trashed": true}', "folder_id": f"{subfolder.folder_id}"},
+ )
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[ProjectListItem].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+ # CHECK marked as trashed
+ resp = await client.get(f"/v0/folders/{folder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+ assert got.trashed_at is not None
+
+ resp = await client.get(f"/v0/folders/{subfolder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+ assert got.trashed_at is not None
+
+ resp = await client.get(f"/v0/projects/{project_uuid}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = ProjectGet.parse_obj(data)
+ assert got.trashed_at is not None
+
+ # UNTRASH folder
+ resp = await client.post(f"/v0/folders/{folder.folder_id}:untrash")
+ await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+ # NO folders listed in trash.
+ resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+ resp = await client.get(
+ "/v0/folders",
+ params={"filters": '{"trashed": true}', "folder_id": f"{folder.folder_id}"},
+ )
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[FolderGet].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+ resp = await client.get(
+ "/v0/projects",
+ params={"filters": '{"trashed": true}', "folder_id": f"{subfolder.folder_id}"},
+ )
+ await assert_status(resp, status.HTTP_200_OK)
+ page = Page[ProjectListItem].parse_obj(await resp.json())
+ assert page.meta.total == 0
+
+    # CHECK no longer marked as trashed
+ resp = await client.get(f"/v0/folders/{folder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+ assert got.trashed_at is None
+
+ resp = await client.get(f"/v0/folders/{subfolder.folder_id}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = FolderGet.parse_obj(data)
+ assert got.trashed_at is None
+
+ resp = await client.get(f"/v0/projects/{project_uuid}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ got = ProjectGet.parse_obj(data)
+ assert got.trashed_at is None
diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
index bb0c830ef27..03e30daedc4 100644
--- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
+++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
@@ -1,16 +1,18 @@
-import asyncio
-
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-variable
# pylint: disable=too-many-arguments
# pylint: disable=too-many-statements
+
+
+import asyncio
from http import HTTPStatus
from unittest import mock
import pytest
from aiohttp.test_utils import TestClient
from models_library.api_schemas_webserver.folders_v2 import FolderGet
+from pydantic import TypeAdapter
from pytest_mock import MockerFixture
from pytest_simcore.helpers.assert_checks import assert_status
from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
@@ -21,7 +23,10 @@
from servicelib.aiohttp import status
from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.projects._groups_db import update_or_insert_project_group
+from simcore_service_webserver.projects._groups_db import (
+ GroupID,
+ update_or_insert_project_group,
+)
from simcore_service_webserver.projects.models import ProjectDict
@@ -35,7 +40,7 @@ async def test_folders_user_role_permissions(
assert client.app
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
await assert_status(resp, expected.ok)
@@ -50,68 +55,66 @@ async def test_folders_full_workflow(
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert data == []
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "My first folder"})
- added_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
- assert FolderGet.model_validate(added_folder)
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
+ data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+ added_folder = FolderGet.model_validate(data)
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _, meta, links = await assert_status(
resp, status.HTTP_200_OK, include_meta=True, include_links=True
)
assert len(data) == 1
- assert data[0]["folderId"] == added_folder["folderId"]
- assert data[0]["name"] == "My first folder"
+ assert data[0]["folderId"] == added_folder.folder_id
+ assert data[0]["name"] == added_folder.name
assert meta["count"] == 1
assert links
# get a user folder
- url = client.app.router["get_folder"].url_for(
- folder_id=f"{added_folder['folderId']}"
- )
+ url = client.app.router["get_folder"].url_for(folder_id=f"{added_folder.folder_id}")
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
- assert FolderGet.model_validate(data)
- assert data["folderId"] == added_folder["folderId"]
- assert data["name"] == "My first folder"
+ got_folder = FolderGet.model_validate(data)
+ assert got_folder.folder_id == added_folder.folder_id
+ assert got_folder.name == added_folder.name
# update a folder
url = client.app.router["replace_folder"].url_for(
- folder_id=f"{added_folder['folderId']}"
+ folder_id=f"{added_folder.folder_id}"
)
resp = await client.put(
- url.path,
- json={
- "name": "My Second folder",
- },
+ f"{url}",
+ json={"name": "My Second folder"},
)
data, _ = await assert_status(resp, status.HTTP_200_OK)
- assert FolderGet.model_validate(data)
+ updated_folder = FolderGet.model_validate(data)
+ assert updated_folder.folder_id == got_folder.folder_id
+ assert updated_folder.name != got_folder.name
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
assert data[0]["name"] == "My Second folder"
# delete a folder
url = client.app.router["delete_folder"].url_for(
- folder_id=f"{added_folder['folderId']}"
+ folder_id=f"{added_folder.folder_id}"
)
- resp = await client.delete(url.path)
+ resp = await client.delete(f"{url}")
data, _ = await assert_status(resp, status.HTTP_204_NO_CONTENT)
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert data == []
@@ -127,19 +130,19 @@ async def test_sub_folders_full_workflow(
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert data == []
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "My first folder"})
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
    # create a subfolder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My subfolder",
"parentFolderId": root_folder["folderId"],
@@ -149,14 +152,17 @@ async def test_sub_folders_full_workflow(
# list user root folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
assert data[0]["name"] == "My first folder"
# list user specific folder
- base_url = client.app.router["list_folders"].url_for()
- url = base_url.with_query({"folder_id": f"{subfolder_folder['folderId']}"})
+ url = (
+ client.app.router["list_folders"]
+ .url_for()
+ .with_query({"folder_id": f"{subfolder_folder['folderId']}"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 0
@@ -164,7 +170,7 @@ async def test_sub_folders_full_workflow(
# create a sub sub folder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My sub sub folder",
"parentFolderId": subfolder_folder["folderId"],
@@ -173,8 +179,11 @@ async def test_sub_folders_full_workflow(
subsubfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
# list user subfolder folders
- base_url = client.app.router["list_folders"].url_for()
- url = base_url.with_query({"folder_id": f"{subfolder_folder['folderId']}"})
+ url = (
+ client.app.router["list_folders"]
+ .url_for()
+ .with_query({"folder_id": f"{subfolder_folder['folderId']}"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
@@ -186,20 +195,20 @@ async def test_sub_folders_full_workflow(
folder_id=f"{subfolder_folder['folderId']}",
)
resp = await client.put(
- url.path,
+ f"{url}",
json={
"name": "My Updated Folder",
"parentFolderId": f"{subsubfolder_folder['folderId']}",
},
)
- await assert_status(resp, status.HTTP_400_BAD_REQUEST)
+ await assert_status(resp, status.HTTP_409_CONFLICT)
# move sub sub folder to root folder
url = client.app.router["replace_folder"].url_for(
folder_id=f"{subsubfolder_folder['folderId']}"
)
resp = await client.put(
- url.path,
+ f"{url}",
json={
"name": "My Updated Folder",
"parentFolderId": None,
@@ -209,8 +218,7 @@ async def test_sub_folders_full_workflow(
assert FolderGet.model_validate(data)
# list user root folders
- base_url = client.app.router["list_folders"].url_for()
- url = base_url.with_query({"folder_id": "null"})
+ url = client.app.router["list_folders"].url_for().with_query({"folder_id": "null"})
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 2
@@ -227,20 +235,20 @@ async def test_project_folder_movement_full_workflow(
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "My first folder"})
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
# add project to the folder
url = client.app.router["replace_project_folder"].url_for(
folder_id=f"{root_folder['folderId']}", project_id=f"{user_project['uuid']}"
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
# create a sub folder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My sub folder",
"parentFolderId": root_folder["folderId"],
@@ -252,14 +260,14 @@ async def test_project_folder_movement_full_workflow(
url = client.app.router["replace_project_folder"].url_for(
folder_id=f"{sub_folder['folderId']}", project_id=f"{user_project['uuid']}"
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
# move project to the root directory
url = client.app.router["replace_project_folder"].url_for(
folder_id="null", project_id=f"{user_project['uuid']}"
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
@@ -284,7 +292,7 @@ async def test_project_listing_inside_of_private_folder(
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "My first folder"})
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
original_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
# add project to the folder
@@ -292,12 +300,15 @@ async def test_project_listing_inside_of_private_folder(
folder_id=f"{original_user_folder['folderId']}",
project_id=f"{user_project['uuid']}",
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
# list project in user private folder
- base_url = client.app.router["list_projects"].url_for()
- url = base_url.with_query({"folder_id": f"{original_user_folder['folderId']}"})
+ url = (
+ client.app.router["list_projects"]
+ .url_for()
+ .with_query({"folder_id": f"{original_user_folder['folderId']}"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
@@ -308,28 +319,33 @@ async def test_project_listing_inside_of_private_folder(
# Create new user
async with LoggedUser(client) as new_logged_user:
# Try to list folder that user doesn't have access to
- base_url = client.app.router["list_projects"].url_for()
- url = base_url.with_query({"folder_id": f"{original_user_folder['folderId']}"})
- resp = await client.get(f"{url}")
- _, errors = await assert_status(
- resp,
- status.HTTP_403_FORBIDDEN,
+ url = (
+ client.app.router["list_projects"]
+ .url_for()
+ .with_query({"folder_id": f"{original_user_folder['folderId']}"})
)
+ resp = await client.get(f"{url}")
+ _, errors = await assert_status(resp, status.HTTP_403_FORBIDDEN)
assert errors
# Now we will share the project with the new user
await update_or_insert_project_group(
client.app,
project_id=user_project["uuid"],
- group_id=new_logged_user["primary_gid"],
+ group_id=TypeAdapter(GroupID).validate_python(
+ new_logged_user["primary_gid"]
+ ),
read=True,
write=True,
delete=False,
)
# list new user root folder
- base_url = client.app.router["list_projects"].url_for()
- url = base_url.with_query({"folder_id": "null"})
+ url = (
+ client.app.router["list_projects"]
+ .url_for()
+ .with_query({"folder_id": "null"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
@@ -339,7 +355,7 @@ async def test_project_listing_inside_of_private_folder(
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "New user folder"})
+ resp = await client.post(f"{url}", json={"name": "New user folder"})
new_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
# add project to the folder
@@ -347,12 +363,15 @@ async def test_project_listing_inside_of_private_folder(
folder_id=f"{new_user_folder['folderId']}",
project_id=f"{user_project['uuid']}",
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
# list new user specific folder
- base_url = client.app.router["list_projects"].url_for()
- url = base_url.with_query({"folder_id": f"{new_user_folder['folderId']}"})
+ url = (
+ client.app.router["list_projects"]
+ .url_for()
+ .with_query({"folder_id": f"{new_user_folder['folderId']}"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
@@ -394,14 +413,14 @@ async def test_folders_deletion(
# create a new folder
url = client.app.router["create_folder"].url_for()
- resp = await client.post(url.path, json={"name": "My first folder"})
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
assert FolderGet.model_validate(root_folder)
    # create a subfolder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My subfolder 1",
"parentFolderId": root_folder["folderId"],
@@ -412,7 +431,7 @@ async def test_folders_deletion(
    # create a subfolder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My subfolder 2",
"parentFolderId": root_folder["folderId"],
@@ -425,13 +444,13 @@ async def test_folders_deletion(
folder_id=f"{subfolder_2['folderId']}",
project_id=f"{user_project['uuid']}",
)
- resp = await client.put(url.path)
+ resp = await client.put(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
    # create a sub sub folder
url = client.app.router["create_folder"].url_for()
resp = await client.post(
- url.path,
+ f"{url}",
json={
"name": "My sub sub folder",
"parentFolderId": subfolder_1["folderId"],
@@ -441,21 +460,24 @@ async def test_folders_deletion(
# list user folders
url = client.app.router["list_folders"].url_for()
- resp = await client.get(url.path)
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
# list subfolder projects
- base_url = client.app.router["list_projects"].url_for()
- url = base_url.with_query({"folder_id": f"{subfolder_2['folderId']}"})
+ url = (
+ client.app.router["list_projects"]
+ .url_for()
+ .with_query({"folder_id": f"{subfolder_2['folderId']}"})
+ )
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 1
assert data[0]["uuid"] == user_project["uuid"]
# list root projects
- base_url = client.app.router["list_projects"].url_for()
- resp = await client.get(f"{base_url}")
+ url = client.app.router["list_projects"].url_for()
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 0
@@ -463,24 +485,27 @@ async def test_folders_deletion(
url = client.app.router["delete_folder"].url_for(
folder_id=f"{subfolder_1['folderId']}"
)
- resp = await client.delete(url.path)
+ resp = await client.delete(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
# delete a root folder
url = client.app.router["delete_folder"].url_for(
folder_id=f"{root_folder['folderId']}"
)
- resp = await client.delete(url.path)
+ resp = await client.delete(f"{url}")
await assert_status(resp, status.HTTP_204_NO_CONTENT)
- fire_and_forget_tasks = client.app[APP_FIRE_AND_FORGET_TASKS_KEY]
- t: asyncio.Task = next(iter(fire_and_forget_tasks))
- assert t.get_name().startswith("fire_and_forget_task_delete_project_task_")
- await t
+ fire_and_forget_task: asyncio.Task = next(
+ iter(client.app[APP_FIRE_AND_FORGET_TASKS_KEY])
+ )
+ assert fire_and_forget_task.get_name().startswith(
+ "fire_and_forget_task_delete_project_task_"
+ )
+ await fire_and_forget_task
assert len(client.app[APP_FIRE_AND_FORGET_TASKS_KEY]) == 0
# list root projects (The project should have been deleted)
- base_url = client.app.router["list_projects"].url_for()
- resp = await client.get(f"{base_url}")
+ url = client.app.router["list_projects"].url_for()
+ resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 0
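
> Note: the systematic change from `url.path` to `f"{url}"` in this file matters whenever a query string is attached: `.path` silently drops it, while string formatting keeps the full request target. Quick illustration with yarl:

```python
from yarl import URL

url = URL("/v0/folders").with_query({"folder_id": "42"})  # illustrative route
assert url.path == "/v0/folders"  # query string is lost
assert f"{url}" == "/v0/folders?folder_id=42"  # full request target preserved
```
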
diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
new file mode 100644
index 00000000000..74126da042f
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
@@ -0,0 +1,131 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+
+from http import HTTPStatus
+
+import pytest
+from aiohttp.test_utils import TestClient
+from models_library.api_schemas_webserver.folders_v2 import FolderGet
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
+ ExpectedResponse,
+ standard_role_response,
+)
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+from simcore_service_webserver.projects.models import ProjectDict
+
+
+@pytest.mark.parametrize(*standard_role_response(), ids=str)
+async def test_folders_user_role_permissions(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: ExpectedResponse,
+):
+ assert client.app
+
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ await assert_status(resp, expected.ok)
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_folders_full_search(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: HTTPStatus,
+):
+ assert client.app
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
+ root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+    # create a subfolder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ f"{url}",
+ json={
+ "name": "My subfolder",
+ "parentFolderId": root_folder["folderId"],
+ },
+ )
+ subfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 2
+
+ # create a sub sub folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ f"{url}",
+ json={
+ "name": "My sub sub folder",
+ "parentFolderId": subfolder_folder["folderId"],
+ },
+ )
+ subsubfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # move sub sub folder to root folder
+ url = client.app.router["replace_folder"].url_for(
+ folder_id=f"{subsubfolder_folder['folderId']}"
+ )
+ resp = await client.put(
+ f"{url}",
+ json={
+ "name": "My Updated Folder",
+ "parentFolderId": None,
+ },
+ )
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert FolderGet.parse_obj(data)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 3
+
+ # list full folder search with specific text
+ url = client.app.router["list_folders_full_search"].url_for()
+ query_parameters = {"text": "My subfolder"}
+ url_with_query = url.with_query(**query_parameters)
+ resp = await client.get(f"{url_with_query}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
+ # Create new user
+ async with LoggedUser(client) as new_logged_user:
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "New user folder"})
+ new_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
index 330913490ae..3cb82c2bf20 100644
--- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
+++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
@@ -62,10 +62,11 @@ def _assert_same_projects(got: dict, expected: dict):
"workbench",
"accessRights",
"ui",
+ "trashedExplicitly",
}
for key in expected:
if key not in exclude:
- assert got[key] == expected[key], "Failed in %s" % key
+ assert got[key] == expected[key], f"Failed in {key}"
def _is_user_authenticated(session: ClientSession) -> bool:
diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
index 10ef14a5df0..c2bbab0616a 100644
--- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
@@ -365,3 +365,98 @@ async def test_workspaces_delete_folders(
resp = await client.get(f"{url}")
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 0
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_listing_folders_and_projects_in_workspace__multiple_workspaces_created(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: HTTPStatus,
+ mock_catalog_api_get_services_for_user_in_product: MockerFixture,
+ fake_project: ProjectDict,
+ workspaces_clean_db: None,
+):
+ assert client.app
+
+ # create a new workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace_1, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # Create project in workspace
+ project_data = deepcopy(fake_project)
+ project_data["workspace_id"] = f"{added_workspace_1['workspaceId']}"
+ project = await create_project(
+ client.app,
+ project_data,
+ user_id=logged_user["id"],
+ product_name="osparc",
+ )
+
+ # Create folder in workspace
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "Original user folder",
+ "workspaceId": f"{added_workspace_1['workspaceId']}",
+ },
+ )
+ first_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # create a second workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace_2, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # Create project in the second workspace
+ project_data = deepcopy(fake_project)
+ project_data["workspace_id"] = f"{added_workspace_2['workspaceId']}"
+ project = await create_project(
+ client.app,
+ project_data,
+ user_id=logged_user["id"],
+ product_name="osparc",
+ )
+
+ # Create folder in the second workspace
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "Original user folder",
+ "workspaceId": f"{added_workspace_2['workspaceId']}",
+ },
+ )
+ second_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # List projects in workspace 1
+ base_url = client.app.router["list_projects"].url_for()
+ url = base_url.with_query({"workspace_id": f"{added_workspace_1['workspaceId']}"})
+ resp = await client.get(url)
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
+ # List folders in workspace 1
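+ # NOTE: query params travel as strings, so folder_id="null" selects the
+ # workspace's top-level folders (those without a parent folder)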
+ base_url = client.app.router["list_folders"].url_for()
+ url = base_url.with_query(
+ {"workspace_id": f"{added_workspace_1['workspaceId']}", "folder_id": "null"}
+ )
+ resp = await client.get(url)
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py
new file mode 100644
index 00000000000..3cfc1a78842
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py
@@ -0,0 +1,65 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+
+from http import HTTPStatus
+
+import pytest
+from aiohttp.test_utils import TestClient
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_workspaces__list_folders_full_search(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ expected: HTTPStatus,
+ workspaces_clean_db: None,
+):
+ assert client.app
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
+ root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
+ # create a new workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # create another folder (no workspaceId passed)
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(url.path, json={"name": "My first folder"})
+ second_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 2
diff --git a/tests/e2e/tutorials/sleepers_project_template_sql.csv b/tests/e2e/tutorials/sleepers_project_template_sql.csv
index 29c16a6f416..6dbcd7d2a26 100644
--- a/tests/e2e/tutorials/sleepers_project_template_sql.csv
+++ b/tests/e2e/tutorials/sleepers_project_template_sql.csv
@@ -1,2 +1,2 @@
-id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id,trashed_at
-10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false,,
+id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id,trashed_at,trashed_explicitly
+10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false,,,false
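
To sanity-check that the widened CSV header still lines up with the data row,
a throwaway stdlib sketch (run from the repo root; the path is the file
touched above):

    import csv
    from pathlib import Path

    csv_path = Path("tests/e2e/tutorials/sleepers_project_template_sql.csv")
    with csv_path.open(newline="") as f:
        row = next(csv.DictReader(f))
    assert row["trashed_explicitly"] == "false"  # the newly appended column
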
diff --git a/tests/performance/Makefile b/tests/performance/Makefile
index ead1e417d39..d41a60d7af8 100644
--- a/tests/performance/Makefile
+++ b/tests/performance/Makefile
@@ -12,7 +12,8 @@ export ENV_FILE
NETWORK_NAME=dashboards_timenet
# UTILS
-get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut --delimiter=" " --fields=1)
+# NOTE: BusyBox (Alpine) `cut` does not support GNU long options, so keep the short -d/-f forms that work on both Alpine and Ubuntu
+get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut -d " " -f 1)
# Check that given variables are set and all have non-empty values,
# die with an error otherwise.
@@ -28,6 +29,7 @@ __check_defined = \
$(error Undefined $1$(if $2, ($2))))
+
.PHONY: build
build: ## builds distributed osparc locust docker image
docker \
@@ -42,6 +44,8 @@ build: ## builds distributed osparc locust docker image
push:
docker push itisfoundation/locust:$(LOCUST_VERSION)
+
+
.PHONY: down
down: ## stops and removes osparc locust containers
docker compose --file docker-compose.yml down
@@ -55,6 +59,8 @@ test: ## runs osparc locust. Locust and test configuration are specified in ENV_
fi
docker compose --file docker-compose.yml up --scale worker=4 --exit-code-from=master
+
+
.PHONY: dashboards-up dashboards-down
dashboards-up: ## Create Grafana dashboard for inspecting locust results. See dashboard on localhost:3000
@@ -68,6 +74,8 @@ dashboards-up: ## Create Grafana dashboard for inspecting locust results. See da
dashboards-down:
@locust-compose down
+
+
.PHONY: install-ci install-dev
install-dev:
@@ -80,4 +88,4 @@ install-ci:
.PHONY: config
config:
@$(call check_defined, input, please define inputs when calling $@ - e.g. ```make $@ input="--help"```)
- @uv run locust_settings.py $(input) | tee .env
+ @uv run locust_settings.py $(input) | tee "${ENV_FILE}"
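
With the last hunk the output of `uv run locust_settings.py` is tee'd into the
file named by the exported ENV_FILE variable instead of a hard-coded `.env`,
so the `test` target reads exactly the settings that `config` generated.
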
diff --git a/tests/performance/locust_files/platform_ping_test.py b/tests/performance/locust_files/platform_ping_test.py
index 61cb0733458..c8839bb8c2b 100644
--- a/tests/performance/locust_files/platform_ping_test.py
+++ b/tests/performance/locust_files/platform_ping_test.py
@@ -19,7 +19,7 @@
assert locust_plugins # nosec
-class LocustAuth(BaseSettings):
+class MonitoringBasicAuth(BaseSettings):
SC_USER_NAME: str = Field(default=..., examples=[""])
SC_PASSWORD: str = Field(default=..., examples=[""])
@@ -27,7 +27,7 @@ class LocustAuth(BaseSettings):
class WebApiUser(FastHttpUser):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- _auth = LocustAuth()
+ _auth = MonitoringBasicAuth()
self.auth = (
_auth.SC_USER_NAME,
_auth.SC_PASSWORD,
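
For context on the rename: `MonitoringBasicAuth` stays a plain settings class,
so both fields are required and are resolved from the environment when the
class is instantiated. A self-contained sketch of that behavior (illustrative
credentials only; assumes pydantic v2 with pydantic-settings, as the
dependency list in locust_settings.py below suggests):

    import os

    from pydantic import Field
    from pydantic_settings import BaseSettings

    class MonitoringBasicAuth(BaseSettings):
        SC_USER_NAME: str = Field(default=...)  # default=... keeps the field required
        SC_PASSWORD: str = Field(default=...)

    os.environ["SC_USER_NAME"] = "monitoring-user"  # hypothetical values
    os.environ["SC_PASSWORD"] = "monitoring-secret"
    auth = MonitoringBasicAuth()
    assert (auth.SC_USER_NAME, auth.SC_PASSWORD) == ("monitoring-user", "monitoring-secret")
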
diff --git a/tests/performance/locust_settings.py b/tests/performance/locust_settings.py
index 24f896180fd..48c219871fe 100644
--- a/tests/performance/locust_settings.py
+++ b/tests/performance/locust_settings.py
@@ -1,10 +1,21 @@
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+# "parse",
+# "pydantic",
+# "pydantic-settings",
+# ]
+# ///
# pylint: disable=unused-argument
# pylint: disable=no-self-use
# pylint: disable=no-name-in-module
+import importlib.util
+import inspect
import json
from datetime import timedelta
from pathlib import Path
+from types import ModuleType
from typing import Final
from parse import Result, parse
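
The new `# /// script` header is PEP 723 inline script metadata: invoking the
file through `uv run locust_settings.py` (as the Makefile's `config` target
does) lets uv read that block and provision `parse`, `pydantic` and
`pydantic-settings` into an ephemeral environment, so the script runs without
a pre-built virtualenv.
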
@@ -26,6 +37,37 @@
assert _LOCUST_FILES_DIR.is_dir()
+def _check_load_and_instantiate_settings_classes(file_path: str):
+ module_name = Path(file_path).stem
+ spec = importlib.util.spec_from_file_location(module_name, file_path)
+ if spec is None or spec.loader is None:
+ msg = f"Invalid {file_path=}"
+ raise ValueError(msg)
+
+ module: ModuleType = importlib.util.module_from_spec(spec)
+
+ # Execute the module in its own namespace
+ try:
+ spec.loader.exec_module(module)
+ except Exception as e:
+ msg = f"Failed to load module {module_name} from {file_path}"
+ raise ValueError(msg) from e
+
+ # Filter subclasses of BaseSettings
+ settings_classes = [
+ obj
+ for _, obj in inspect.getmembers(module, inspect.isclass)
+ if issubclass(obj, BaseSettings) and obj is not BaseSettings
+ ]
+
+ for settings_class in settings_classes:
+ try:
+ settings_class()
+ except Exception as e:
+ msg = f"Missing env vars for {settings_class.__name__} in {file_path=}: {e}"
+ raise ValueError(msg) from e
+
+
class LocustSettings(BaseSettings):
model_config = SettingsConfigDict(cli_parse_args=True)
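
A hand-run sketch of the new checker (hypothetical invocation; the call inside
`_validate_locust_file` below is still commented out): it imports the given
locust file, collects every `BaseSettings` subclass found in its namespace,
and instantiates each one so that missing env-vars surface as a `ValueError`
before locust starts:

    # run from tests/performance with the required env-vars exported
    _check_load_and_instantiate_settings_classes("locust_files/platform_ping_test.py")
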
@@ -44,8 +86,8 @@ class LocustSettings(BaseSettings):
LOCUST_RUN_TIME: timedelta
LOCUST_SPAWN_RATE: PositiveInt = Field(default=20)
- # Options for Timescale + Grafana Dashboards
- # SEE https://github.com/SvenskaSpel/locust-plugins/blob/master/locust_plugins/timescale/
+ # Timescale: Log and graph results using TimescaleDB and Grafana dashboards
+ # SEE https://github.com/SvenskaSpel/locust-plugins/tree/master/locust_plugins/dashboards
#
LOCUST_TIMESCALE: NonNegativeInt = Field(
default=1,
@@ -87,6 +129,10 @@ def _validate_locust_file(cls, v: Path) -> Path:
if not v.is_relative_to(_LOCUST_FILES_DIR):
msg = f"{v} must be a test file relative to {_LOCUST_FILES_DIR}"
raise ValueError(msg)
+
+ # NOTE: checks that all env-vars required by the settings classes in
+ # this test file are defined (currently disabled)
+ # _check_load_and_instantiate_settings_classes(f"{v}")
+
return v.relative_to(_TEST_DIR)
@field_serializer("LOCUST_RUN_TIME")
diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt
index 5aeffad83c4..c84c4e35f0d 100644
--- a/tests/swarm-deploy/requirements/_test.txt
+++ b/tests/swarm-deploy/requirements/_test.txt
@@ -235,6 +235,10 @@ opentelemetry-api==1.27.0
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-instrumentation
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
# opentelemetry-semantic-conventions
@@ -251,7 +255,22 @@ opentelemetry-exporter-otlp-proto-grpc==1.27.0
opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.48b0
- # via opentelemetry-instrumentation-requests
+ # via
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
+ # opentelemetry-instrumentation-requests
+opentelemetry-instrumentation-aiopg==0.48b0
+ # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in
+opentelemetry-instrumentation-asyncpg==0.48b0
+ # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in
+opentelemetry-instrumentation-dbapi==0.48b0
+ # via opentelemetry-instrumentation-aiopg
+opentelemetry-instrumentation-redis==0.48b0
+ # via
+ # -r requirements/../../../packages/service-library/requirements/_base.in
+ # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -269,6 +288,9 @@ opentelemetry-sdk==1.27.0
# opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.48b0
# via
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
opentelemetry-util-http==0.48b0
@@ -675,6 +697,9 @@ wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
yarl==1.12.1
# via
# -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in