diff --git a/.env-devel b/.env-devel index 8ae1e5856d5..17aba60a7dd 100644 --- a/.env-devel +++ b/.env-devel @@ -309,6 +309,7 @@ LOGIN_ACCOUNT_DELETION_RETENTION_DAYS=31 LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0 LOGIN_REGISTRATION_INVITATION_REQUIRED=0 PROJECTS_INACTIVITY_INTERVAL=20 +PROJECTS_TRASH_RETENTION_DAYS=7 PROJECTS_MAX_COPY_SIZE_BYTES=30Gib PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES=5 REST_SWAGGER_API_DOC_ENABLED=1 diff --git a/api/specs/web-server/_projects_crud.py b/api/specs/web-server/_projects_crud.py index 640abe4f2b6..aad8fa82760 100644 --- a/api/specs/web-server/_projects_crud.py +++ b/api/specs/web-server/_projects_crud.py @@ -83,6 +83,7 @@ async def list_projects( example='{"field": "last_change_date", "direction": "desc"}', ), ] = '{"field": "last_change_date", "direction": "desc"}', + filters: Annotated[Json | None, Query()] = None, ): ... diff --git a/api/specs/web-server/_trash.py b/api/specs/web-server/_trash.py new file mode 100644 index 00000000000..cdde2b8c32f --- /dev/null +++ b/api/specs/web-server/_trash.py @@ -0,0 +1,61 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from enum import Enum +from typing import Annotated + +from fastapi import APIRouter, Depends, status +from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.projects._trash_handlers import ( + ProjectPathParams, + RemoveQueryParams, +) + +router = APIRouter( + prefix=f"/{API_VTAG}", + tags=["trash"], +) + + +@router.delete( + "/trash", + status_code=status.HTTP_204_NO_CONTENT, +) +def empty_trash(): + ... + + +_extra_tags: list[str | Enum] = ["projects"] + + +@router.post( + "/projects/{project_id}:trash", + tags=_extra_tags, + status_code=status.HTTP_204_NO_CONTENT, + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Not such a project"}, + status.HTTP_409_CONFLICT: { + "description": "Project is in use and cannot be trashed" + }, + status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"}, + }, +) +def trash_project( + _p: Annotated[ProjectPathParams, Depends()], + _q: Annotated[RemoveQueryParams, Depends()], +): + ... + + +@router.post( + "/projects/{project_id}:untrash", + tags=_extra_tags, + status_code=status.HTTP_204_NO_CONTENT, +) +def untrash_project( + _p: Annotated[ProjectPathParams, Depends()], +): + ... 
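The spec above adds three endpoints: DELETE /v0/trash, POST /v0/projects/{project_id}:trash (with an optional force query parameter) and POST /v0/projects/{project_id}:untrash. As a rough orientation, a client-side round trip could look like the sketch below. This is not code from this PR: base_url, cookies and project_id are placeholders, the session is assumed to be already logged in, and DELETE /v0/trash is left out because its backend (empty_trash) is still a stub at this point.

import httpx


def trash_roundtrip(base_url: str, cookies: httpx.Cookies, project_id: str) -> None:
    with httpx.Client(base_url=base_url, cookies=cookies) as client:
        # move the project to the trash; force=true stops running services first
        resp = client.post(f"/v0/projects/{project_id}:trash", params={"force": "true"})
        assert resp.status_code == 204

        # restore the project from the trash (clears trashed_at)
        resp = client.post(f"/v0/projects/{project_id}:untrash")
        assert resp.status_code == 204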
diff --git a/api/specs/web-server/openapi.py b/api/specs/web-server/openapi.py index da372d3c7c9..29a7bbedbab 100644 --- a/api/specs/web-server/openapi.py +++ b/api/specs/web-server/openapi.py @@ -55,6 +55,7 @@ "_resource_usage", "_statics", "_storage", + "_trash", "_version_control", "_workspaces", # maintenance ---- diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py index e2d4918c435..4398f1377f7 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py @@ -1,14 +1,14 @@ from datetime import datetime from typing import NamedTuple -from models_library.access_rights import AccessRights -from models_library.basic_types import IDStr -from models_library.folders import FolderID -from models_library.users import GroupID -from models_library.utils.common_validators import null_or_none_str_to_none_validator -from models_library.workspaces import WorkspaceID from pydantic import Extra, PositiveInt, validator +from ..access_rights import AccessRights +from ..basic_types import IDStr +from ..folders import FolderID +from ..users import GroupID +from ..utils.common_validators import null_or_none_str_to_none_validator +from ..workspaces import WorkspaceID from ._base import InputSchema, OutputSchema diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 601ac1e6d15..6d655ca2168 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -5,6 +5,7 @@ """ +from datetime import datetime from typing import Any, Literal, TypeAlias from models_library.folders import FolderID @@ -85,6 +86,7 @@ class ProjectGet(OutputSchema): permalink: ProjectPermalink = FieldNotRequired() workspace_id: WorkspaceID | None folder_id: FolderID | None + trashed_at: datetime | None _empty_description = validator("description", allow_reuse=True, pre=True)( none_to_empty_str_pre_validator diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 6f62457272c..af2d99dc003 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -186,6 +186,11 @@ class Project(BaseProjectModel): alias="folderId", ) + trashed_at: datetime | None = Field( + default=None, + alias="trashedAt", + ) + class Config: description = "Document that stores metadata, pipeline and UI setup of a study" title = "osparc-simcore project" diff --git a/packages/models-library/src/models_library/rest_filters.py b/packages/models-library/src/models_library/rest_filters.py index b675968e559..70a1aeb777d 100644 --- a/packages/models-library/src/models_library/rest_filters.py +++ b/packages/models-library/src/models_library/rest_filters.py @@ -1,7 +1,22 @@ -from pydantic import BaseModel +from typing import Generic, TypeVar + +from pydantic import BaseModel, Field, Json +from pydantic.generics import GenericModel class Filters(BaseModel): - """inspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList. + """ Encoded as JSON. 
Each available filter can have its own logic (should be well documented) + Inspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList. """ + + +# Custom filter +FilterT = TypeVar("FilterT", bound=Filters) + + +class FiltersQueryParameters(GenericModel, Generic[FilterT]): + filters: Json[FilterT] | None = Field( # pylint: disable=unsubscriptable-object + default=None, + description="Custom filter query parameter encoded as JSON", + ) diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index a28add967a6..7036b1e28dc 100644 --- a/packages/models-library/src/models_library/users.py +++ b/packages/models-library/src/models_library/users.py @@ -1,8 +1,10 @@ from typing import TypeAlias +from models_library.basic_types import IDStr from pydantic import BaseModel, ConstrainedStr, Field, PositiveInt UserID: TypeAlias = PositiveInt +UserNameID: TypeAlias = IDStr GroupID: TypeAlias = PositiveInt diff --git a/packages/models-library/tests/test_rest_filters.py b/packages/models-library/tests/test_rest_filters.py new file mode 100644 index 00000000000..0a46bd3a25b --- /dev/null +++ b/packages/models-library/tests/test_rest_filters.py @@ -0,0 +1,65 @@ +import logging + +import pytest +from models_library.rest_filters import Filters, FiltersQueryParameters +from pydantic import Extra, ValidationError + + +# 1. create filter model +class CustomFilter(Filters): + is_trashed: bool | None = None + is_hidden: bool | None = None + + +class CustomFilterStrict(CustomFilter): + class Config(CustomFilter.Config): + extra = Extra.forbid + + +def test_custom_filter_query_parameters(): + + # 2. use generic as query parameters + logging.info( + "json schema is for the query \n %s", + FiltersQueryParameters[CustomFilter].schema_json(indent=1), + ) + + # lets filter only is_trashed and unset is_hidden + custom_filter = CustomFilter(is_trashed=True) + assert custom_filter.json() == '{"is_trashed": true, "is_hidden": null}' + + # default to None (optional) + query_param = FiltersQueryParameters[CustomFilter]() + assert query_param.filters is None + + +@pytest.mark.parametrize( + "url_query_value,expects", + [ + ('{"is_trashed": true, "is_hidden": null}', CustomFilter(is_trashed=True)), + ('{"is_trashed": true}', CustomFilter(is_trashed=True)), + (None, None), + ], +) +def test_valid_filter_queries( + url_query_value: str | None, expects: CustomFilter | None +): + query_param = FiltersQueryParameters[CustomFilter](filters=url_query_value) + assert query_param.filters == expects + + +def test_invalid_filter_query_is_ignored(): + # NOTE: invalid filter get ignored! 
+ url_query_value = '{"undefined_filter": true, "is_hidden": true}' + + query_param = FiltersQueryParameters[CustomFilter](filters=url_query_value) + assert query_param.filters == CustomFilter(is_hidden=True) + + +@pytest.mark.xfail +def test_invalid_filter_query_fails(): + # NOTE: this should fail according to pydantic manual but it does not + url_query_value = '{"undefined_filter": true, "is_hidden": true}' + + with pytest.raises(ValidationError): + FiltersQueryParameters[CustomFilterStrict](filters=url_query_value) diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/fce5d231e16d_new_projects_trashed_at.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/fce5d231e16d_new_projects_trashed_at.py new file mode 100644 index 00000000000..200013e92ec --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/fce5d231e16d_new_projects_trashed_at.py @@ -0,0 +1,29 @@ +"""new projects trashed_at + +Revision ID: fce5d231e16d +Revises: ea3952fe5a0e +Create Date: 2024-10-23 14:32:32.350937+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "fce5d231e16d" +down_revision = "ea3952fe5a0e" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects", sa.Column("trashed_at", sa.DateTime(timezone=True), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("projects", "trashed_at") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py index ae77ea5c5d0..629113f06dc 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py @@ -141,6 +141,12 @@ class ProjectType(enum.Enum): default=False, doc="If true, the project is by default not listed in the API", ), + sa.Column( + "trashed_at", + sa.DateTime(timezone=True), + nullable=True, + doc="Timestamp indicating when the project was marked as trashed, or null otherwise.", + ), sa.Column( "workspace_id", sa.BigInteger, diff --git a/packages/postgres-database/tests/test_utils_projects.py b/packages/postgres-database/tests/test_utils_projects.py index 9990a8a6dda..9af318fca38 100644 --- a/packages/postgres-database/tests/test_utils_projects.py +++ b/packages/postgres-database/tests/test_utils_projects.py @@ -4,25 +4,27 @@ # pylint: disable=too-many-arguments import uuid from collections.abc import Awaitable, Callable -from datetime import datetime +from datetime import datetime, timezone from typing import Any, AsyncIterator import pytest -import sqlalchemy +import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from faker import Faker +from pydantic import parse_obj_as from simcore_postgres_database.models.projects import projects from simcore_postgres_database.utils_projects import ( DBProjectNotFoundError, ProjectsRepo, ) +from simcore_postgres_database.utils_repos import transaction_context from sqlalchemy.ext.asyncio import AsyncEngine async def _delete_project(connection: SAConnection, project_uuid: uuid.UUID) -> None: result = await connection.execute( - 
sqlalchemy.delete(projects).where(projects.c.uuid == f"{project_uuid}") + sa.delete(projects).where(projects.c.uuid == f"{project_uuid}") ) assert result.rowcount == 1 @@ -51,6 +53,26 @@ async def registered_project( await _delete_project(connection, project["uuid"]) +@pytest.mark.parametrize("expected", (datetime.now(tz=timezone.utc), None)) +async def test_get_project_trashed_at_column_can_be_converted_to_datetime( + asyncpg_engine: AsyncEngine, registered_project: dict, expected: datetime | None +): + project_id = registered_project["uuid"] + + async with transaction_context(asyncpg_engine) as conn: + result = await conn.execute( + projects.update() + .values(trashed_at=expected) + .where(projects.c.uuid == project_id) + .returning(sa.literal_column("*")) + ) + + row = result.fetchone() + + trashed_at = parse_obj_as(datetime | None, row.trashed_at) + assert trashed_at == expected + + async def test_get_project_last_change_date( asyncpg_engine: AsyncEngine, registered_project: dict, faker: Faker ): diff --git a/packages/service-library/src/servicelib/project_lock.py b/packages/service-library/src/servicelib/project_lock.py index 072282dc955..e1a275dcfc7 100644 --- a/packages/service-library/src/servicelib/project_lock.py +++ b/packages/service-library/src/servicelib/project_lock.py @@ -3,7 +3,7 @@ from asyncio.log import logger from collections.abc import AsyncIterator from contextlib import asynccontextmanager -from typing import Final +from typing import Final, TypeAlias import redis import redis.exceptions @@ -21,7 +21,7 @@ PROJECT_LOCK_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=10) ProjectLock = Lock -ProjectLockError = redis.exceptions.LockError +ProjectLockError: TypeAlias = redis.exceptions.LockError async def _auto_extend_project_lock(project_lock: Lock) -> None: diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 8c8fe03e217..0b1b7f460cd 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -755,9 +755,10 @@ services: # WEBSERVER_PROJECTS WEBSERVER_PROJECTS: ${WEBSERVER_PROJECTS} + PROJECTS_INACTIVITY_INTERVAL: ${PROJECTS_INACTIVITY_INTERVAL} PROJECTS_MAX_COPY_SIZE_BYTES: ${PROJECTS_MAX_COPY_SIZE_BYTES} PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES: ${PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES} - PROJECTS_INACTIVITY_INTERVAL: ${PROJECTS_INACTIVITY_INTERVAL} + PROJECTS_TRASH_RETENTION_DAYS: ${PROJECTS_TRASH_RETENTION_DAYS} # WEBSERVER_RABBITMQ RABBIT_HOST: ${RABBIT_HOST} diff --git a/services/web/server/VERSION b/services/web/server/VERSION index f8287cf9564..a8ab6c9666a 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.43.1 +0.44.0 diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index a98c8f71307..df540ac4b88 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -120,7 +120,7 @@ certifi==2023.7.22 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt # requests -cffi==1.15.0 +cffi==1.17.1 # via cryptography charset-normalizer==2.0.12 # via @@ -257,7 +257,7 @@ mdurl==0.1.2 # via markdown-it-py msgpack==1.0.7 # via -r requirements/_base.in -multidict==6.0.2 +multidict==6.1.0 # via # aiohttp # yarl diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 8b547336038..3aab7cde47d 100644 --- 
a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -97,7 +97,7 @@ markupsafe==2.1.1 # via # -c requirements/_base.txt # mako -multidict==6.0.2 +multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index 4e3589a674b..ab412830c97 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.43.1 +current_version = 0.44.0 commit = True message = services/webserver api version: {current_version} → {new_version} tag = False diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index fe4508f89f2..0df5e076a76 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.2 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.43.1 + version: 0.44.0 servers: - url: '' description: webserver @@ -3039,6 +3039,13 @@ paths: example: '{"field": "last_change_date", "direction": "desc"}' name: order_by in: query + - required: false + schema: + title: Filters + type: string + format: json-string + name: filters + in: query - required: false schema: title: Limit @@ -5328,6 +5335,64 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_FileUploadCompleteFutureResponse_' + /v0/trash: + delete: + tags: + - trash + summary: Empty Trash + operationId: empty_trash + responses: + '204': + description: Successful Response + /v0/projects/{project_id}:trash: + post: + tags: + - trash + - projects + summary: Trash Project + operationId: trash_project + parameters: + - required: true + schema: + title: Project Id + type: string + format: uuid + name: project_id + in: path + - required: false + schema: + title: Force + type: boolean + default: false + name: force + in: query + responses: + '204': + description: Successful Response + '404': + description: Not such a project + '409': + description: Project is in use and cannot be trashed + '503': + description: Trash service error + /v0/projects/{project_id}:untrash: + post: + tags: + - trash + - projects + summary: Untrash Project + operationId: untrash_project + parameters: + - required: true + schema: + title: Project Id + type: string + format: uuid + name: project_id + in: path + responses: + '204': + description: Successful Response /v0/repos/projects: get: tags: @@ -10294,6 +10359,10 @@ components: exclusiveMinimum: true type: integer minimum: 0 + trashedAt: + title: Trashedat + type: string + format: date-time ProjectGroupGet: title: ProjectGroupGet required: @@ -10565,6 +10634,10 @@ components: exclusiveMinimum: true type: integer minimum: 0 + trashedAt: + title: Trashedat + type: string + format: date-time ProjectLocked: title: ProjectLocked required: diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index eefcc4869fc..617447c134a 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -387,7 +387,10 @@ def to_client_statics(self) -> dict[str, Any]: "SIMCORE_VCS_RELEASE_TAG": True, "SIMCORE_VCS_RELEASE_URL": True, 
"SWARM_STACK_NAME": True, - "WEBSERVER_PROJECTS": {"PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES"}, + "WEBSERVER_PROJECTS": { + "PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES", + "PROJECTS_TRASH_RETENTION_DAYS", + }, "WEBSERVER_LOGIN": { "LOGIN_ACCOUNT_DELETION_RETENTION_DAYS", "LOGIN_2FA_REQUIRED", diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py index 950d92fa2eb..5b8a69e33c3 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py @@ -196,17 +196,37 @@ async def get_computation_task( @log_decorator(logger=_logger) -async def delete_pipeline( - app: web.Application, user_id: PositiveInt, project_id: UUID -) -> None: +async def stop_pipeline( + app: web.Application, *, user_id: PositiveInt, project_id: ProjectID +): settings: DirectorV2Settings = get_plugin_settings(app) + await request_director_v2( + app, + "POST", + url=settings.base_url / f"computations/{project_id}:stop", + expected_status=web.HTTPAccepted, + data={"user_id": user_id}, + ) - backend_url = settings.base_url / f"computations/{project_id}" - body = {"user_id": user_id, "force": True} - # request to director-v2 +@log_decorator(logger=_logger) +async def delete_pipeline( + app: web.Application, + user_id: PositiveInt, + project_id: ProjectID, + *, + force: bool = True, +) -> None: + settings: DirectorV2Settings = get_plugin_settings(app) await request_director_v2( - app, "DELETE", backend_url, expected_status=web.HTTPNoContent, data=body + app, + "DELETE", + url=settings.base_url / f"computations/{project_id}", + expected_status=web.HTTPNoContent, + data={ + "user_id": user_id, + "force": force, + }, ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/api.py b/services/web/server/src/simcore_service_webserver/director_v2/api.py index a0b40e01b84..4d1efd822f6 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/api.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/api.py @@ -22,6 +22,7 @@ list_clusters, ping_cluster, ping_specific_cluster, + stop_pipeline, update_cluster, ) from ._core_dynamic_services import ( @@ -65,6 +66,7 @@ "restart_dynamic_service", "retrieve", "set_project_run_policy", + "stop_pipeline", "update_cluster", "update_dynamic_service_networks_in_project", ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py new file mode 100644 index 00000000000..a86b71fd3f7 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py @@ -0,0 +1,66 @@ +""" + Scheduled tasks addressing users + +""" + +import asyncio +import logging +from collections.abc import AsyncIterator, Callable + +from aiohttp import web +from tenacity import retry +from tenacity.before_sleep import before_sleep_log +from tenacity.wait import wait_exponential + +from ..projects._trash_api import prune_all_trashes + +_logger = logging.getLogger(__name__) + +CleanupContextFunc = Callable[[web.Application], AsyncIterator[None]] + + +_PERIODIC_TASK_NAME = f"{__name__}" +_APP_TASK_KEY = f"{_PERIODIC_TASK_NAME}.task" + + +@retry( + wait=wait_exponential(min=5, max=20), + before_sleep=before_sleep_log(_logger, logging.WARNING), +) +async def _run_task(app: 
web.Application): + if deleted := await prune_all_trashes(app): + for name in deleted: + _logger.info("Trash item %s expired and was deleted", f"{name}") + else: + _logger.info("No trash items expired") + + +async def _run_periodically(app: web.Application, wait_interval_s: float): + while True: + await _run_task(app) + await asyncio.sleep(wait_interval_s) + + +def create_background_task_to_prune_trash( + wait_s: float, task_name: str = _PERIODIC_TASK_NAME +) -> CleanupContextFunc: + async def _cleanup_ctx_fun( + app: web.Application, + ) -> AsyncIterator[None]: + # setup + task = asyncio.create_task( + _run_periodically(app, wait_s), + name=task_name, + ) + app[_APP_TASK_KEY] = task + + yield + + # tear-down + task.cancel() + try: + await task + except asyncio.CancelledError: + assert task.cancelled() # nosec + + return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py index c4b62d7424d..3e76c6c947c 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py @@ -3,6 +3,9 @@ from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.logging_utils import set_parent_module_log_level +from simcore_service_webserver.garbage_collector._tasks_trash import ( + create_background_task_to_prune_trash, +) from ..application_settings import get_application_settings from ..login.plugin import setup_login_storage @@ -38,6 +41,8 @@ def setup_garbage_collector(app: web.Application) -> None: _logger.name, min(logging.INFO, get_application_settings(app).log_level) ) + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/6592 + # NOTE: scaling web-servers will lead to having multiple tasks upgrading the db # not a huge deal. Instead this task runs in the GC. 
# If more tasks of this nature are needed, we should setup some sort of registration mechanism @@ -48,3 +53,5 @@ def setup_garbage_collector(app: web.Application) -> None: # SEE https://github.com/ITISFoundation/osparc-issues/issues/705 wait_period_s = settings.GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S app.cleanup_ctx.append(create_background_task_to_prune_api_keys(wait_period_s)) + + app.cleanup_ctx.append(create_background_task_to_prune_trash(wait_period_s)) diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py index 7e9e681710d..1fd2bc90871 100644 --- a/services/web/server/src/simcore_service_webserver/login/decorators.py +++ b/services/web/server/src/simcore_service_webserver/login/decorators.py @@ -1,7 +1,9 @@ import functools import inspect +from typing import cast from aiohttp import web +from models_library.users import UserID from servicelib.aiohttp.typing_extension import HandlerAnyReturn from servicelib.request_keys import RQT_USERID_KEY @@ -69,3 +71,7 @@ async def _wrapper(request: web.Request): return await handler(request) return _wrapper + + +def get_user_id(request: web.Request) -> UserID: + return cast(UserID, request[RQT_USERID_KEY]) diff --git a/services/web/server/src/simcore_service_webserver/projects/_common_models.py b/services/web/server/src/simcore_service_webserver/projects/_common_models.py index d25a0f6c24b..073c012a8ac 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_common_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_common_models.py @@ -23,3 +23,9 @@ class ProjectPathParams(BaseModel): class Config: allow_population_by_field_name = True extra = Extra.forbid + + +class RemoveQueryParams(BaseModel): + force: bool = Field( + default=False, description="Force removal (even if resource is active)" + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index 21802e9841d..f6c98c6e08e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -64,14 +64,20 @@ async def list_projects( # pylint: disable=too-many-arguments request: web.Request, user_id: UserID, product_name: str, + *, + # hierachy filter + workspace_id: WorkspaceID | None, + folder_id: FolderID | None, + # attrs filter project_type: ProjectTypeAPI, show_hidden: bool, + trashed: bool | None, + # pagination offset: NonNegativeInt, limit: int, - search: str | None, order_by: OrderBy, - folder_id: FolderID | None, - workspace_id: WorkspaceID | None, + # search + search: str | None, ) -> tuple[list[ProjectDict], int]: app = request.app db = ProjectDBAPI.get_from_app_context(app) @@ -104,15 +110,20 @@ async def list_projects( # pylint: disable=too-many-arguments db_projects, db_project_types, total_number_projects = await db.list_projects( product_name=product_name, user_id=user_id, + workspace_id=workspace_id, + folder_id=folder_id, + # attrs filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), filter_by_services=user_available_services, + trashed=trashed, + hidden=show_hidden, + # composed attrs + search=search, + # pagination offset=offset, limit=limit, - include_hidden=show_hidden, - search=search, + # order order_by=order_by, - folder_id=folder_id, - workspace_id=workspace_id, ) # If workspace, 
override project access rights diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index 9e5a7667b7d..7500a6a4d26 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -54,6 +54,7 @@ ProjectActiveParams, ProjectCreateHeaders, ProjectCreateParams, + ProjectFilters, ProjectListFullSearchWithJsonStrParams, ProjectListWithJsonStrParams, ) @@ -191,12 +192,18 @@ async def list_projects(request: web.Request): ProjectListWithJsonStrParams, request ) + if not query_params.filters: + query_params.filters = ProjectFilters() + + assert query_params.filters # nosec + projects, total_number_of_projects = await _crud_api_read.list_projects( request, user_id=req_ctx.user_id, product_name=req_ctx.product_name, project_type=query_params.project_type, show_hidden=query_params.show_hidden, + trashed=query_params.filters.trashed, limit=query_params.limit, offset=query_params.offset, search=query_params.search, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py index 2fdef2fb3e2..b1c499fd3a9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py @@ -10,6 +10,7 @@ from models_library.folders import FolderID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from models_library.rest_filters import Filters, FiltersQueryParameters from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageQueryParameters from models_library.utils.common_validators import ( @@ -96,6 +97,13 @@ class Config: extra = Extra.forbid +class ProjectFilters(Filters): + trashed: bool | None = Field( + default=False, + description="Set to true to list trashed, false to list non-trashed (default), None to list all", + ) + + class ProjectListParams(PageQueryParameters): project_type: ProjectTypeAPI = Field(default=ProjectTypeAPI.all, alias="type") show_hidden: bool = Field( @@ -132,7 +140,7 @@ def search_check_empty_string(cls, v): )(null_or_none_str_to_none_validator) -class ProjectListWithOrderByParams(BaseModel): +class ProjectListSortParams(BaseModel): order_by: Json[OrderBy] = Field( # pylint: disable=unsubscriptable-object default=OrderBy(field=IDStr("last_change_date"), direction=OrderDirection.DESC), description="Order by field (type|uuid|name|description|prj_owner|creation_date|last_change_date) and direction (asc|desc). The default sorting order is ascending.", @@ -160,7 +168,9 @@ class Config: extra = Extra.forbid -class ProjectListWithJsonStrParams(ProjectListParams, ProjectListWithOrderByParams): +class ProjectListWithJsonStrParams( + ProjectListParams, ProjectListSortParams, FiltersQueryParameters[ProjectFilters] +): ... 
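Taken together, ProjectListWithJsonStrParams now parses a filters query parameter typed as ProjectFilters, so a request such as GET /v0/projects?filters={"trashed": true} (URL-encoded) arrives as a validated model whose trashed flag is later handed to db.list_projects. A tiny sketch of just that validation step, mirroring the new unit test; the import of ProjectFilters points at the module being modified here:

from models_library.rest_filters import FiltersQueryParameters
from simcore_service_webserver.projects._crud_handlers_models import ProjectFilters

# raw value of the "filters" query parameter after URL-decoding
query_params = FiltersQueryParameters[ProjectFilters](filters='{"trashed": true}')
assert query_params.filters is not None
assert query_params.filters.trashed is True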
@@ -187,7 +197,7 @@ class ProjectListFullSearchParams(PageQueryParameters): class ProjectListFullSearchWithJsonStrParams( - ProjectListFullSearchParams, ProjectListWithOrderByParams + ProjectListFullSearchParams, ProjectListSortParams ): def tag_ids_list(self) -> list[int]: try: diff --git a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py index 8bda162ab6f..35be5c4056c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py @@ -86,7 +86,7 @@ def convert_to_schema_names( if key in DB_EXCLUSIVE_COLUMNS: continue converted_value = value - if isinstance(value, datetime): + if isinstance(value, datetime) and key not in {"trashed_at"}: converted_value = format_datetime(value) elif key == "prj_owner": # this entry has to be converted to the owner e-mail address diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_api.py b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py new file mode 100644 index 00000000000..6469375c853 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py @@ -0,0 +1,140 @@ +import asyncio +import logging +from datetime import timedelta + +import arrow +from aiohttp import web +from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.users import UserID +from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.utils import fire_and_forget_task + +from ..director_v2 import api as director_v2_api +from . 
import projects_api +from ._access_rights_api import check_user_project_permission +from .exceptions import ProjectRunningConflictError +from .models import ProjectPatchExtended +from .settings import get_plugin_settings + +_logger = logging.getLogger(__name__) + + +async def empty_trash(app: web.Application, product_name: ProductName, user_id: UserID): + assert app # nosec + # filter trashed=True and set them to False + _logger.debug( + "CODE PLACEHOLDER: all projects marked as trashed of %s in %s are deleted", + f"{user_id=}", + f"{product_name=}", + ) + raise NotImplementedError + + +async def prune_all_trashes(app: web.Application) -> list[str]: + settings = get_plugin_settings(app) + retention = timedelta(days=settings.PROJECTS_TRASH_RETENTION_DAYS) + + _logger.debug( + "CODE PLACEHOLDER: **ALL** projects marked as trashed during %s days are deleted", + retention, + ) + await asyncio.sleep(5) + + return [] + + +async def _is_project_running( + app: web.Application, + *, + user_id: UserID, + project_id: ProjectID, +) -> bool: + return bool( + await director_v2_api.is_pipeline_running( + app, user_id=user_id, project_id=project_id + ) + ) or bool( + await director_v2_api.list_dynamic_services( + app, user_id=user_id, project_id=f"{project_id}" + ) + ) + + +async def trash_project( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID, + force_stop_first: bool, +): + """ + + Raises: + ProjectStopError: + ProjectRunningConflictError: + """ + await check_user_project_permission( + app, + project_id=project_id, + user_id=user_id, + product_name=product_name, + permission="write", + ) + + if force_stop_first: + + async def _schedule(): + await asyncio.gather( + director_v2_api.stop_pipeline( + app, user_id=user_id, project_id=project_id + ), + projects_api.remove_project_dynamic_services( + user_id=user_id, + project_uuid=f"{project_id}", + app=app, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + notify_users=False, + ), + ) + + fire_and_forget_task( + _schedule(), + task_suffix_name=f"trash_project_force_stop_first_{user_id=}_{project_id=}", + fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], + ) + + elif await _is_project_running(app, user_id=user_id, project_id=project_id): + raise ProjectRunningConflictError( + project_uuid=project_id, + user_id=user_id, + product_name=product_name, + ) + + # mark as trash + await projects_api.patch_project( + app, + user_id=user_id, + product_name=product_name, + project_uuid=project_id, + project_patch=ProjectPatchExtended(trashed_at=arrow.utcnow().datetime), + ) + + +async def untrash_project( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID, +): + # NOTE: check_user_project_permission is inside projects_api.patch_project + await projects_api.patch_project( + app, + user_id=user_id, + product_name=product_name, + project_uuid=project_id, + project_patch=ProjectPatchExtended(trashed_at=None), + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py new file mode 100644 index 00000000000..5defa97c927 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py @@ -0,0 +1,158 @@ +import functools +import logging +from typing import NamedTuple + +from aiohttp import web +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + 
parse_request_path_parameters_as, + parse_request_query_parameters_as, +) +from servicelib.aiohttp.typing_extension import Handler +from servicelib.aiohttp.web_exceptions_extension import get_http_error_class_or_none +from servicelib.logging_errors import create_troubleshotting_log_kwargs +from servicelib.status_codes_utils import is_5xx_server_error + +from .._meta import API_VTAG as VTAG +from ..login.decorators import get_user_id, login_required +from ..products.api import get_product_name +from ..projects._common_models import ProjectPathParams +from ..security.decorators import permission_required +from . import _trash_api +from ._common_models import RemoveQueryParams +from .exceptions import ( + ProjectRunningConflictError, + ProjectStoppingError, + ProjectTrashError, +) + +_logger = logging.getLogger(__name__) + +# +# EXCEPTIONS HANDLING +# + + +class HttpErrorInfo(NamedTuple): + status_code: int + msg_template: str + + +_TO_HTTP_ERROR_MAP: dict[type[Exception], HttpErrorInfo] = { + ProjectRunningConflictError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again", + ), + ProjectStoppingError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "Something went wrong while stopping services before trashing. Aborting trash.", + ), +} + + +class _DefaultDict(dict): + def __missing__(self, key): + return f"'{key}=?'" + + +def _handle_request_exceptions(handler: Handler): + @functools.wraps(handler) + async def _wrapper(request: web.Request) -> web.StreamResponse: + try: + return await handler(request) + + except ProjectTrashError as exc: + for exc_cls, http_error_info in _TO_HTTP_ERROR_MAP.items(): + if isinstance(exc, exc_cls): + + # safe formatting, i.e. 
does not raise + user_msg = http_error_info.msg_template.format_map( + _DefaultDict(getattr(exc, "__dict__", {})) + ) + + http_error_cls = get_http_error_class_or_none( + http_error_info.status_code + ) + assert http_error_cls # nosec + + if is_5xx_server_error(http_error_info.status_code): + _logger.exception( + **create_troubleshotting_log_kwargs( + user_msg, + error=exc, + error_context={ + "request": request, + "request.remote": f"{request.remote}", + "request.method": f"{request.method}", + "request.path": f"{request.path}", + }, + ) + ) + raise http_error_cls(reason=user_msg) from exc + raise + + return _wrapper + + +# +# ROUTES +# + +routes = web.RouteTableDef() + + +@routes.delete(f"/{VTAG}/trash", name="empty_trash") +@login_required +@permission_required("project.delete") +@_handle_request_exceptions +async def empty_trash(request: web.Request): + user_id = get_user_id(request) + product_name = get_product_name(request) + + await _trash_api.empty_trash( + request.app, product_name=product_name, user_id=user_id + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +@routes.post(f"/{VTAG}/projects/{{project_id}}:trash", name="trash_project") +@login_required +@permission_required("project.delete") +@_handle_request_exceptions +async def trash_project(request: web.Request): + user_id = get_user_id(request) + product_name = get_product_name(request) + path_params = parse_request_path_parameters_as(ProjectPathParams, request) + query_params: RemoveQueryParams = parse_request_query_parameters_as( + RemoveQueryParams, request + ) + + await _trash_api.trash_project( + request.app, + product_name=product_name, + user_id=user_id, + project_id=path_params.project_id, + force_stop_first=query_params.force, + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +@routes.post(f"/{VTAG}/projects/{{project_id}}:untrash", name="untrash_project") +@login_required +@permission_required("project.delete") +@_handle_request_exceptions +async def untrash_project(request: web.Request): + user_id = get_user_id(request) + product_name = get_product_name(request) + path_params = parse_request_path_parameters_as(ProjectPathParams, request) + + await _trash_api.untrash_project( + request.app, + product_name=product_name, + user_id=user_id, + project_id=path_params.project_id, + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/projects/api.py b/services/web/server/src/simcore_service_webserver/projects/api.py index 6ec5370bb74..c7b44426c83 100644 --- a/services/web/server/src/simcore_service_webserver/projects/api.py +++ b/services/web/server/src/simcore_service_webserver/projects/api.py @@ -14,14 +14,14 @@ from ._wallets_api import connect_wallet_to_project, get_project_wallet __all__: tuple[str, ...] 
= ( - "register_permalink_factory", - "ProjectPermalink", - "get_project_wallet", + "check_user_project_permission", "connect_wallet_to_project", - "delete_project_group_without_checking_permissions", "create_project_group_without_checking_permissions", - "check_user_project_permission", + "delete_project_group_without_checking_permissions", + "get_project_wallet", "has_user_project_access_rights", + "ProjectPermalink", + "register_permalink_factory", ) diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py index 10487a80e90..6cbe059dfb7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/db.py +++ b/services/web/server/src/simcore_service_webserver/projects/db.py @@ -353,20 +353,25 @@ async def upsert_project_linked_product( async def list_projects( # pylint: disable=too-many-arguments self, *, + # hierarchy filters product_name: str, user_id: PositiveInt, + workspace_id: WorkspaceID | None, + folder_id: FolderID | None = None, + # attribute filters + search: str | None = None, filter_by_project_type: ProjectType | None = None, filter_by_services: list[dict] | None = None, - only_published: bool | None = False, - include_hidden: bool | None = False, + published: bool | None = False, + hidden: bool | None = False, + trashed: bool | None = False, + # pagination offset: int | None = 0, limit: int | None = None, - search: str | None = None, + # order order_by: OrderBy = OrderBy( field=IDStr("last_change_date"), direction=OrderDirection.DESC ), - folder_id: FolderID | None = None, - workspace_id: WorkspaceID | None, ) -> tuple[list[dict[str, Any]], list[ProjectType], int]: """ If workspace_id is provided, then listing in workspace is considered/preffered @@ -412,21 +417,6 @@ async def list_projects( # pylint: disable=too-many-arguments .select_from(_join_query) .where( ( - (projects.c.type == filter_by_project_type.value) - if filter_by_project_type - else (projects.c.type.is_not(None)) - ) - & ( - (projects.c.published.is_(True)) - if only_published - else sa.text("") - ) - & ( - (projects.c.hidden.is_(False)) - if not include_hidden - else sa.text("") - ) - & ( (projects_to_products.c.product_name == product_name) # This was added for backward compatibility, including old projects not in the projects_to_products table. 
| (projects_to_products.c.product_name.is_(None)) @@ -444,6 +434,28 @@ async def list_projects( # pylint: disable=too-many-arguments ) ) + # attributes filters + # None, true, false = all, attribute, !attribute + attributes_filters = [] + if filter_by_project_type is not None: + attributes_filters.append( + projects.c.type == filter_by_project_type.value + ) + + if hidden is not None: + attributes_filters.append(projects.c.hidden.is_(hidden)) + + if published is not None: + attributes_filters.append(projects.c.published.is_(published)) + + if trashed is not None: + attributes_filters.append( + projects.c.trashed_at.is_not(None) + if trashed + else projects.c.trashed_at.is_(None) + ) + query = query.where(sa.and_(*attributes_filters)) + if private_workspace_user_id_or_none: # If Private workspace we check to which projects user has access user_groups: list[RowProxy] = await self._list_user_groups( @@ -472,11 +484,13 @@ async def list_projects( # pylint: disable=too-many-arguments else: query = query.order_by(sa.desc(getattr(projects.c, order_by.field))) + # page meta total_number_of_projects = await conn.scalar( query.with_only_columns(func.count()).order_by(None) ) assert total_number_of_projects is not None # nosec + # page data prjs, prj_types = await self._execute_without_permission_check( conn, user_id=user_id, @@ -730,6 +744,7 @@ async def get_project( projects.c.published, projects.c.hidden, projects.c.workspace_id, + projects.c.trashed_at, ] async def get_project_db(self, project_uuid: ProjectID) -> ProjectDB: diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 9741be8e961..76cadc26987 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -78,6 +78,20 @@ def __init__(self, *, project_uuid, reason, **ctx): self.reason = reason +class ProjectTrashError(BaseProjectError): + ... 
+ + +class ProjectStoppingError(ProjectTrashError): + msg_template = "Failed to stop services in '{project_uuid}' before trashing" + + +class ProjectRunningConflictError(ProjectTrashError): + msg_template = ( + "Cannot trash running project '{project_uuid}' unless the force option is enabled" + ) + + class NodeNotFoundError(BaseProjectError): msg_template = "Node '{node_uuid}' not found in project '{project_uuid}'" diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index 8f4a13c172b..37961a9aff4 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -3,6 +3,7 @@ from typing import Any, TypeAlias from aiopg.sa.result import RowProxy +from models_library.api_schemas_webserver.projects import ProjectPatch from models_library.basic_types import HttpUrlWithCustomMinLength from models_library.folders import FolderID from models_library.projects import ClassifierID, ProjectID @@ -13,7 +14,7 @@ none_to_empty_str_pre_validator, ) from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, validator +from pydantic import BaseModel, Extra, validator from simcore_postgres_database.models.projects import ProjectType, projects ProjectDict: TypeAlias = dict[str, Any] @@ -51,6 +52,7 @@ class ProjectDB(BaseModel): published: bool hidden: bool workspace_id: WorkspaceID | None + trashed_at: datetime | None class Config: orm_mode = True @@ -97,6 +99,15 @@ class Config: orm_mode = True +class ProjectPatchExtended(ProjectPatch): + # Only used internally + trashed_at: datetime | None = None + + class Config: + allow_population_by_field_name = True + extra = Extra.forbid + + __all__: tuple[str, ...]
= ( "ProjectDict", "ProjectProxy", diff --git a/services/web/server/src/simcore_service_webserver/projects/plugin.py b/services/web/server/src/simcore_service_webserver/projects/plugin.py index 28264899af8..b72c4a90b9a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/plugin.py +++ b/services/web/server/src/simcore_service_webserver/projects/plugin.py @@ -20,6 +20,7 @@ _projects_nodes_pricing_unit_handlers, _states_handlers, _tags_handlers, + _trash_handlers, _wallets_handlers, _workspaces_handlers, ) @@ -61,5 +62,6 @@ def setup_projects(app: web.Application) -> bool: app.router.add_routes(_folders_handlers.routes) app.router.add_routes(_projects_nodes_pricing_unit_handlers.routes) app.router.add_routes(_workspaces_handlers.routes) + app.router.add_routes(_trash_handlers.routes) return True diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 1b5ddda4f8b..98760859a3e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -144,7 +144,7 @@ ProjectTooManyProjectOpenedError, ) from .lock import get_project_locked_state, is_project_locked, lock_project -from .models import ProjectDict +from .models import ProjectDict, ProjectPatchExtended from .settings import ProjectsSettings, get_plugin_settings from .utils import extract_dns_without_default_port @@ -249,7 +249,7 @@ async def patch_project( *, user_id: UserID, project_uuid: ProjectID, - project_patch: ProjectPatch, + project_patch: ProjectPatch | ProjectPatchExtended, product_name: ProductName, ): _project_patch_exclude_unset: dict[str, Any] = jsonable_encoder( diff --git a/services/web/server/src/simcore_service_webserver/projects/settings.py b/services/web/server/src/simcore_service_webserver/projects/settings.py index 727dcb51b35..28cda29b29d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/settings.py +++ b/services/web/server/src/simcore_service_webserver/projects/settings.py @@ -9,7 +9,7 @@ class ProjectsSettings(BaseCustomSettings): PROJECTS_MAX_COPY_SIZE_BYTES: ByteSize = Field( - parse_obj_as(ByteSize, "30Gib"), + default=parse_obj_as(ByteSize, "30Gib"), description="defines the maximum authorized project data size" " when copying a project (disable with 0)", ) @@ -19,10 +19,14 @@ class ProjectsSettings(BaseCustomSettings): ) PROJECTS_INACTIVITY_INTERVAL: timedelta = Field( - timedelta(seconds=20), + default=timedelta(seconds=20), description="interval after which services need to be idle in order to be considered inactive", ) + PROJECTS_TRASH_RETENTION_DAYS: NonNegativeInt = Field( + default=7, description="Trashed items will be deleted after this time" + ) + def get_plugin_settings(app: web.Application) -> ProjectsSettings: settings = app[APP_SETTINGS_KEY].WEBSERVER_PROJECTS diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index a57dfd3c852..27d39ea35e4 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -227,6 +227,7 @@ async def _setup( "prjOwner": None, "workspaceId": None, "folderId": None, + "trashedAt": None, } if from_study: # access rights are replaced diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 425756375b1..dbc3890f0b9 100644 --- 
a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -107,7 +107,11 @@ def mock_catalog_api( @pytest.fixture async def user_project( - client, fake_project, logged_user, tests_data_dir: Path, osparc_product_name: str + client: TestClient, + fake_project, + logged_user, + tests_data_dir: Path, + osparc_product_name: str, ) -> AsyncIterator[ProjectDict]: async with NewProject( fake_project, diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index 2b7ff6734f8..89a67734b60 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -91,29 +91,30 @@ def _assert_added_project( added_project: dict[str, Any], exp_overrides: dict[str, Any], ): - original_prj = deepcopy(exp_project) + expected_prj = deepcopy(exp_project) added_prj = deepcopy(added_project) - # no user so the project owner has a pre-defined value _DIFFERENT_KEYS = [ "creationDate", "lastChangeDate", "accessRights", # NOTE: access rights were moved away from the projects table + "trashedAt", ] - assert {k: v for k, v in original_prj.items() if k in _DIFFERENT_KEYS} != { + assert {k: v for k, v in expected_prj.items() if k in _DIFFERENT_KEYS} != { k: v for k, v in added_prj.items() if k in _DIFFERENT_KEYS } assert to_datetime(added_prj["creationDate"]) > to_datetime( - exp_project["creationDate"] + expected_prj["creationDate"] ) assert to_datetime(added_prj["creationDate"]) <= to_datetime( added_prj["lastChangeDate"] ) - original_prj.update(exp_overrides) + expected_prj.update(exp_overrides) for k in _DIFFERENT_KEYS: - added_prj.pop(k) - original_prj.pop(k) + added_prj.pop(k, None) + expected_prj.pop(k, None) + # the rest of the keys shall be the same as the original - assert added_prj == original_prj + assert added_prj == expected_prj def _assert_projects_to_product_db_row( diff --git a/services/web/server/tests/unit/with_dbs/03/test_trash.py b/services/web/server/tests/unit/with_dbs/03/test_trash.py new file mode 100644 index 00000000000..e10dc0065f1 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/test_trash.py @@ -0,0 +1,165 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import asyncio +from collections.abc import Callable +from uuid import UUID + +import arrow +import pytest +from aiohttp.test_utils import TestClient +from aioresponses import aioresponses +from models_library.api_schemas_webserver.projects import ProjectGet, ProjectListItem +from models_library.rest_pagination import Page +from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects.models import ProjectDict + + +@pytest.fixture +def user_role() -> UserRole: + return UserRole.USER + + +@pytest.fixture +def mocked_catalog( + user_project: ProjectDict, + catalog_subsystem_mock: Callable[[list[ProjectDict]], None], +): + catalog_subsystem_mock([user_project]) + + +@pytest.fixture +def mocked_director_v2(director_v2_service_mock: aioresponses): + ... 
+ + +@pytest.mark.acceptance_test( + "For https://github.com/ITISFoundation/osparc-simcore/pull/6579" +) +@pytest.mark.parametrize("force", [False, True]) +@pytest.mark.parametrize("is_project_running", [False, True]) +async def test_trash_projects( # noqa: PLR0915 + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + mocked_catalog: None, + mocked_director_v2: None, + mocker: MockerFixture, + force: bool, + is_project_running: bool, +): + assert client.app + + # this test should have no errors stopping services + mock_remove_dynamic_services = mocker.patch( + "simcore_service_webserver.projects._trash_api.projects_api.remove_project_dynamic_services", + autospec=True, + ) + mock_stop_pipeline = mocker.patch( + "simcore_service_webserver.projects._trash_api.director_v2_api.stop_pipeline", + autospec=True, + ) + mocker.patch( + "simcore_service_webserver.projects._trash_api.director_v2_api.is_pipeline_running", + return_value=is_project_running, + autospec=True, + ) + mocker.patch( + "simcore_service_webserver.projects._trash_api.director_v2_api.list_dynamic_services", + return_value=[mocker.MagicMock()] if is_project_running else [], + autospec=True, + ) + + project_uuid = UUID(user_project["uuid"]) + + url = client.app.router["list_projects"].url_for() + assert f"{url}" == "/v0/projects" + + # --------------------------------------------------------------------- + + # LIST NOT trashed + resp = await client.get("/v0/projects") + await assert_status(resp, status.HTTP_200_OK) + + page = Page[ProjectListItem].parse_obj(await resp.json()) + assert page.meta.total == 1 + + got = page.data[0] + assert got.uuid == project_uuid + assert got.trashed_at is None + + # LIST trashed + resp = await client.get("/v0/projects", params={"filters": '{"trashed": true}'}) + await assert_status(resp, status.HTTP_200_OK) + + page = Page[ProjectListItem].parse_obj(await resp.json()) + assert page.meta.total == 0 + + # TRASH + trashing_at = arrow.utcnow().datetime + resp = await client.post( + f"/v0/projects/{project_uuid}:trash", params={"force": f"{force}"} + ) + _, error = await assert_status( + resp, + status.HTTP_409_CONFLICT + if (is_project_running and not force) + else status.HTTP_204_NO_CONTENT, + ) + + could_not_trash = is_project_running and not force + + if could_not_trash: + assert error["status"] == status.HTTP_409_CONFLICT + assert "Current study is in use" in error["message"] + + # GET + resp = await client.get(f"/v0/projects/{project_uuid}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = ProjectGet.parse_obj(data) + assert got.uuid == project_uuid + + if could_not_trash: + assert got.trashed_at is None + else: + assert got.trashed_at + assert trashing_at < got.trashed_at + assert got.trashed_at < arrow.utcnow().datetime + + # LIST trashed + resp = await client.get("/v0/projects", params={"filters": '{"trashed": true}'}) + await assert_status(resp, status.HTTP_200_OK) + + page = Page[ProjectListItem].parse_obj(await resp.json()) + if could_not_trash: + assert page.meta.total == 0 + else: + assert page.meta.total == 1 + assert page.data[0].uuid == project_uuid + + # UNTRASH + resp = await client.post(f"/v0/projects/{project_uuid}:untrash") + data, _ = await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # GET + resp = await client.get(f"/v0/projects/{project_uuid}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + got = ProjectGet.parse_obj(data) + + assert got.uuid == project_uuid + assert got.trashed_at is None + + if 
is_project_running and force: + # checks fire&forget calls + await asyncio.sleep(0.1) + mock_stop_pipeline.assert_awaited() + mock_remove_dynamic_services.assert_awaited() diff --git a/tests/e2e/tutorials/sleepers_project_template_sql.csv b/tests/e2e/tutorials/sleepers_project_template_sql.csv index 46634511eec..29c16a6f416 100644 --- a/tests/e2e/tutorials/sleepers_project_template_sql.csv +++ b/tests/e2e/tutorials/sleepers_project_template_sql.csv @@ -1,2 +1,2 @@ -id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id -10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false, +id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id,trashed_at +10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, 
""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false,,