✨ Trash projects (#6579)
pcrespov authored Oct 28, 2024
1 parent fe95d75 commit 3ec9333
Showing 45 changed files with 1,017 additions and 80 deletions.
1 change: 1 addition & 0 deletions .env-devel
@@ -309,6 +309,7 @@ LOGIN_ACCOUNT_DELETION_RETENTION_DAYS=31
LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0
LOGIN_REGISTRATION_INVITATION_REQUIRED=0
PROJECTS_INACTIVITY_INTERVAL=20
PROJECTS_TRASH_RETENTION_DAYS=7
PROJECTS_MAX_COPY_SIZE_BYTES=30Gib
PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES=5
REST_SWAGGER_API_DOC_ENABLED=1
1 change: 1 addition & 0 deletions api/specs/web-server/_projects_crud.py
@@ -83,6 +83,7 @@ async def list_projects(
example='{"field": "last_change_date", "direction": "desc"}',
),
] = '{"field": "last_change_date", "direction": "desc"}',
filters: Annotated[Json | None, Query()] = None,
):
...

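Note on the new filters parameter above: it is a single query-string value holding a JSON object, in line with the FiltersQueryParameters model added further down in models_library.rest_filters. A minimal client-side sketch of building such a request follows; the /v0/projects path and the "trashed" field are illustrative assumptions, since the concrete project filters are not defined in this file.

# sketch only: "trashed" is a hypothetical filter field, /v0/projects an assumed base path
import json
from urllib.parse import urlencode

filters = {"trashed": True}
query = urlencode({"filters": json.dumps(filters)})
url = f"/v0/projects?{query}"
# -> /v0/projects?filters=%7B%22trashed%22%3A+true%7D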
61 changes: 61 additions & 0 deletions api/specs/web-server/_trash.py
@@ -0,0 +1,61 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-variable
# pylint: disable=too-many-arguments


from enum import Enum
from typing import Annotated

from fastapi import APIRouter, Depends, status
from simcore_service_webserver._meta import API_VTAG
from simcore_service_webserver.projects._trash_handlers import (
ProjectPathParams,
RemoveQueryParams,
)

router = APIRouter(
prefix=f"/{API_VTAG}",
tags=["trash"],
)


@router.delete(
"/trash",
status_code=status.HTTP_204_NO_CONTENT,
)
def empty_trash():
...


_extra_tags: list[str | Enum] = ["projects"]


@router.post(
"/projects/{project_id}:trash",
tags=_extra_tags,
status_code=status.HTTP_204_NO_CONTENT,
responses={
        status.HTTP_404_NOT_FOUND: {"description": "No such project"},
status.HTTP_409_CONFLICT: {
"description": "Project is in use and cannot be trashed"
},
status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"},
},
)
def trash_project(
_p: Annotated[ProjectPathParams, Depends()],
_q: Annotated[RemoveQueryParams, Depends()],
):
...


@router.post(
"/projects/{project_id}:untrash",
tags=_extra_tags,
status_code=status.HTTP_204_NO_CONTENT,
)
def untrash_project(
_p: Annotated[ProjectPathParams, Depends()],
):
...
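For orientation, a client could drive the three endpoints above roughly as follows. This is a sketch under stated assumptions: the base URL and project UUID are placeholders, the session is assumed to be already authenticated, and any RemoveQueryParams options accepted by trash_project are omitted here.

# illustrative client sketch; not part of the commit
import httpx

BASE_URL = "http://localhost:9081/v0"  # hypothetical deployment address
project_id = "00000000-0000-0000-0000-000000000000"  # hypothetical project UUID

with httpx.Client(base_url=BASE_URL) as client:
    # move the project to the trash: 204 on success, 409 if the project is in use
    client.post(f"/projects/{project_id}:trash").raise_for_status()

    # restore it from the trash
    client.post(f"/projects/{project_id}:untrash").raise_for_status()

    # permanently remove everything currently in the trash
    client.delete("/trash").raise_for_status()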
1 change: 1 addition & 0 deletions api/specs/web-server/openapi.py
@@ -55,6 +55,7 @@
"_resource_usage",
"_statics",
"_storage",
"_trash",
"_version_control",
"_workspaces",
# maintenance ----
@@ -1,14 +1,14 @@
from datetime import datetime
from typing import NamedTuple

from models_library.access_rights import AccessRights
from models_library.basic_types import IDStr
from models_library.folders import FolderID
from models_library.users import GroupID
from models_library.utils.common_validators import null_or_none_str_to_none_validator
from models_library.workspaces import WorkspaceID
from pydantic import Extra, PositiveInt, validator

from ..access_rights import AccessRights
from ..basic_types import IDStr
from ..folders import FolderID
from ..users import GroupID
from ..utils.common_validators import null_or_none_str_to_none_validator
from ..workspaces import WorkspaceID
from ._base import InputSchema, OutputSchema


@@ -5,6 +5,7 @@
"""

from datetime import datetime
from typing import Any, Literal, TypeAlias

from models_library.folders import FolderID
@@ -85,6 +86,7 @@ class ProjectGet(OutputSchema):
permalink: ProjectPermalink = FieldNotRequired()
workspace_id: WorkspaceID | None
folder_id: FolderID | None
trashed_at: datetime | None

_empty_description = validator("description", allow_reuse=True, pre=True)(
none_to_empty_str_pre_validator
5 changes: 5 additions & 0 deletions packages/models-library/src/models_library/projects.py
@@ -186,6 +186,11 @@ class Project(BaseProjectModel):
alias="folderId",
)

trashed_at: datetime | None = Field(
default=None,
alias="trashedAt",
)

class Config:
description = "Document that stores metadata, pipeline and UI setup of a study"
title = "osparc-simcore project"
19 changes: 17 additions & 2 deletions packages/models-library/src/models_library/rest_filters.py
@@ -1,7 +1,22 @@
from pydantic import BaseModel
from typing import Generic, TypeVar

from pydantic import BaseModel, Field, Json
from pydantic.generics import GenericModel


class Filters(BaseModel):
"""inspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.
"""
Encoded as JSON. Each available filter can have its own logic (should be well documented)
Inspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.
"""


# Custom filter
FilterT = TypeVar("FilterT", bound=Filters)


class FiltersQueryParameters(GenericModel, Generic[FilterT]):
filters: Json[FilterT] | None = Field( # pylint: disable=unsubscriptable-object
default=None,
description="Custom filter query parameter encoded as JSON",
)
2 changes: 2 additions & 0 deletions packages/models-library/src/models_library/users.py
@@ -1,8 +1,10 @@
from typing import TypeAlias

from models_library.basic_types import IDStr
from pydantic import BaseModel, ConstrainedStr, Field, PositiveInt

UserID: TypeAlias = PositiveInt
UserNameID: TypeAlias = IDStr
GroupID: TypeAlias = PositiveInt


65 changes: 65 additions & 0 deletions packages/models-library/tests/test_rest_filters.py
@@ -0,0 +1,65 @@
import logging

import pytest
from models_library.rest_filters import Filters, FiltersQueryParameters
from pydantic import Extra, ValidationError


# 1. create filter model
class CustomFilter(Filters):
is_trashed: bool | None = None
is_hidden: bool | None = None


class CustomFilterStrict(CustomFilter):
class Config(CustomFilter.Config):
extra = Extra.forbid


def test_custom_filter_query_parameters():

# 2. use generic as query parameters
logging.info(
"json schema is for the query \n %s",
FiltersQueryParameters[CustomFilter].schema_json(indent=1),
)

    # let's filter on is_trashed only and leave is_hidden unset
custom_filter = CustomFilter(is_trashed=True)
assert custom_filter.json() == '{"is_trashed": true, "is_hidden": null}'

# default to None (optional)
query_param = FiltersQueryParameters[CustomFilter]()
assert query_param.filters is None


@pytest.mark.parametrize(
"url_query_value,expects",
[
('{"is_trashed": true, "is_hidden": null}', CustomFilter(is_trashed=True)),
('{"is_trashed": true}', CustomFilter(is_trashed=True)),
(None, None),
],
)
def test_valid_filter_queries(
url_query_value: str | None, expects: CustomFilter | None
):
query_param = FiltersQueryParameters[CustomFilter](filters=url_query_value)
assert query_param.filters == expects


def test_invalid_filter_query_is_ignored():
    # NOTE: invalid filters get ignored!
url_query_value = '{"undefined_filter": true, "is_hidden": true}'

query_param = FiltersQueryParameters[CustomFilter](filters=url_query_value)
assert query_param.filters == CustomFilter(is_hidden=True)


@pytest.mark.xfail
def test_invalid_filter_query_fails():
    # NOTE: this should fail according to the pydantic manual, but it does not
url_query_value = '{"undefined_filter": true, "is_hidden": true}'

with pytest.raises(ValidationError):
FiltersQueryParameters[CustomFilterStrict](filters=url_query_value)
@@ -0,0 +1,29 @@
"""new projects trashed_at
Revision ID: fce5d231e16d
Revises: ea3952fe5a0e
Create Date: 2024-10-23 14:32:32.350937+00:00
"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "fce5d231e16d"
down_revision = "ea3952fe5a0e"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"projects", sa.Column("trashed_at", sa.DateTime(timezone=True), nullable=True)
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("projects", "trashed_at")
# ### end Alembic commands ###
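Once this revision is applied (for example with alembic upgrade head against a development database), the new column can be sanity-checked by inspecting the live schema. The helper below is a sketch for that purpose and is not part of the commit.

# sketch: confirm projects.trashed_at exists after running the migration
import sqlalchemy as sa


def has_trashed_at_column(engine: sa.engine.Engine) -> bool:
    columns = sa.inspect(engine).get_columns("projects")
    return any(col["name"] == "trashed_at" for col in columns)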
@@ -141,6 +141,12 @@ class ProjectType(enum.Enum):
default=False,
doc="If true, the project is by default not listed in the API",
),
sa.Column(
"trashed_at",
sa.DateTime(timezone=True),
nullable=True,
doc="Timestamp indicating when the project was marked as trashed, or null otherwise.",
),
sa.Column(
"workspace_id",
sa.BigInteger,
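The trashed_at column above works together with the PROJECTS_TRASH_RETENTION_DAYS setting added to .env-devel: a project whose trashed_at timestamp is older than the retention window is a candidate for permanent deletion. The query below only illustrates that relationship; the actual clean-up logic is not part of the files shown here.

# illustrative sketch, assuming SQLAlchemy Core and the projects table above
from datetime import datetime, timedelta, timezone

import sqlalchemy as sa
from simcore_postgres_database.models.projects import projects


def build_expired_trash_query(retention_days: int):
    # projects trashed before this cutoff have exceeded the retention window
    cutoff = datetime.now(tz=timezone.utc) - timedelta(days=retention_days)
    return sa.select(projects.c.uuid).where(
        sa.and_(projects.c.trashed_at.is_not(None), projects.c.trashed_at < cutoff)
    )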
28 changes: 25 additions & 3 deletions packages/postgres-database/tests/test_utils_projects.py
@@ -4,25 +4,27 @@
# pylint: disable=too-many-arguments
import uuid
from collections.abc import Awaitable, Callable
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, AsyncIterator

import pytest
import sqlalchemy
import sqlalchemy as sa
from aiopg.sa.connection import SAConnection
from aiopg.sa.result import RowProxy
from faker import Faker
from pydantic import parse_obj_as
from simcore_postgres_database.models.projects import projects
from simcore_postgres_database.utils_projects import (
DBProjectNotFoundError,
ProjectsRepo,
)
from simcore_postgres_database.utils_repos import transaction_context
from sqlalchemy.ext.asyncio import AsyncEngine


async def _delete_project(connection: SAConnection, project_uuid: uuid.UUID) -> None:
result = await connection.execute(
sqlalchemy.delete(projects).where(projects.c.uuid == f"{project_uuid}")
sa.delete(projects).where(projects.c.uuid == f"{project_uuid}")
)
assert result.rowcount == 1

@@ -51,6 +53,26 @@ async def registered_project(
await _delete_project(connection, project["uuid"])


@pytest.mark.parametrize("expected", (datetime.now(tz=timezone.utc), None))
async def test_get_project_trashed_at_column_can_be_converted_to_datetime(
asyncpg_engine: AsyncEngine, registered_project: dict, expected: datetime | None
):
project_id = registered_project["uuid"]

async with transaction_context(asyncpg_engine) as conn:
result = await conn.execute(
projects.update()
.values(trashed_at=expected)
.where(projects.c.uuid == project_id)
.returning(sa.literal_column("*"))
)

row = result.fetchone()

trashed_at = parse_obj_as(datetime | None, row.trashed_at)
assert trashed_at == expected


async def test_get_project_last_change_date(
asyncpg_engine: AsyncEngine, registered_project: dict, faker: Faker
):
4 changes: 2 additions & 2 deletions packages/service-library/src/servicelib/project_lock.py
@@ -3,7 +3,7 @@
from asyncio.log import logger
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import Final
from typing import Final, TypeAlias

import redis
import redis.exceptions
@@ -21,7 +21,7 @@
PROJECT_LOCK_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=10)
ProjectLock = Lock

ProjectLockError = redis.exceptions.LockError
ProjectLockError: TypeAlias = redis.exceptions.LockError


async def _auto_extend_project_lock(project_lock: Lock) -> None:
3 changes: 2 additions & 1 deletion services/docker-compose.yml
@@ -755,9 +755,10 @@ services:

# WEBSERVER_PROJECTS
WEBSERVER_PROJECTS: ${WEBSERVER_PROJECTS}
PROJECTS_INACTIVITY_INTERVAL: ${PROJECTS_INACTIVITY_INTERVAL}
PROJECTS_MAX_COPY_SIZE_BYTES: ${PROJECTS_MAX_COPY_SIZE_BYTES}
PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES: ${PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES}
PROJECTS_INACTIVITY_INTERVAL: ${PROJECTS_INACTIVITY_INTERVAL}
PROJECTS_TRASH_RETENTION_DAYS: ${PROJECTS_TRASH_RETENTION_DAYS}

# WEBSERVER_RABBITMQ
RABBIT_HOST: ${RABBIT_HOST}
2 changes: 1 addition & 1 deletion services/web/server/VERSION
@@ -1 +1 @@
0.43.1
0.44.0
4 changes: 2 additions & 2 deletions services/web/server/requirements/_base.txt
@@ -120,7 +120,7 @@ certifi==2023.7.22
# -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
# -c requirements/../../../../requirements/constraints.txt
# requests
cffi==1.15.0
cffi==1.17.1
# via cryptography
charset-normalizer==2.0.12
# via
@@ -257,7 +257,7 @@ mdurl==0.1.2
# via markdown-it-py
msgpack==1.0.7
# via -r requirements/_base.in
multidict==6.0.2
multidict==6.1.0
# via
# aiohttp
# yarl
2 changes: 1 addition & 1 deletion services/web/server/requirements/_test.txt
@@ -97,7 +97,7 @@ markupsafe==2.1.1
# via
# -c requirements/_base.txt
# mako
multidict==6.0.2
multidict==6.1.0
# via
# -c requirements/_base.txt
# aiohttp