From 050b3e63e393bdde73a3b3500bfc97718e674290 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 21:44:39 +0100 Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F=E2=AC=86=EF=B8=8F=20Migratin?= =?UTF-8?q?g=20director=20service=20to=20fastapi=20=E2=9A=A0=EF=B8=8F=20?= =?UTF-8?q?=F0=9F=9A=A8=20(#6657)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: sanderegg <35365065+sanderegg@users.noreply.github.com> --- .env-devel | 11 +- .github/workflows/ci-testing-deploy.yml | 33 +- ci/github/unit-testing/director.bash | 41 +- packages/aws-library/setup.py | 2 +- packages/dask-task-models-library/setup.py | 2 +- packages/models-library/setup.py | 2 +- .../api_schemas_director}/__init__.py | 0 .../api_schemas_director/services.py | 5 + .../requirements/_test.in | 1 + .../requirements/_test.txt | 28 + packages/notifications-library/setup.py | 2 +- packages/pytest-simcore/setup.py | 2 +- .../src/pytest_simcore/docker.py | 12 + .../src/pytest_simcore/docker_compose.py | 55 +- .../src/pytest_simcore/docker_swarm.py | 48 +- .../src/pytest_simcore/environment_configs.py | 7 +- .../src/pytest_simcore/minio_service.py | 8 +- .../src/pytest_simcore/postgres_service.py | 12 +- .../src/pytest_simcore/rabbit_service.py | 14 +- .../src/pytest_simcore/redis_service.py | 9 +- .../simcore_service_library_fixtures.py | 2 +- .../src/pytest_simcore/simcore_services.py | 4 +- .../pytest_simcore/simcore_storage_service.py | 9 +- .../simcore_webserver_service.py | 7 +- .../src/pytest_simcore/traefik_service.py | 5 +- packages/service-integration/setup.py | 2 +- packages/service-library/setup.py | 2 +- .../src/servicelib/async_utils.py | 1 + .../src/settings_library/docker_registry.py | 1 + packages/simcore-sdk/setup.py | 2 +- services/agent/setup.py | 2 +- services/api-server/setup.py | 2 +- services/autoscaling/setup.py | 2 +- services/autoscaling/tests/unit/conftest.py | 7 +- 
services/catalog/setup.py | 2 +- .../api/rest/_services_resources.py | 2 +- services/clusters-keeper/setup.py | 2 +- services/dask-sidecar/setup.py | 2 +- services/datcore-adapter/setup.py | 2 +- services/director-v2/setup.py | 2 +- ...ixed_dynamic_sidecar_and_legacy_project.py | 71 +- .../director-v2/tests/integration/02/utils.py | 2 +- services/director/Dockerfile | 103 +-- services/director/Makefile | 10 - services/director/README.md | 85 -- services/director/VERSION | 2 +- services/director/codegen.sh | 157 ---- services/director/docker/boot.sh | 58 +- services/director/docker/entrypoint.sh | 124 ++- services/director/docker/healthcheck.py | 33 +- services/director/requirements/Makefile | 7 - services/director/requirements/_base.in | 79 +- services/director/requirements/_base.txt | 459 +++++++--- services/director/requirements/_test.in | 31 +- services/director/requirements/_test.txt | 309 +++---- services/director/requirements/_tools.in | 7 + services/director/requirements/_tools.txt | 85 ++ services/director/requirements/ci.txt | 10 +- services/director/requirements/dev.txt | 7 + services/director/requirements/prod.txt | 7 +- services/director/setup.cfg | 19 +- services/director/setup.py | 92 +- .../src/simcore_service_director/__main__.py | 5 - .../src/simcore_service_director/_meta.py | 43 + .../simcore_service_director/api}/__init__.py | 0 .../api/rest/__init__.py | 0 .../api/rest/_health.py | 16 + .../api/rest/_running_interactive_services.py | 134 +++ .../api/rest/_service_extras.py | 40 + .../api/rest/_services.py | 123 +++ .../api/rest/routes.py | 30 + .../cache_request_decorator.py | 31 - .../src/simcore_service_director/cli.py | 26 + .../client_session.py | 41 + .../src/simcore_service_director/config.py | 170 ---- .../src/simcore_service_director/constants.py | 23 + .../simcore_service_director/core/__init__.py | 0 .../core/application.py | 75 ++ .../simcore_service_director/core/errors.py | 42 + .../simcore_service_director/core/settings.py | 120 
+++ .../simcore_service_director/docker_utils.py | 25 +- .../simcore_service_director/exceptions.py | 87 -- .../instrumentation.py | 86 ++ .../src/simcore_service_director/main.py | 53 +- .../models/__init__.py | 0 .../simcore_service_director/monitoring.py | 32 - .../src/simcore_service_director/producer.py | 794 +++++++++--------- .../registry_cache_task.py | 79 -- .../registry_proxy.py | 303 ++++--- .../src/simcore_service_director/resources.py | 36 - .../simcore_service_director/rest/__init__.py | 9 - .../rest/generated_code/models/__init__.py | 25 - .../rest/generated_code/models/base_model_.py | 66 -- .../generated_code/models/error_enveloped.py | 88 -- .../models/health_check_enveloped.py | 88 -- .../models/inline_response200.py | 88 -- .../models/inline_response2001.py | 88 -- .../models/inline_response2001_authors.py | 120 --- .../models/inline_response2001_badges.py | 122 --- .../models/inline_response2001_data.py | 332 -------- .../models/inline_response2002.py | 88 -- .../models/inline_response2002_authors.py | 120 --- .../models/inline_response2002_badges.py | 122 --- .../models/inline_response2002_data.py | 115 --- ...inline_response2002_data_container_spec.py | 60 -- ...ine_response2002_data_node_requirements.py | 147 ---- ...response2002_data_service_build_details.py | 110 --- .../models/inline_response2003.py | 88 -- .../models/inline_response2003_data.py | 399 --------- .../models/inline_response200_data.py | 135 --- .../models/inline_response201.py | 88 -- .../models/inline_response_default.py | 88 -- .../models/inline_response_default_error.py | 118 --- .../models/running_service_enveloped.py | 88 -- .../models/running_services_enveloped.py | 88 -- .../models/service_extras_enveloped.py | 88 -- .../models/services_enveloped.py | 88 -- .../generated_code/models/simcore_node.py | 447 ---------- .../rest/generated_code/routing.py | 94 --- .../rest/generated_code/typing_utils.py | 32 - .../rest/generated_code/util.py | 131 --- 
.../simcore_service_director/rest/handlers.py | 238 ------ .../services_common.py | 6 +- .../simcore_service_director/system_utils.py | 13 - .../src/simcore_service_director/utils.py | 29 - services/director/temp_generate_openapi.sh | 37 - services/director/tests/conftest.py | 115 --- .../director/tests/fixtures/fake_services.py | 242 ------ .../tests/helpers/json_schema_validator.py | 29 - .../director/tests/test_dummy_services.py | 24 - services/director/tests/test_handlers.py | 545 ------------ services/director/tests/test_json_schemas.py | 30 - services/director/tests/test_oas.py | 30 - services/director/tests/test_openapi.py | 25 - .../director/tests/test_openapi_schemas.py | 70 -- .../tests/test_registry_cache_task.py | 67 -- services/director/tests/test_utils.py | 38 - services/director/tests/unit/api/conftest.py | 40 + .../tests/unit/api/test_rest_health.py | 19 + .../test_rest_running_interactive_services.py | 309 +++++++ .../unit/api/test_rest_service_extras.py | 64 ++ .../tests/unit/api/test_rest_services.py | 199 +++++ services/director/tests/unit/conftest.py | 177 ++++ .../dummy_service_description-v1.json | 2 +- .../tests/unit/fixtures/fake_services.py | 321 +++++++ .../tests/unit/test__model_examples.py | 28 + services/director/tests/unit/test_cli.py | 34 + .../director/tests/unit/test_core_settings.py | 153 ++++ .../tests/{ => unit}/test_docker_utils.py | 37 +- .../tests/{ => unit}/test_producer.py | 190 +++-- .../tests/{ => unit}/test_registry_proxy.py | 156 ++-- services/docker-compose.local.yml | 3 +- services/docker-compose.yml | 38 +- services/dynamic-scheduler/setup.py | 2 +- services/dynamic-sidecar/setup.py | 2 +- services/efs-guardian/setup.py | 2 +- services/invitations/setup.py | 2 +- services/osparc-gateway-server/setup.py | 2 +- .../osparc-gateway-server/tests/conftest.py | 48 +- services/payments/setup.py | 2 +- services/resource-usage-tracker/setup.py | 2 +- services/storage/setup.py | 2 +- services/web/server/setup.py | 2 +- 
tests/e2e-playwright/Makefile | 4 +- tests/public-api/conftest.py | 14 +- tests/swarm-deploy/conftest.py | 6 +- tests/swarm-deploy/requirements/_test.in | 1 + tests/swarm-deploy/requirements/_test.txt | 7 +- 168 files changed, 4107 insertions(+), 7584 deletions(-) rename {services/director/src/simcore_service_director/rest/generated_code => packages/models-library/src/models_library/api_schemas_director}/__init__.py (100%) create mode 100644 packages/models-library/src/models_library/api_schemas_director/services.py delete mode 100755 services/director/codegen.sh mode change 100644 => 100755 services/director/docker/healthcheck.py delete mode 100644 services/director/src/simcore_service_director/__main__.py create mode 100644 services/director/src/simcore_service_director/_meta.py rename services/director/{tests/helpers => src/simcore_service_director/api}/__init__.py (100%) create mode 100644 services/director/src/simcore_service_director/api/rest/__init__.py create mode 100644 services/director/src/simcore_service_director/api/rest/_health.py create mode 100644 services/director/src/simcore_service_director/api/rest/_running_interactive_services.py create mode 100644 services/director/src/simcore_service_director/api/rest/_service_extras.py create mode 100644 services/director/src/simcore_service_director/api/rest/_services.py create mode 100644 services/director/src/simcore_service_director/api/rest/routes.py delete mode 100644 services/director/src/simcore_service_director/cache_request_decorator.py create mode 100644 services/director/src/simcore_service_director/cli.py create mode 100644 services/director/src/simcore_service_director/client_session.py delete mode 100644 services/director/src/simcore_service_director/config.py create mode 100644 services/director/src/simcore_service_director/constants.py create mode 100644 services/director/src/simcore_service_director/core/__init__.py create mode 100644 
services/director/src/simcore_service_director/core/application.py create mode 100644 services/director/src/simcore_service_director/core/errors.py create mode 100644 services/director/src/simcore_service_director/core/settings.py delete mode 100644 services/director/src/simcore_service_director/exceptions.py create mode 100644 services/director/src/simcore_service_director/instrumentation.py create mode 100644 services/director/src/simcore_service_director/models/__init__.py delete mode 100644 services/director/src/simcore_service_director/monitoring.py delete mode 100644 services/director/src/simcore_service_director/registry_cache_task.py delete mode 100644 services/director/src/simcore_service_director/resources.py delete mode 100644 services/director/src/simcore_service_director/rest/__init__.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/__init__.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py delete mode 100644 
services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py delete mode 100644 
services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/routing.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/typing_utils.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/util.py delete mode 100644 services/director/src/simcore_service_director/rest/handlers.py delete mode 100644 services/director/src/simcore_service_director/system_utils.py delete mode 100644 services/director/src/simcore_service_director/utils.py delete mode 100755 services/director/temp_generate_openapi.sh delete mode 100644 services/director/tests/conftest.py delete mode 100644 services/director/tests/fixtures/fake_services.py delete mode 100644 services/director/tests/helpers/json_schema_validator.py delete mode 100644 services/director/tests/test_dummy_services.py delete mode 100644 services/director/tests/test_handlers.py delete mode 100644 services/director/tests/test_json_schemas.py delete mode 100644 services/director/tests/test_oas.py delete mode 100644 services/director/tests/test_openapi.py delete mode 100644 services/director/tests/test_openapi_schemas.py delete mode 100644 services/director/tests/test_registry_cache_task.py delete mode 100644 services/director/tests/test_utils.py create mode 100644 services/director/tests/unit/api/conftest.py create mode 100644 services/director/tests/unit/api/test_rest_health.py create mode 100644 services/director/tests/unit/api/test_rest_running_interactive_services.py create mode 100644 services/director/tests/unit/api/test_rest_service_extras.py create mode 100644 services/director/tests/unit/api/test_rest_services.py create mode 100644 services/director/tests/unit/conftest.py rename services/director/tests/{ => unit}/fixtures/dummy_service_description-v1.json (96%) create mode 100644 services/director/tests/unit/fixtures/fake_services.py create 
mode 100644 services/director/tests/unit/test__model_examples.py create mode 100644 services/director/tests/unit/test_cli.py create mode 100644 services/director/tests/unit/test_core_settings.py rename services/director/tests/{ => unit}/test_docker_utils.py (50%) rename services/director/tests/{ => unit}/test_producer.py (69%) rename services/director/tests/{ => unit}/test_registry_proxy.py (65%) diff --git a/.env-devel b/.env-devel index 52fb6e84bfd..54e11e3fe5b 100644 --- a/.env-devel +++ b/.env-devel @@ -70,11 +70,17 @@ DASK_TLS_CA_FILE=/home/scu/.dask/dask-crt.pem DASK_TLS_CERT=/home/scu/.dask/dask-crt.pem DASK_TLS_KEY=/home/scu/.dask/dask-key.pem +DIRECTOR_DEFAULT_MAX_MEMORY=2147483648 +DIRECTOR_DEFAULT_MAX_NANO_CPUS=1000000000 DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' DIRECTOR_HOST=director -DIRECTOR_PORT=8080 -DIRECTOR_REGISTRY_CACHING_TTL=900 +DIRECTOR_LOGLEVEL=INFO +DIRECTOR_MONITORING_ENABLED=True +DIRECTOR_PORT=8000 +DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081" +DIRECTOR_REGISTRY_CACHING_TTL=00:15:00 DIRECTOR_REGISTRY_CACHING=True +DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null DIRECTOR_TRACING={} EFS_USER_ID=8006 @@ -186,6 +192,7 @@ REDIS_SECURE=false REDIS_USER=null REGISTRY_AUTH=True +REGISTRY_PATH="" REGISTRY_PW=adminadminadmin REGISTRY_SSL=True REGISTRY_URL=registry.osparc-master.speag.com diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index d244228a191..2a27c8c6ffa 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -37,8 +37,8 @@ on: - all env: - DEFAULT_MAX_NANO_CPUS: 10000000 - DEFAULT_MAX_MEMORY: 268435456 + # NOTE: 'COLUMNS' is a shell env var that represents the width (number of columns) + # of the terminal or command-line interface in characters. 
COLUMNS: 120 concurrency: @@ -189,6 +189,8 @@ jobs: - 'packages/**' - 'services/director/**' - 'services/docker-compose*' + - 'scripts/mypy/*' + - 'mypy.ini' director-v2: - 'packages/**' - 'services/director-v2/**' @@ -912,11 +914,8 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # KEEP 3.6 Development of this service is frozen - # KEEP ubuntu 20.04, else no python 3.6 - python: [3.6] - os: [ubuntu-20.04] - docker_buildx: [v0.10.4] + python: ["3.11"] + os: [ubuntu-22.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -929,12 +928,27 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} + - name: install uv + uses: astral-sh/setup-uv@v3 + with: + version: "0.4.x" + enable-cache: false + cache-dependency-glob: "**/director/requirements/ci.txt" - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/director.bash install + - name: typecheck + run: ./ci/github/unit-testing/director.bash typecheck - name: test + if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director.bash test + - name: upload failed tests logs + if: ${{ !cancelled() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ github.job }}_docker_logs + path: ./services/director/test_failures - uses: codecov/codecov-action@v5.0.2 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} @@ -2132,6 +2146,11 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false + env: + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 + DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_TRACING: null steps: - uses: actions/checkout@v4 - name: setup docker buildx diff --git a/ci/github/unit-testing/director.bash b/ci/github/unit-testing/director.bash index 6098c8fb29b..a29764642ee 100755 --- a/ci/github/unit-testing/director.bash +++ b/ci/github/unit-testing/director.bash @@ -6,44 +6,29 @@ set -o pipefail # don't hide errors within 
pipes IFS=$'\n\t' install() { - # Replaces 'bash ci/helpers/ensure_python_pip.bash' - - echo "INFO:" "$(python --version)" "@" "$(command -v python)" - - # installs pip if not in place - python -m ensurepip - - echo "INFO:" "$(pip --version)" "@" "$(command -v pip)" - # NOTE: pip<22.0 for python 3.6 - pip3 install --upgrade \ - pip~=21.0 \ - wheel \ - setuptools - python3 -m venv .venv + make devenv # shellcheck source=/dev/null source .venv/bin/activate pushd services/director - pip3 install -r requirements/ci.txt + make install-ci popd + uv pip list } test() { # shellcheck source=/dev/null source .venv/bin/activate pushd services/director - pytest \ - --color=yes \ - --cov-append \ - --cov-config=.coveragerc \ - --cov-report=term-missing \ - --cov-report=xml \ - --cov=simcore_service_director \ - --durations=10 \ - --keep-docker-up \ - --log-date-format="%Y-%m-%d %H:%M:%S" \ - --log-format="%(asctime)s %(levelname)s %(message)s" \ - --verbose \ - tests/ + make test-ci-unit + popd +} + +typecheck() { + # shellcheck source=/dev/null + source .venv/bin/activate + uv pip install mypy + pushd services/director + make mypy popd } diff --git a/packages/aws-library/setup.py b/packages/aws-library/setup.py index 32b5ea71e93..30ef0981954 100644 --- a/packages/aws-library/setup.py +++ b/packages/aws-library/setup.py @@ -33,7 +33,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for AWS APIs", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/dask-task-models-library/setup.py b/packages/dask-task-models-library/setup.py index f6a1e1c1f45..159e9aabdc7 100644 --- a/packages/dask-task-models-library/setup.py +++ b/packages/dask-task-models-library/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: 
"version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for simcore pydantic dask task models", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/models-library/setup.py b/packages/models-library/setup.py index f93dd240462..439d6b7fc41 100644 --- a/packages/models-library/setup.py +++ b/packages/models-library/setup.py @@ -34,7 +34,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for simcore pydantic models", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/services/director/src/simcore_service_director/rest/generated_code/__init__.py b/packages/models-library/src/models_library/api_schemas_director/__init__.py similarity index 100% rename from services/director/src/simcore_service_director/rest/generated_code/__init__.py rename to packages/models-library/src/models_library/api_schemas_director/__init__.py diff --git a/packages/models-library/src/models_library/api_schemas_director/services.py b/packages/models-library/src/models_library/api_schemas_director/services.py new file mode 100644 index 00000000000..52578fd7a69 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_director/services.py @@ -0,0 +1,5 @@ +from ..services_metadata_published import ServiceMetaDataPublished + + +class ServiceDataGet(ServiceMetaDataPublished): + ... 
diff --git a/packages/notifications-library/requirements/_test.in b/packages/notifications-library/requirements/_test.in index eafb9291680..005795b87e7 100644 --- a/packages/notifications-library/requirements/_test.in +++ b/packages/notifications-library/requirements/_test.in @@ -8,6 +8,7 @@ # --constraint _base.txt +aiodocker coverage docker faker diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index e802554a901..36d7bffd645 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -1,3 +1,17 @@ +aiodocker==0.23.0 + # via -r requirements/_test.in +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.10.10 + # via + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiosignal==1.3.1 + # via aiohttp +attrs==24.2.0 + # via + # -c requirements/_base.txt + # aiohttp certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt @@ -12,6 +26,10 @@ docker==7.1.0 # via -r requirements/_test.in faker==29.0.0 # via -r requirements/_test.in +frozenlist==1.5.0 + # via + # aiohttp + # aiosignal greenlet==3.1.1 # via # -c requirements/_base.txt @@ -22,8 +40,14 @@ idna==3.10 # via # -c requirements/_base.txt # requests + # yarl iniconfig==2.0.0 # via pytest +multidict==6.1.0 + # via + # -c requirements/_base.txt + # aiohttp + # yarl mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 @@ -101,3 +125,7 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # docker # requests +yarl==1.12.1 + # via + # -c requirements/_base.txt + # aiohttp diff --git a/packages/notifications-library/setup.py b/packages/notifications-library/setup.py index 5488afbbcc5..2a94424c29a 100644 --- a/packages/notifications-library/setup.py +++ b/packages/notifications-library/setup.py @@ -38,7 +38,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), 
"author": "Pedro Crespo-Valero (pcrespov)", "description": "simcore library for user notifications e.g. emails, sms, etc", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/pytest-simcore/setup.py b/packages/pytest-simcore/setup.py index aeeaca60b0e..3bfff70d2ee 100644 --- a/packages/pytest-simcore/setup.py +++ b/packages/pytest-simcore/setup.py @@ -17,7 +17,7 @@ "description": "pytest plugin with fixtures and test helpers for osparc-simcore repo modules", "py_modules": ["pytest_simcore"], # WARNING: this is used in frozen services as well !!!! - "python_requires": ">=3.6", + "python_requires": "~=3.11", "install_requires": ["pytest>=3.5.0"], "extras_require": { "all": [ diff --git a/packages/pytest-simcore/src/pytest_simcore/docker.py b/packages/pytest-simcore/src/pytest_simcore/docker.py index 9b0a36cb1d8..89e88484a4b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker.py @@ -1,3 +1,9 @@ +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable + import asyncio import contextlib from collections.abc import AsyncIterator, Callable @@ -7,6 +13,12 @@ import pytest +@pytest.fixture +async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: + async with aiodocker.Docker() as docker_client: + yield docker_client + + @contextlib.asynccontextmanager async def _pause_container( async_docker_client: aiodocker.Docker, container_name: str diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 581fcf30187..737fa7bc35b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -45,10 
+45,13 @@ def temp_folder( @pytest.fixture(scope="session") -def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: +def env_vars_for_docker_compose(env_devel_file: Path) -> EnvVarsDict: """ - Loads and extends .env-devel returning - all environment variables key=value + Loads and extends .env-devel returning all environment variables key=value + + + NOTE: that these are then env-vars used in the services started in the + integration tests! """ env_devel = dotenv_values( env_devel_file, @@ -73,16 +76,32 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: env_devel[ "AIOCACHE_DISABLE" - ] = "1" # ensure that aio-caches are disabled for testing [https://aiocache.readthedocs.io/en/latest/testing.html] + # ensure that aio-caches are disabled for testing [https://aiocache.readthedocs.io/en/latest/testing.html] + ] = "1" env_devel[ "CATALOG_BACKGROUND_TASK_REST_TIME" - ] = "1" # ensure catalog refreshes services access rights fast - + # ensure catalog refreshes services access rights fast + ] = "1" + + # TRACING + # NOTE: should go away with pydantic v2 + env_devel["TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT"] = "null" + env_devel["TRACING_OPENTELEMETRY_COLLECTOR_PORT"] = "null" + for key in env_devel: + if key.endswith("_TRACING"): + env_devel[key] = "null" + + # DIRECTOR env_devel["DIRECTOR_REGISTRY_CACHING"] = "False" + # NOTE: this will make TracingSettings fail and therefore the default factory of every *_TRACING field will be set to None + + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + env_devel["DIRECTOR_DEFAULT_MAX_MEMORY"] = "268435456" + env_devel["DIRECTOR_DEFAULT_MAX_NANO_CPUS"] = "10000000" + env_devel["DIRECTOR_LOGLEVEL"] = "DEBUG" + env_devel["REGISTRY_PATH"] = "127.0.0.1:5000" + env_devel.setdefault("DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS", "") - env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_SECRET_ID", "") - env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME", "") - 
env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_FILENAME", "") env_devel["API_SERVER_DEV_FEATURES_ENABLED"] = "1" @@ -98,9 +117,9 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: @pytest.fixture(scope="module") -def env_file_for_testing( +def env_file_for_docker_compose( temp_folder: Path, - testing_environ_vars: dict[str, str], + env_vars_for_docker_compose: dict[str, str], osparc_simcore_root_dir: Path, ) -> Iterator[Path]: """Dumps all the environment variables into an $(temp_folder)/.env.test file @@ -117,7 +136,7 @@ def env_file_for_testing( f"# Auto-generated from env_file_for_testing in {__file__}", file=fh, ) - for key, value in sorted(testing_environ_vars.items()): + for key, value in sorted(env_vars_for_docker_compose.items()): # NOTE: python-dotenv parses JSON encoded strings correctly, but # writing them back shows an issue. if the original ENV is something like MY_ENV='{"correct": "encodedjson"}' # it goes to MY_ENV={"incorrect": "encodedjson"}! @@ -146,7 +165,7 @@ def env_file_for_testing( def simcore_docker_compose( osparc_simcore_root_dir: Path, osparc_simcore_scripts_dir: Path, - env_file_for_testing: Path, + env_file_for_docker_compose: Path, temp_folder: Path, ) -> dict[str, Any]: """Resolves docker-compose for simcore stack in local host @@ -156,7 +175,7 @@ def simcore_docker_compose( COMPOSE_FILENAMES = ["docker-compose.yml", "docker-compose.local.yml"] # ensures .env at git_root_dir - assert env_file_for_testing.exists() + assert env_file_for_docker_compose.exists() # target docker compose path docker_compose_paths = [ @@ -171,7 +190,7 @@ def simcore_docker_compose( project_dir=osparc_simcore_root_dir / "services", scripts_dir=osparc_simcore_scripts_dir, docker_compose_paths=docker_compose_paths, - env_file_path=env_file_for_testing, + env_file_path=env_file_for_docker_compose, destination_path=temp_folder / "simcore_docker_compose.yml", ) @@ -180,7 +199,7 @@ def simcore_docker_compose( def ops_docker_compose( 
osparc_simcore_root_dir: Path, osparc_simcore_scripts_dir: Path, - env_file_for_testing: Path, + env_file_for_docker_compose: Path, temp_folder: Path, ) -> dict[str, Any]: """Filters only services in docker-compose-ops.yml and returns yaml data @@ -188,7 +207,7 @@ def ops_docker_compose( Produces same as `make .stack-ops.yml` in a temporary folder """ # ensures .env at git_root_dir, which will be used as current directory - assert env_file_for_testing.exists() + assert env_file_for_docker_compose.exists() # target docker compose path docker_compose_path = ( @@ -200,7 +219,7 @@ def ops_docker_compose( project_dir=osparc_simcore_root_dir / "services", scripts_dir=osparc_simcore_scripts_dir, docker_compose_paths=docker_compose_path, - env_file_path=env_file_for_testing, + env_file_path=env_file_for_docker_compose, destination_path=temp_folder / "ops_docker_compose.yml", ) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index b0c3f1e62f0..579d9b52bca 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -10,13 +10,15 @@ from collections.abc import Iterator from contextlib import suppress from pathlib import Path -from typing import Any +from typing import Any, AsyncIterator, Awaitable, Callable +import aiodocker import docker import pytest import yaml from docker.errors import APIError -from tenacity import Retrying, TryAgain, retry +from faker import Faker +from tenacity import AsyncRetrying, Retrying, TryAgain, retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -251,12 +253,12 @@ def docker_stack( core_docker_compose_file: Path, ops_docker_compose_file: Path, keep_docker_up: bool, - testing_environ_vars: EnvVarsDict, + env_vars_for_docker_compose: EnvVarsDict, ) -> Iterator[dict]: """deploys core 
and ops stacks and returns as soon as all are running""" # WARNING: keep prefix "pytest-" in stack names - core_stack_name = testing_environ_vars["SWARM_STACK_NAME"] + core_stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] ops_stack_name = "pytest-ops" assert core_stack_name @@ -390,3 +392,41 @@ async def _check_all_services_are_running(): raise _ResourceStillNotRemovedError(msg) _fetch_and_print_services(docker_client, "[AFTER REMOVED]") + + +@pytest.fixture +async def docker_network( + docker_swarm: None, + async_docker_client: aiodocker.Docker, + faker: Faker, +) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: + networks = [] + + async def _network_creator(**network_config_kwargs) -> dict[str, Any]: + network = await async_docker_client.networks.create( + config={"Name": faker.uuid4(), "Driver": "overlay"} | network_config_kwargs + ) + assert network + print(f"--> created network {network=}") + networks.append(network) + return await network.show() + + yield _network_creator + + # wait until all networks are really gone + async def _wait_for_network_deletion(network: aiodocker.docker.DockerNetwork): + network_name = (await network.show())["Name"] + await network.delete() + async for attempt in AsyncRetrying( + reraise=True, wait=wait_fixed(1), stop=stop_after_delay(60) + ): + with attempt: + print(f"<-- waiting for network '{network_name}' deletion...") + list_of_network_names = [ + n["Name"] for n in await async_docker_client.networks.list() + ] + assert network_name not in list_of_network_names + print(f"<-- network '{network_name}' deleted") + + print(f"<-- removing all networks {networks=}") + await asyncio.gather(*[_wait_for_network_deletion(network) for network in networks]) diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index 33a38db4c7e..0454335bf91 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -102,7 +102,7 @@ def docker_compose_service_environment_dict( """ service = services_docker_compose_dict["services"][service_name] - def _substitute(key, value): + def _substitute(key, value) -> tuple[str, str]: if m := re.match(r"\${([^{}:-]\w+)", value): expected_env_var = m.group(1) try: @@ -112,9 +112,10 @@ def _substitute(key, value): return key, value except KeyError: pytest.fail( - f"{expected_env_var} is not defined in {env_devel_file} but used in docker-compose services[{service}].environment[{key}]" + f"{expected_env_var} is not defined in '{env_devel_file}' but it " + f"is used as a rhs variable in the docker-compose services[{service_name}].environment[{key}]" ) - return None + return key, value envs: EnvVarsDict = {} for key, value in service.get("environment", {}).items(): diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 46cee6fbeeb..ff7586a40de 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -13,15 +13,15 @@ @pytest.fixture def minio_s3_settings( - docker_stack: dict, testing_environ_vars: dict, faker: Faker + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict, faker: Faker ) -> S3Settings: assert "pytest-ops_minio" in docker_stack["services"] return S3Settings( - S3_ACCESS_KEY=testing_environ_vars["S3_ACCESS_KEY"], - S3_SECRET_KEY=testing_environ_vars["S3_SECRET_KEY"], + S3_ACCESS_KEY=env_vars_for_docker_compose["S3_ACCESS_KEY"], + S3_SECRET_KEY=env_vars_for_docker_compose["S3_SECRET_KEY"], S3_ENDPOINT=f"http://{get_localhost_ip()}:{get_service_published_port('minio')}", - S3_BUCKET_NAME=testing_environ_vars["S3_BUCKET_NAME"], + S3_BUCKET_NAME=env_vars_for_docker_compose["S3_BUCKET_NAME"], S3_REGION="us-east-1", ) diff --git 
a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py index 24eddd0221f..c4df0b40c3b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py @@ -131,16 +131,18 @@ def database_from_template_before_each_function( @pytest.fixture(scope="module") -def postgres_dsn(docker_stack: dict, testing_environ_vars: dict) -> PostgresTestConfig: +def postgres_dsn( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> PostgresTestConfig: assert "pytest-simcore_postgres" in docker_stack["services"] pg_config: PostgresTestConfig = { - "user": testing_environ_vars["POSTGRES_USER"], - "password": testing_environ_vars["POSTGRES_PASSWORD"], - "database": testing_environ_vars["POSTGRES_DB"], + "user": env_vars_for_docker_compose["POSTGRES_USER"], + "password": env_vars_for_docker_compose["POSTGRES_PASSWORD"], + "database": env_vars_for_docker_compose["POSTGRES_DB"], "host": get_localhost_ip(), "port": get_service_published_port( - "postgres", testing_environ_vars["POSTGRES_PORT"] + "postgres", env_vars_for_docker_compose["POSTGRES_PORT"] ), } diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 47188400e79..938a2435283 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -36,19 +36,21 @@ async def wait_till_rabbit_responsive(url: str) -> None: @pytest.fixture def rabbit_env_vars_dict( docker_stack: dict, - testing_environ_vars: dict, + env_vars_for_docker_compose: EnvVarsDict, ) -> EnvVarsDict: - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_rabbit" in docker_stack["services"] - port = get_service_published_port("rabbit", 
testing_environ_vars["RABBIT_PORT"]) + port = get_service_published_port( + "rabbit", env_vars_for_docker_compose["RABBIT_PORT"] + ) return { - "RABBIT_USER": testing_environ_vars["RABBIT_USER"], - "RABBIT_PASSWORD": testing_environ_vars["RABBIT_PASSWORD"], + "RABBIT_USER": env_vars_for_docker_compose["RABBIT_USER"], + "RABBIT_PASSWORD": env_vars_for_docker_compose["RABBIT_PASSWORD"], "RABBIT_HOST": get_localhost_ip(), "RABBIT_PORT": f"{port}", - "RABBIT_SECURE": testing_environ_vars["RABBIT_SECURE"], + "RABBIT_SECURE": env_vars_for_docker_compose["RABBIT_SECURE"], } diff --git a/packages/pytest-simcore/src/pytest_simcore/redis_service.py b/packages/pytest-simcore/src/pytest_simcore/redis_service.py index e9c16abcda5..98cf03a595b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/redis_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/redis_service.py @@ -19,6 +19,7 @@ from .helpers.docker import get_service_published_port from .helpers.host import get_localhost_ip +from .helpers.typing_env import EnvVarsDict log = logging.getLogger(__name__) @@ -26,21 +27,21 @@ @pytest.fixture async def redis_settings( docker_stack: dict, # stack is up - testing_environ_vars: dict, + env_vars_for_docker_compose: EnvVarsDict, ) -> RedisSettings: """Returns the settings of a redis service that is up and responsive""" - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_redis" in docker_stack["services"] port = get_service_published_port( - "simcore_redis", testing_environ_vars["REDIS_PORT"] + "simcore_redis", int(env_vars_for_docker_compose["REDIS_PORT"]) ) # test runner is running on the host computer settings = RedisSettings( REDIS_HOST=get_localhost_ip(), REDIS_PORT=PortInt(port), - REDIS_PASSWORD=testing_environ_vars["REDIS_PASSWORD"], + REDIS_PASSWORD=env_vars_for_docker_compose["REDIS_PASSWORD"], ) await wait_till_redis_responsive(settings.build_redis_dsn(RedisDatabase.RESOURCES)) diff 
--git a/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py b/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py index fc85ee7a690..90104625cf0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py @@ -1,4 +1,4 @@ -from typing import AsyncIterable +from collections.abc import AsyncIterable import pytest from servicelib.async_utils import cancel_sequential_workers diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 1bdb143f418..11dd165a963 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -116,11 +116,11 @@ def create(cls, service_name: str, baseurl): def services_endpoint( core_services_selection: list[str], docker_stack: dict, - testing_environ_vars: EnvVarsDict, + env_vars_for_docker_compose: EnvVarsDict, ) -> dict[str, URL]: services_endpoint = {} - stack_name = testing_environ_vars["SWARM_STACK_NAME"] + stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] for service in core_services_selection: service = _SERVICE_NAME_REPLACEMENTS.get(service, service) assert f"{stack_name}_{service}" in docker_stack["services"] diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index 9628d1058c9..72431180f4a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -17,14 +17,17 @@ from .helpers.docker import get_service_published_port from .helpers.host import get_localhost_ip +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") -def storage_endpoint(docker_stack: dict, 
testing_environ_vars: dict) -> Iterable[URL]: - prefix = testing_environ_vars["SWARM_STACK_NAME"] +def storage_endpoint( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> Iterable[URL]: + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_storage" in docker_stack["services"] - default_port = testing_environ_vars["STORAGE_ENDPOINT"].split(":")[1] + default_port = env_vars_for_docker_compose["STORAGE_ENDPOINT"].split(":")[1] endpoint = ( f"{get_localhost_ip()}:{get_service_published_port('storage', default_port)}" ) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py index 3d5d083edfe..b885b62232f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py @@ -10,11 +10,14 @@ from yarl import URL from .helpers.docker import get_service_published_port +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") -def webserver_endpoint(docker_stack: dict, testing_environ_vars: dict) -> URL: - prefix = testing_environ_vars["SWARM_STACK_NAME"] +def webserver_endpoint( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> URL: + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_webserver" in docker_stack["services"] endpoint = f"127.0.0.1:{get_service_published_port('webserver', 8080)}" diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py index 462dfdb29e2..a75df5aae08 100644 --- a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py @@ -12,16 +12,17 @@ from yarl import URL from .helpers.docker import get_service_published_port +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") def 
traefik_endpoints( - docker_stack: dict, testing_environ_vars: dict + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict ) -> tuple[URL, URL, URL]: """get the endpoint for the given simcore_service. NOTE: simcore_service defined as a parametrization """ - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_traefik" in docker_stack["services"] traefik_api_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 8080)}" diff --git a/packages/service-integration/setup.py b/packages/service-integration/setup.py index dac440b0f9d..4dc34180e23 100644 --- a/packages/service-integration/setup.py +++ b/packages/service-integration/setup.py @@ -64,7 +64,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "Framework :: Pytest", ], "long_description": Path(CURRENT_DIR / "README.md").read_text(), - "python_requires": ">=3.6", + "python_requires": "~=3.11", "license": "MIT license", "install_requires": INSTALL_REQUIREMENTS, "packages": find_packages(where="src"), diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index ced858bd59d..521b491b918 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -35,7 +35,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "author": "Pedro Crespo-Valero (pcrespov)", "description": "Core service library for simcore (or servicelib)", "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "install_requires": tuple(PROD_REQUIREMENTS), "packages": find_packages(where="src"), "package_data": {"": ["py.typed"]}, diff --git a/packages/service-library/src/servicelib/async_utils.py b/packages/service-library/src/servicelib/async_utils.py index 3385ad5820e..42ba951aafb 100644 --- a/packages/service-library/src/servicelib/async_utils.py +++ b/packages/service-library/src/servicelib/async_utils.py @@ -39,6 +39,7 @@ class QueueElement: output: Any | None = None +# NOTE: If you 
get issues with event loop already closed error use ensure_run_in_sequence_context_is_empty fixture in your tests _sequential_jobs_contexts: dict[str, Context] = {} diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index bb365cb9785..08ca0818b7d 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -10,6 +10,7 @@ class RegistrySettings(BaseCustomSettings): REGISTRY_AUTH: bool = Field(..., description="do registry authentication") REGISTRY_PATH: str | None = Field( default=None, + # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine" description="development mode only, in case a local registry is used", ) # NOTE: name is missleading, http or https protocol are not included diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index cd763f3ac3a..cb7c8f88edf 100644 --- a/packages/simcore-sdk/setup.py +++ b/packages/simcore-sdk/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "packages": find_packages(where="src"), "package_data": {"": ["py.typed"]}, "package_dir": {"": "src"}, - "python_requires": ">=3.6", + "python_requires": "~=3.11", "install_requires": INSTALL_REQUIREMENTS, "tests_require": TEST_REQUIREMENTS, "extras_require": {"test": TEST_REQUIREMENTS}, diff --git a/services/agent/setup.py b/services/agent/setup.py index 22c1b7ae1d7..6745e6db31d 100755 --- a/services/agent/setup.py +++ b/services/agent/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/api-server/setup.py b/services/api-server/setup.py index 
8acef0b60b1..ca8eeb93434 100755 --- a/services/api-server/setup.py +++ b/services/api-server/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/autoscaling/setup.py b/services/autoscaling/setup.py index 2cdde15f3ed..516747aa082 100755 --- a/services/autoscaling/setup.py +++ b/services/autoscaling/setup.py @@ -49,7 +49,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 243ccbc429e..35446bf3f69 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -86,6 +86,7 @@ "pytest_simcore.aws_iam_service", "pytest_simcore.aws_ssm_service", "pytest_simcore.dask_scheduler", + "pytest_simcore.docker", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", @@ -426,12 +427,6 @@ async def autoscaling_docker() -> AsyncIterator[AutoscalingDocker]: yield cast(AutoscalingDocker, docker_client) -@pytest.fixture -async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: - async with aiodocker.Docker() as docker_client: - yield docker_client - - @pytest.fixture async def host_node( docker_swarm: None, diff --git a/services/catalog/setup.py b/services/catalog/setup.py index 9aae69d9a65..cd21924e00c 100644 --- a/services/catalog/setup.py +++ b/services/catalog/setup.py @@ -43,7 +43,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": "Manages and maintains a catalog of all published components (e.g. 
macro-algorithms, scripts, etc)", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py index 701c4b41f3d..9bbca5b902f 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py @@ -148,7 +148,7 @@ async def _get_service_labels( # `busybox:latest` or `traefik:latest` because # the director-v0 cannot extract labels from them # and will fail validating the key or the version - if err.status_code == status.HTTP_400_BAD_REQUEST: + if err.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY: return None raise diff --git a/services/clusters-keeper/setup.py b/services/clusters-keeper/setup.py index df644386545..1caa625e8b6 100755 --- a/services/clusters-keeper/setup.py +++ b/services/clusters-keeper/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/dask-sidecar/setup.py b/services/dask-sidecar/setup.py index 3299eab5a6c..ed223f163a6 100644 --- a/services/dask-sidecar/setup.py +++ b/services/dask-sidecar/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: ], "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/datcore-adapter/setup.py b/services/datcore-adapter/setup.py index 
c08158cf963..c9f49735bd0 100644 --- a/services/datcore-adapter/setup.py +++ b/services/datcore-adapter/setup.py @@ -37,7 +37,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": "Interfaces with datcore storage", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/director-v2/setup.py b/services/director-v2/setup.py index d1f9ffa19ec..0f8aa66e5cc 100644 --- a/services/director-v2/setup.py +++ b/services/director-v2/setup.py @@ -47,7 +47,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": "Orchestrates the pipeline of services defined by the user", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 4d7c348a336..4bfe998ad59 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -1,13 +1,14 @@ -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments -# pylint:disable=too-many-positional-arguments -# pylint:disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio import logging from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable from contextlib import asynccontextmanager -from typing import Any +from typing import Any, cast from 
unittest import mock import aiodocker @@ -46,14 +47,11 @@ "migration", "postgres", "rabbit", - "storage", "redis", + "storage", ] -pytest_simcore_ops_services_selection = [ - "adminer", - "minio", -] +pytest_simcore_ops_services_selection = ["adminer", "minio", "portainer"] @pytest.fixture() @@ -79,29 +77,32 @@ def mock_env( catalog_port = services_endpoint["catalog"].port assert catalog_port - env_vars: EnvVarsDict = { - "DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS": "{}", - "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", - "SWARM_STACK_NAME": "pytest-simcore", - "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", - "SC_BOOT_MODE": "production", - "DYNAMIC_SIDECAR_EXPOSE_PORT": "true", - "PROXY_EXPOSE_PORT": "true", - "SIMCORE_SERVICES_NETWORK_NAME": network_name, - "DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED": "true", - "POSTGRES_HOST": f"{get_localhost_ip()}", - "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "false", - "COMPUTATIONAL_BACKEND_ENABLED": "false", - "R_CLONE_PROVIDER": "MINIO", - "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", - "DIRECTOR_HOST": director_host, - "DIRECTOR_PORT": f"{director_port}", - "CATALOG_HOST": catalog_host, - "CATALOG_PORT": f"{catalog_port}", - } - setenvs_from_dict(monkeypatch, env_vars) monkeypatch.delenv("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", raising=False) - return mock_env | env_vars + mock_env.pop("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", None) + + return mock_env | setenvs_from_dict( + monkeypatch, + { + "DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS": "{}", + "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", + "SWARM_STACK_NAME": "pytest-simcore", + "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", + "SC_BOOT_MODE": "production", + "DYNAMIC_SIDECAR_EXPOSE_PORT": "true", + "PROXY_EXPOSE_PORT": "true", + "SIMCORE_SERVICES_NETWORK_NAME": network_name, + "DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED": "true", + "POSTGRES_HOST": f"{get_localhost_ip()}", + "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "false", + "COMPUTATIONAL_BACKEND_ENABLED": "false", + "R_CLONE_PROVIDER": "MINIO", 
+ "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", + "DIRECTOR_HOST": director_host, + "DIRECTOR_PORT": f"{director_port}", + "CATALOG_HOST": catalog_host, + "CATALOG_PORT": f"{catalog_port}", + }, + ) @pytest.fixture @@ -117,17 +118,17 @@ def minimal_configuration( @pytest.fixture def uuid_legacy(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture def uuid_dynamic_sidecar(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture def uuid_dynamic_sidecar_compose(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 0c5f10c07bd..ff90ac59488 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -425,7 +425,7 @@ async def assert_all_services_running( ) ) - assert all(x == "running" for x in service_states) + assert all(state == "running" for state in service_states) print("--> all services are up and running!") diff --git a/services/director/Dockerfile b/services/director/Dockerfile index 3449ba3db80..247188f6360 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -1,6 +1,12 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim-buster AS base + +# Define arguments in the global scope +ARG PYTHON_VERSION="3.11.9" +ARG UV_VERSION="0.4" +FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build +# we docker image is built based on debian +FROM python:${PYTHON_VERSION}-slim-bookworm AS base + # # USAGE: # cd sercices/director @@ -14,8 +20,8 @@ LABEL maintainer=sanderegg # for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/] RUN rm -f /etc/apt/apt.conf.d/docker-clean && \ echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > 
/etc/apt/apt.conf.d/keep-cache -RUN --mount=type=cache,id=basecache36,target=/var/cache/apt,mode=0755,sharing=locked \ - --mount=type=cache,id=baseapt36,target=/var/lib/apt,mode=0755,sharing=locked \ +RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ + --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ @@ -44,22 +50,13 @@ ENV LANG=C.UTF-8 # Turns off writing .pyc files; superfluous on an ephemeral container. ENV PYTHONDONTWRITEBYTECODE=1 \ VIRTUAL_ENV=/home/scu/.venv + # Ensures that the python and pip executables used # in the image will be those from our virtualenv. ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -# environment variables -ENV REGISTRY_AUTH='' \ - REGISTRY_USER='' \ - REGISTRY_PW='' \ - REGISTRY_URL='' \ - REGISTRY_VERSION='v2' \ - PUBLISHED_HOST_NAME='' \ - SIMCORE_SERVICES_NETWORK_NAME='' \ - EXTRA_HOSTS_SUFFIX='undefined' - - -EXPOSE 8080 +EXPOSE 8000 +EXPOSE 3000 # -------------------------- Build stage ------------------- # Installs build/package management tools and third party dependencies @@ -71,36 +68,26 @@ FROM base AS build ENV SC_BUILD_TARGET=build -RUN --mount=type=cache,id=buildbasecache36,target=/var/cache/apt,mode=0755,sharing=locked \ - --mount=type=cache,id=buildbaseapt36,target=/var/lib/apt,mode=0755,sharing=locked \ +RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ + --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ set -eux \ && apt-get update \ && apt-get install -y --no-install-recommends \ - build-essential \ - git + build-essential +# install UV https://docs.astral.sh/uv/guides/integration/docker/#installing-uv +COPY --from=uv_build /uv /uvx /bin/ -# NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv -RUN python -m venv "${VIRTUAL_ENV}" +# NOTE: python virtualenv is used here such that installed +# 
packages may be moved to production image easily by copying the venv +RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,id=pip36,mode=0755,target=/root/.cache/pip \ - pip install --upgrade \ - pip~=21.3 \ +RUN --mount=type=cache,target=/root/.cache/uv \ + uv pip install --upgrade \ wheel \ setuptools -# install base 3rd party dependencies (NOTE: this speeds up devel mode) -RUN \ - --mount=type=bind,source=packages,target=/build/packages,rw \ - --mount=type=bind,source=services/director,target=/build/services/director,rw \ - pip install \ - -r /build/services/director/requirements/_base.txt - -# FIXME: -# necessary to prevent duplicated files. -# Will be removed when director is refactored using cookiecutter as this will not be necessary anymore -COPY --chown=scu:scu api/specs/director/schemas/node-meta-v0.0.1.json \ - /build/services/director/src/simcore_service_director/api/v0/oas-parts/schemas/node-meta-v0.0.1.json +WORKDIR /build # --------------------------Prod-depends-only stage ------------------- # This stage is for production only dependencies that get partially wiped out afterwards (final docker image concerns) @@ -110,12 +97,18 @@ COPY --chown=scu:scu api/specs/director/schemas/node-meta-v0.0.1.json \ # FROM build AS prod-only-deps -WORKDIR /build/services/director ENV SC_BUILD_TARGET=prod-only-deps + +WORKDIR /build/services/director + RUN \ --mount=type=bind,source=packages,target=/build/packages,rw \ --mount=type=bind,source=services/director,target=/build/services/director,rw \ - pip install -r requirements/prod.txt + --mount=type=cache,target=/root/.cache/uv \ + uv pip sync \ + requirements/prod.txt \ + && uv pip list + # --------------------------Production stage ------------------- # Final cleanup up to reduce image size and startup setup @@ -128,25 +121,32 @@ FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production + ENV PYTHONOPTIMIZE=TRUE +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV 
UV_COMPILE_BYTECODE=1 WORKDIR /home/scu - # ensure home folder is read/writable for user scu RUN chown -R scu /home/scu -# bring installed package without build tools -COPY --from=prod-only-deps --chown=scu:scu ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Starting from clean base image, copies pre-installed virtualenv from prod-only-deps +COPY --chown=scu:scu --from=prod-only-deps ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Copies booting scripts COPY --chown=scu:scu services/director/docker services/director/docker RUN chmod +x services/director/docker/*.sh -HEALTHCHECK --interval=30s \ - --timeout=120s \ +HEALTHCHECK --interval=10s \ + --timeout=5s \ --start-period=30s \ - --retries=3 \ - CMD ["python3", "/home/scu/services/director/docker/healthcheck.py", "http://localhost:8080/v0/"] -ENTRYPOINT [ "services/director/docker/entrypoint.sh" ] -CMD ["services/director/docker/boot.sh"] + --start-interval=1s \ + --retries=5 \ + CMD ["python3", "/home/scu/services/director/docker/healthcheck.py", "http://localhost:8000/v0/"] + +ENTRYPOINT [ "/bin/sh", "services/director/docker/entrypoint.sh" ] +CMD ["/bin/sh", "services/director/docker/boot.sh"] # --------------------------Development stage ------------------- @@ -159,9 +159,12 @@ CMD ["services/director/docker/boot.sh"] # FROM build AS development -ENV SC_BUILD_TARGET=development -ENV NODE_SCHEMA_LOCATION=../../../api/specs/director/schemas/node-meta-v0.0.1.json +ENV SC_BUILD_TARGET=development \ + SC_DEVEL_MOUNT=/devel/services/director + WORKDIR /devel + RUN chown -R scu:scu "${VIRTUAL_ENV}" + ENTRYPOINT [ "/bin/sh", "services/director/docker/entrypoint.sh" ] CMD ["/bin/sh", "services/director/docker/boot.sh"] diff --git a/services/director/Makefile b/services/director/Makefile index 0e91426d6d2..140d05c72d0 100644 --- a/services/director/Makefile +++ b/services/director/Makefile @@ -3,13 +3,3 @@ # include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile - - -_check_python_version: - # Checking that runs 
with correct python version - @python3 -c "import sys; current_version=[int(d) for d in '3.6'.split('.')]; assert sys.version_info[:2]==tuple(current_version[:2]), f'Expected python $(EXPECTED_PYTHON_VERSION), got {sys.version_info}'" - - -.PHONY: openapi-specs -openapi-specs: ## updates and validates openapi specifications - $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ diff --git a/services/director/README.md b/services/director/README.md index 21146025f16..d919b4f4e98 100644 --- a/services/director/README.md +++ b/services/director/README.md @@ -1,86 +1 @@ # director - -[![Docker Pulls](https://img.shields.io/docker/pulls/itisfoundation/director.svg)](https://hub.docker.com/r/itisfoundation/director/tags) -[![](https://images.microbadger.com/badges/image/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/version/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/commit/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") - - -## Usage - -```bash - # go to director folder - cd /services/director - # install - pip install . - # start director - simcore-service-director - # or - python -m simcore_service_director -``` - -## Development - -```bash - # go to director folder - cd /services/director - # install with symlinks - pip install -r requirements-dev.txt -``` - -The director implements a REST API defined in __/src/simcore_service_director/api/v1/openapi.yaml__. -First extend the API and validate the API before implementing any new route. - -## Current status - -End validation of the requests/responses is missing as some issues arose with using the openapi-core library. It seems it is not happy with referencing a json schema file. 
An issue was filed to see if something may be done quickly [github](https://github.com/p1c2u/openapi-core/issues/90). - -## docker - -- Uses multi-stage dockerfile to extend a common stack of layers into production or development images -- Main difference between development and production stages is whether the code gets copied or not inside of the image -- Development stage is set first to avoid re-building when files are changed -- ``boot.sh`` is necessary to activate the virtual environment inside of the docker - -```bash - - # development image - docker build --target development -t director:dev . - docker run -v %DIRECTOR_SRC_CODE:/home/scu/src director:dev - - # production image - docker build -t director:prod . - # or - docker build --target production -t director:prod . - docker run director:prod - -``` - -### local testing - -Using the main Makefile of the oSparc platform allows for testing the director: - -```bash - # go to root folder - make build-devel - # switch the docker swarm on in development mode - make up-devel -``` - -Then open [director-swagger-ui](http://localhost:8080/apidoc/) to see the director API and try out the different routes. - -## code generation from REST API "server side" - -Execute the following script for generating the necessary code server side - -```bash -./codegen.sh -``` - -NOTE: Issue #3 must still be taken care of manually! - -### Issues - -1. SwaggerRouter must be created with __version_ui__ set to 3 or the swagger ui must be access with ?version=3 -2. SwaggerRouter.include needs to have the argument __basePath__ filled to serve the API at the right location (ndlr /v1) [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) -3. 
The generated models need to be manually corrected when the properties are __nullable__ as the code generator does add a check for __None__ value that triggers a ValueError exception even though the value is allowed to be null [Python server models generation issue with __nullable: true__ on GitHub](https://github.com/OpenAPITools/openapi-generator/issues/579) diff --git a/services/director/VERSION b/services/director/VERSION index 6e8bf73aa55..3eefcb9dd5b 100644 --- a/services/director/VERSION +++ b/services/director/VERSION @@ -1 +1 @@ -0.1.0 +1.0.0 diff --git a/services/director/codegen.sh b/services/director/codegen.sh deleted file mode 100755 index bd5b6600cf6..00000000000 --- a/services/director/codegen.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash -# define the input specification file and the output directory -# typical structure: -# /src/package-name/.openapi/v1/package_api.yaml -- this is the input file -# /src/package-name/rest/generated_code -- this is the output directory -SOURCE_DIR=./src/simcore_service_director -API_VERSION=v0 -INPUT_SPEC=${SOURCE_DIR}/api/${API_VERSION}/openapi.yaml -OUTPUT_DIR=${SOURCE_DIR}/rest -OUTPUT_DIR_GEN=${SOURCE_DIR}/rest/generated_code -INIT_FILE_PATH=${OUTPUT_DIR}/__init__.py -HANDLERS_FILE_PATH=${OUTPUT_DIR}/handlers.py -ROUTING_FILE_PATH=${OUTPUT_DIR_GEN}/routing.py - -# create the folder for the output -mkdir -p $OUTPUT_DIR -# generate the python server models code -ABSOLUTE_INPUT_PATH=$(realpath "${INPUT_SPEC}") -ABSOLUTE_OUTPUT_DIR=$(realpath "${OUTPUT_DIR}") -ABSOLUTE_OUTPUT_DIR_GEN=$(realpath "${OUTPUT_DIR_GEN}") -../../scripts/openapi/openapi_python_server_codegen.sh -i ${ABSOLUTE_INPUT_PATH} -o ${ABSOLUTE_OUTPUT_DIR_GEN} -# replace import entries in python code -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server.models././g' {} \; -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server/../g' {} \; -find ${OUTPUT_DIR_GEN} -maxdepth 1 -type f -exec sed -i 's/openapi_server/./g' {} 
\; -# create __init__.py if always -cat > "${INIT_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. -""" -from .generated_code import ( - models, - util, - routing -) -EOF - -# only generate stub if necessary -if [ ! -e "${HANDLERS_FILE_PATH}" ]; then - cat > "${HANDLERS_FILE_PATH}" << EOF -"""This is a generated stub of handlers to be connected to the paths defined in the API - -""" -import logging - -from aiohttp import web_exceptions - -log = logging.getLogger(__name__) - -# This module shall contain the handlers of the API (implementation side of the openapi server side). -# Each operation is typically defined as -# async def root_get(request): -# return "hello API world" - -# The API shall define a path where the entry operationId: -# operationId: root_get -EOF -fi - -# always generate routing -cat > "${ROUTING_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. - -use create_web_app to initialise the web application using the specification file. -The base folder is the root of the package. -""" - - -import logging -from pathlib import Path - -from aiohttp import hdrs, web -from aiohttp_apiset import SwaggerRouter -from aiohttp_apiset.exceptions import ValidationError -from aiohttp_apiset.middlewares import Jsonify, jsonify -from aiohttp_apiset.swagger.loader import ExtendedSchemaFile -from aiohttp_apiset.swagger.operations import OperationIdMapping - -from .. 
import handlers -from .models.base_model_ import Model - -log = logging.getLogger(__name__) - -@web.middleware -async def __handle_errors(request, handler): - try: - log.debug("error middleware handling request %s to handler %s", request, handler) - response = await handler(request) - return response - except ValidationError as ex: - # aiohttp apiset errors - log.exception("error happened in handling route") - error = dict(status=ex.status, message=ex.to_tree()) - error_enveloped = dict(error=error) - return web.json_response(error_enveloped, status=ex.status) - except web.HTTPError as ex: - log.exception("error happened in handling route") - error = dict(status=ex.status, message=str(ex.reason)) - error_enveloped = dict(data=error) - return web.json_response(error_enveloped, status=ex.status) - - -def create_web_app(base_folder, spec_file, additional_middlewares = None): - # create the default mapping of the operationId to the implementation code in handlers - opmap = __create_default_operation_mapping(Path(base_folder / spec_file)) - - # generate a version 3 of the API documentation - router = SwaggerRouter( - swagger_ui='/apidoc/', - version_ui=3, # forces the use of version 3 by default - search_dirs=[base_folder], - default_validate=True, - ) - - # add automatic jsonification of the models located in generated code - jsonify.singleton = Jsonify(indent=3, ensure_ascii=False) - jsonify.singleton.add_converter(Model, lambda o: o.to_dict(), score=0) - - middlewares = [jsonify, __handle_errors] - if additional_middlewares: - middlewares.extend(additional_middlewares) - # create the web application using the API - app = web.Application( - router=router, - middlewares=middlewares, - ) - router.set_cors(app, domains='*', headers=( - (hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, hdrs.AUTHORIZATION), - )) - - # Include our specifications in a router, - # is now available in the swagger-ui to the address http://localhost:8080/swagger/?spec=v1 - router.include( - 
spec=Path(base_folder / spec_file), - operationId_mapping=opmap, - name='v0', # name to access in swagger-ui, - basePath="/v0" # BUG: in apiset with openapi 3.0.0 [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) - ) - - return app - -def __create_default_operation_mapping(specs_file): - operation_mapping = {} - yaml_specs = ExtendedSchemaFile(specs_file) - paths = yaml_specs['paths'] - for path in paths.items(): - for method in path[1].items(): # can be get, post, patch, put, delete... - op_str = "operationId" - if op_str not in method[1]: - raise Exception("The API %s does not contain the operationId tag for route %s %s" % (specs_file, path[0], method[0])) - operation_id = method[1][op_str] - operation_mapping[operation_id] = getattr(handlers, operation_id) - return OperationIdMapping(**operation_mapping) -EOF diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index 2a77aa40daa..eba9085247c 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -6,32 +6,56 @@ IFS=$(printf '\n\t') INFO="INFO: [$(basename "$0")] " -# BOOTING application --------------------------------------------- echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." 
-echo " User :$(id "$(whoami)")" -echo " Workdir :$(pwd)" +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +# +# DEVELOPMENT MODE +# +# - prints environ info +# - installs requirements in mounted volume +# if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "Environment :" printenv | sed 's/=/: /' | sed 's/^/ /' | sort echo "$INFO" "Python :" python --version | sed 's/^/ /' command -v python | sed 's/^/ /' - cd services/director || exit 1 - # speedup for legacy service with all essential dependencies pinned - # in this case `--no-deps` does the trick, for details see link - # https://stackoverflow.com/a/65793484/2855718 - pip install --no-cache-dir --no-deps -r requirements/dev.txt - cd - || exit 1 - echo "$INFO" "PIP :" - pip list | sed 's/^/ /' + + cd services/director + uv pip --quiet --no-cache-dir sync requirements/dev.txt + cd - + uv pip list +fi + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + # NOTE: production does NOT pre-installs debugpy + uv pip install --no-cache-dir debugpy fi -# RUNNING application ---------------------------------------- -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then - watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories -- \ - python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \ - simcore_service_director --loglevel="${LOGLEVEL}" +# +# RUNNING application +# + +APP_LOG_LEVEL=${DIRECTOR_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} +SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') +echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + + exec sh -c " + cd services/director/src/simcore_service_director && \ + python -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + --host 0.0.0.0 \ + --reload \ + $reload_dir_packages + --reload-dir . \ + --log-level \"${SERVER_LOG_LEVEL}\" + " else - exec simcore-service-director --loglevel="${LOGLEVEL}" + exec uvicorn simcore_service_director.main:the_app \ + --host 0.0.0.0 \ + --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh index 232da22ba7e..ad982fd8d5c 100755 --- a/services/director/docker/entrypoint.sh +++ b/services/director/docker/entrypoint.sh @@ -1,4 +1,9 @@ #!/bin/sh +# +# - Executes *inside* of the container upon start as --user [default root] +# - Notice that the container *starts* as --user [default root] but +# *runs* as non-root user [scu] +# set -o errexit set -o nounset @@ -10,86 +15,75 @@ ERROR="ERROR: [$(basename "$0")] " # Read self-signed SSH certificates (if applicable) # -# In case the director must access a docker registry in a secure way using +# In case the director must access a docker registry in a secure way using # non-standard certificates (e.g. such as self-signed certificates), this call is needed. -# It needs to be executed as root. +# It needs to be executed as root. Also required for any access to, for example, a secure rabbitmq. update-ca-certificates -# This entrypoint script: -# -# - Executes *inside* of the container upon start as --user [default root] -# - Notice that the container *starts* as --user [default root] but -# *runs* as non-root user [scu] -# echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." -echo "$INFO" "User :$(id "$(whoami)")" -echo "$INFO" "Workdir :$(pwd)" -echo scuUser :"$(id scu)" - -if [ "${SC_BUILD_TARGET}" = "development" ] -then - # NOTE: expects docker run ...
-v $(pwd):/devel/services/director - DEVEL_MOUNT=/devel/services/director +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +echo "$INFO" "User : $(id scu)" +echo "$INFO" "python : $(command -v python)" +echo "$INFO" "pip : $(command -v pip)" - stat $DEVEL_MOUNT > /dev/null 2>&1 || \ - (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script +# +# DEVELOPMENT MODE +# - expects docker run ... -v $(pwd):$SC_DEVEL_MOUNT +# - mounts source folders +# - deduces host's uid/gid and assigns to user within docker +# +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "development mode detected..." + stat "${SC_DEVEL_MOUNT}" >/dev/null 2>&1 || + (echo "$ERROR" "You must mount '$SC_DEVEL_MOUNT' to deduce user and group ids" && exit 1) - echo "setting correct user id/group id..." - HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}") - HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}") - CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) - if [ "$HOST_USERID" -eq 0 ] - then - echo "Warning: Folder mounted owned by root user... adding $SC_USER_NAME to root..." - adduser "$SC_USER_NAME" root + echo "$INFO" "setting correct user id/group id..." + HOST_USERID=$(stat --format=%u "${SC_DEVEL_MOUNT}") + HOST_GROUPID=$(stat --format=%g "${SC_DEVEL_MOUNT}") + CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) + if [ "$HOST_USERID" -eq 0 ]; then + echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..." + adduser "$SC_USER_NAME" root + else + echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+ # take host's credentials in $SC_USER_NAME + if [ -z "$CONT_GROUPNAME" ]; then + echo "$WARNING" "Creating new group grp$SC_USER_NAME" + CONT_GROUPNAME=grp$SC_USER_NAME + addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" else - echo "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." - # take host's credentials in $SC_USER_NAME - if [ -z "$CONT_GROUPNAME" ] - then - echo "Creating new group my$SC_USER_NAME" - CONT_GROUPNAME=my$SC_USER_NAME - addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" - else - echo "group already exists" - fi - echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." - adduser "$SC_USER_NAME" "$CONT_GROUPNAME" - - echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" - usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" - - echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around - echo "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + echo "$INFO" "group already exists" fi -fi + echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..." 
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + echo "$WARNING" "Changing ownership [this could take some time]" + echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ] -then - # NOTE: production does NOT pre-installs ptvsd - python3 -m pip install ptvsd + echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fi fi # Appends docker group if socket is mounted DOCKER_MOUNT=/var/run/docker.sock -if stat $DOCKER_MOUNT > /dev/null 2>&1 -then - echo "$INFO detected docker socket is mounted, adding user to group..." - GROUPID=$(stat --format=%g $DOCKER_MOUNT) - GROUPNAME=scdocker +if stat $DOCKER_MOUNT >/dev/null 2>&1; then + echo "$INFO detected docker socket is mounted, adding user to group..." + GROUPID=$(stat --format=%g $DOCKER_MOUNT) + GROUPNAME=scdocker - if ! addgroup --gid "$GROUPID" $GROUPNAME > /dev/null 2>&1 - then - echo "$WARNING docker group with $GROUPID already exists, getting group name..." - # if group already exists in container, then reuse name - GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) - echo "$WARNING docker group with $GROUPID has name $GROUPNAME" - fi - adduser "$SC_USER_NAME" "$GROUPNAME" + if ! addgroup --gid "$GROUPID" $GROUPNAME >/dev/null 2>&1; then + echo "$WARNING docker group with $GROUPID already exists, getting group name..." 
+ # if group already exists in container, then reuse name + GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) + echo "$WARNING docker group with $GROUPID has name $GROUPNAME" + fi + adduser "$SC_USER_NAME" "$GROUPNAME" fi echo "$INFO Starting $* ..." diff --git a/services/director/docker/healthcheck.py b/services/director/docker/healthcheck.py old mode 100644 new mode 100755 index b3a1e7e8cad..10e58d00e21 --- a/services/director/docker/healthcheck.py +++ b/services/director/docker/healthcheck.py @@ -8,7 +8,7 @@ --timeout=30s \ --start-period=1s \ --retries=3 \ - CMD python3 docker/healthcheck.py http://localhost:8080/v0/ + CMD python3 docker/healthcheck.py http://localhost:8000/ ``` Q&A: @@ -18,23 +18,24 @@ import os import sys +from contextlib import suppress from urllib.request import urlopen -SUCCESS, UNHEALTHY = 0, 1 +# Disabled if boots with debugger (e.g. debug, pdb-debug, debug-ptvsd, etc) +SC_BOOT_MODE = os.environ.get("SC_BOOT_MODE", "") -# Disabled if boots with debugger -ok = os.environ.get("SC_BOOT_MODE") == "debug" +# Adds a base-path if defined in environ +SIMCORE_NODE_BASEPATH = os.environ.get("SIMCORE_NODE_BASEPATH", "") -# Queries host -# pylint: disable=consider-using-with -ok = ( - ok - or urlopen( - "{host}{baseurl}".format( - host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") - ) # adds a base-path if defined in environ - ).getcode() - == 200 -) -sys.exit(SUCCESS if ok else UNHEALTHY) +def is_service_healthy() -> bool: + if "debug" in SC_BOOT_MODE.lower(): + return True + + with suppress(Exception): + with urlopen(f"{sys.argv[1]}{SIMCORE_NODE_BASEPATH}") as f: + return f.getcode() == 200 + return False + + +sys.exit(os.EX_OK if is_service_healthy() else os.EX_UNAVAILABLE) diff --git a/services/director/requirements/Makefile b/services/director/requirements/Makefile index 7aacec9e5ee..3f25442b790 100644 --- a/services/director/requirements/Makefile +++ b/services/director/requirements/Makefile @@ -4,10 +4,3 
@@ include ../../../requirements/base.Makefile # Add here any extra explicit dependency: e.g. _migration.txt: _base.txt - - -_test.txt: _base.txt _test.in - ## NOTE: this recipe override has to be removed - ## to execute target upgrades e.g. due to vulnerability of - ## a library. - @echo INFO: test.txt is frozen. Skipping upgrade. diff --git a/services/director/requirements/_base.in b/services/director/requirements/_base.in index 0618d6c7759..468bb684525 100644 --- a/services/director/requirements/_base.in +++ b/services/director/requirements/_base.in @@ -1,70 +1,19 @@ # -# Specifies third-party dependencies for 'director' +# Specifies third-party dependencies for 'services/director' +# +--constraint ../../../requirements/constraints.txt -# IMPORTANT: All requirements (including the packages in this repository) as FROZEN to those in itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# - current service is going to be replaced by director-v2 -# -# - -# This list was obtained as follows -# -# $ docker pull itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0: Pulling from itisfoundation/director -# Digest: sha256:84ba999ca348bf9d56d9ef0af2e3494ede0cd06d357d289e2a09a4191e7a56d3 -# Status: Image is up to date for itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# docker.io/itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# -# $ docker inspect itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0| jq '.[0] | .RepoTags, .ContainerConfig.Labels' -# [ -# "itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0" -# ] -# { -# "io.osparc.api-version": "0.1.0", -# "maintainer": "sanderegg", -# "org.label-schema.build-date": "2020-11-05T14:02:31Z", -#
"org.label-schema.schema-version": "1.0", -# "org.label-schema.vcs-ref": "c8669fb", -# "org.label-schema.vcs-url": "https://github.com/ITISFoundation/osparc-simcore.git" -# } -# -# $ docker run -it itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 pip freeze -# +# intra-repo required dependencies +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in +# service-library[fastapi] +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_fastapi.in -aiodebug==1.1.2 -aiodocker==0.14.0 -aiohttp==3.3.2 -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df -aiopg==1.0.0 -aiozipkin==0.7.1 -async-generator==1.10 -async-timeout==3.0.1 -asyncio-extras==1.3.2 -attrs==20.2.0 -certifi==2019.3.9 -chardet==3.0.4 -dataclasses==0.7 -idna==2.8 -idna-ssl==1.1.0 -isodate==0.6.0 -jsonschema==2.6.0 -lazy-object-proxy==1.4.3 -multidict==4.5.2 -openapi-core==0.12.0 -openapi-spec-validator==0.2.9 -prometheus-client==0.8.0 -psycopg2-binary==2.8.6 -pydantic==1.7.2 -PyYAML==5.4 # CVE-2020-1747 -requests==2.27.1 # -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library -six==1.12.0 -SQLAlchemy==1.3.20 -strict-rfc3339==0.7 -tenacity==6.0.0 -trafaret==2.1.0 -ujson==4.0.1 -urllib3==1.26.5 # CVE-2021-33503 -Werkzeug==1.0.1 -yarl==1.3.0 +aiocache +aiodocker +fastapi[all] +httpx +prometheus-client +pydantic diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index 2c6e016526f..f88e7c85550 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -1,152 +1,397 @@ -aiodebug==1.1.2 +aio-pika==9.4.3 + # via -r 
requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 # via + # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.in -aiohttp==3.3.2 +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.23.0 # via + # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in - # aiodocker - # aiohttp-apiset - # aiozipkin - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.in -aiopg==1.0.0 - # via - # -r requirements/_base.in - # simcore-service-library -aiozipkin==0.7.1 +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.11.1 # via - # -r requirements/_base.in - # simcore-service-library -async-generator==1.10 + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.1 + # via aiohttp +anyio==4.6.2.post1 # via - # -r requirements/_base.in - # asyncio-extras -async-timeout==3.0.1 + # fast-depends + # faststream + # httpx + # starlette + # 
watchfiles +arrow==1.3.0 # via - # -r requirements/_base.in - # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.in -attrs==20.2.0 + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==24.2.0 # via - # -r requirements/_base.in # aiohttp - # openapi-core - # simcore-service-library -certifi==2019.3.9 + # jsonschema + # referencing +certifi==2024.8.30 # via - # -r requirements/_base.in + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # httpcore + # httpx # requests -chardet==3.0.4 +charset-normalizer==3.4.0 + # via requests +click==8.1.7 + # via + # typer + # uvicorn +deprecated==1.2.14 # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-semantic-conventions +dnspython==2.7.0 + # via email-validator +email-validator==2.2.0 + # via + # fastapi + # pydantic +fast-depends==2.4.12 + # via faststream +fastapi==0.99.1 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # prometheus-fastapi-instrumentator +faststream==0.5.30 + # via -r requirements/../../../packages/service-library/requirements/_base.in +frozenlist==1.5.0 + # via # aiohttp -charset-normalizer==2.0.12 - # via requests -dataclasses==0.7 - # via -r requirements/_base.in -idna==2.8 + # aiosignal +googleapis-common-protos==1.66.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.67.1 + # via opentelemetry-exporter-otlp-proto-grpc +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.6 + # via httpx +httptools==0.6.4 + # via uvicorn +httpx==0.27.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r 
requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # idna-ssl + # fastapi +idna==3.10 + # via + # anyio + # email-validator + # httpx # requests # yarl -idna-ssl==1.1.0 +importlib-metadata==8.5.0 + # via opentelemetry-api +itsdangerous==2.2.0 + # via fastapi +jinja2==3.1.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2023.7.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +multidict==6.1.0 # via - # -r requirements/_base.in # aiohttp -isodate==0.6.0 + # yarl +opentelemetry-api==1.28.1 # via - # -r requirements/_base.in - # openapi-core -jsonschema==2.6.0 + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # 
opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.28.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-exporter-otlp-proto-common==1.28.1 # via - # -r requirements/_base.in - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.28.1 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.28.1 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.49b1 # via - # -r requirements/_base.in - # openapi-core - # simcore-service-library -multidict==4.5.2 + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-asgi==0.49b1 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-fastapi==0.49b1 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-httpx==0.49b1 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-redis==0.49b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-requests==0.49b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.28.1 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.28.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.49b1 + # via + # 
opentelemetry-instrumentation + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.49b1 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-requests +orjson==3.10.11 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi +packaging==24.2 + # via opentelemetry-instrumentation +pamqp==3.3.0 + # via aiormq +prometheus-client==0.21.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in + # prometheus-fastapi-instrumentator +prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +propcache==0.2.0 + # via # aiohttp # yarl -openapi-core==0.12.0 +protobuf==5.28.3 # via - # -r requirements/_base.in - # simcore-service-library -openapi-spec-validator==0.2.9 + # googleapis-common-protos + # opentelemetry-proto 
+psutil==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pydantic==1.10.19 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in - # openapi-core -prometheus-client==0.8.0 + # fast-depends + # fastapi +pygments==2.18.0 + # via rich +pyinstrument==5.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +python-dateutil==2.9.0.post0 + # via arrow +python-dotenv==1.0.1 + # via uvicorn +python-multipart==0.0.17 + # via fastapi +pyyaml==6.0.2 # via - # -r requirements/_base.in - # simcore-service-library -psycopg2-binary==2.8.6 + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # fastapi + # uvicorn +redis==5.2.0 # via - # -r requirements/_base.in - # aiopg - # simcore-service-library -pydantic==1.7.2 + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +referencing==0.29.3 # via - # -r requirements/_base.in - # simcore-service-library -pyyaml==5.4 + # -c requirements/../../../packages/service-library/requirements/./constraints.txt + # jsonschema + # jsonschema-specifications +repro-zipfile==0.3.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +requests==2.32.3 + # via opentelemetry-exporter-otlp-proto-http +rich==13.9.4 # via - # -r requirements/_base.in - # aiohttp-apiset - # 
openapi-spec-validator - # simcore-service-library -requests==2.27.1 - # via -r requirements/_base.in -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library - # via -r requirements/_base.in -six==1.12.0 + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.21.0 # via - # -r requirements/_base.in - # isodate - # openapi-core - # openapi-spec-validator - # tenacity -sqlalchemy==1.3.20 + # jsonschema + # referencing +shellingham==1.5.4 + # via typer +six==1.16.0 + # via python-dateutil +sniffio==1.3.1 # via - # -r requirements/_base.in - # simcore-service-library -strict-rfc3339==0.7 + # anyio + # httpx +starlette==0.27.0 # via - # -r requirements/_base.in - # openapi-core -tenacity==6.0.0 + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +tenacity==9.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +toolz==1.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.67.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.13.0 # via - # 
-r requirements/_base.in - # simcore-service-library -trafaret==2.1.0 + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20241003 + # via arrow +typing-extensions==4.12.2 # via - # -r requirements/_base.in - # simcore-service-library -ujson==4.0.1 + # aiodebug + # fastapi + # faststream + # opentelemetry-sdk + # pydantic + # typer +ujson==5.10.0 # via - # -r requirements/_base.in - # simcore-service-library -urllib3==1.26.5 + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +urllib3==2.2.3 # via - # -r requirements/_base.in + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../requirements/constraints.txt # requests -werkzeug==1.0.1 +uvicorn==0.32.0 # via - # -r requirements/_base.in - # simcore-service-library -yarl==1.3.0 + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # fastapi +uvloop==0.21.0 + # via uvicorn +watchfiles==0.24.0 + # via uvicorn +websockets==14.1 + # via uvicorn +wrapt==1.16.0 # via - # -r requirements/_base.in - # aiodocker + # deprecated + # opentelemetry-instrumentation + # opentelemetry-instrumentation-httpx + # opentelemetry-instrumentation-redis +yarl==1.17.1 + # via + # aio-pika # aiohttp + # aiormq +zipp==3.21.0 + # via importlib-metadata diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index d480d049a73..a6bd90a3acf 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -1,32 +1,25 @@ +# Specifies dependencies required to run 'services/api-server/test' +# both for unit and integration tests!! # -# Specifies dependencies required to run 'director' -# - -# frozen specs ---requirement _base.txt +--constraint ../../../requirements/constraints.txt -# NOTE: -# FROZEN (see notes in _base.in) -# DO NOT CHANGE ANYTHING HERE. -# IT WON'T HAVE ANY EFFECT +# Adds base AS CONSTRAINT specs, not requirement. 
+# - Resulting _text.txt is a frozen list of EXTRA packages for testing, besides _base.txt # - -# FROZEN as well (DO NOT CHANGE anything in pytest-simcore, it will have no effect in the director package) -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#egg=pytest-simcore&subdirectory=packages/pytest-simcore - +--constraint _base.txt # testing +asgi_lifespan aioresponses -coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 docker -openapi-spec-validator~=0.2 # TODO: this library is limiting jsonschema<3 -ptvsd -pylint +faker +jsonref pytest -pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 +pytest-asyncio pytest-cov +pytest-docker pytest-instafail pytest-mock pytest-runner pytest-sugar -python-dotenv +respx diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 97f4c9313fa..8d14d466266 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -1,251 +1,150 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_test.txt --strip-extras requirements/_test.in -# -aiodebug==1.1.2 - # via - # -r requirements/_base.txt - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.txt -aiohttp==3.3.2 - # via - # -r requirements/_base.txt - # aiodocker - # aiohttp-apiset +aiohappyeyeballs==2.4.3 + # via + # -c requirements/_base.txt + # aiohttp +aiohttp==3.11.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt # aioresponses - # aiozipkin - # pytest-aiohttp - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.txt -aiopg==1.0.0 - # via - # -r 
requirements/_base.txt - # simcore-service-library -aioresponses==0.7.2 +aioresponses==0.7.6 # via -r requirements/_test.in -aiozipkin==0.7.1 +aiosignal==1.3.1 # via - # -r requirements/_base.txt - # simcore-service-library -astroid==2.4.2 - # via pylint -async-generator==1.10 + # -c requirements/_base.txt + # aiohttp +anyio==4.6.2.post1 # via - # -r requirements/_base.txt - # asyncio-extras -async-timeout==3.0.1 + # -c requirements/_base.txt + # httpx +asgi-lifespan==2.1.0 + # via -r requirements/_test.in +attrs==24.2.0 # via - # -r requirements/_base.txt + # -c requirements/_base.txt # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.txt -attrs==20.2.0 + # pytest-docker +certifi==2024.8.30 # via - # -r requirements/_base.txt - # aiohttp - # openapi-core - # pytest - # simcore-service-library -certifi==2019.3.9 + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.0 # via - # -r requirements/_base.txt + # -c requirements/_base.txt # requests -chardet==3.0.4 +coverage==7.6.5 + # via pytest-cov +docker==7.1.0 + # via -r requirements/_test.in +faker==33.0.0 + # via -r requirements/_test.in +frozenlist==1.5.0 # via - # -r requirements/_base.txt + # -c requirements/_base.txt # aiohttp -charset-normalizer==2.0.12 + # aiosignal +h11==0.14.0 # via - # -r requirements/_base.txt - # requests -coverage==4.5.1 + # -c requirements/_base.txt + # httpcore +httpcore==1.0.6 # via - # -r requirements/_test.in - # coveralls - # pytest-cov -dataclasses==0.7 + # -c requirements/_base.txt + # httpx +httpx==0.27.2 # via - # -r requirements/_base.txt - # pydantic -docker==4.3.1 - # via -r requirements/_test.in -docopt==0.6.2 - # via coveralls -idna==2.8 + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # respx +idna==3.10 # via - # -r requirements/_base.txt - # idna-ssl + # -c requirements/_base.txt + # anyio + # httpx # requests # yarl 
-idna-ssl==1.1.0 - # via - # -r requirements/_base.txt - # aiohttp -importlib-metadata==2.0.0 - # via - # pluggy - # pytest -iniconfig==1.1.1 +iniconfig==2.0.0 # via pytest -isodate==0.6.0 - # via - # -r requirements/_base.txt - # openapi-core -isort==5.6.4 - # via pylint -jsonschema==2.6.0 - # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 - # via - # -r requirements/_base.txt - # astroid - # openapi-core - # simcore-service-library -mccabe==0.6.1 - # via pylint -multidict==4.5.2 - # via - # -r requirements/_base.txt +jsonref==1.1.0 + # via -r requirements/_test.in +multidict==6.1.0 + # via + # -c requirements/_base.txt # aiohttp # yarl -openapi-core==0.12.0 - # via - # -r requirements/_base.txt - # simcore-service-library -openapi-spec-validator==0.2.9 - # via - # -r requirements/_base.txt - # -r requirements/_test.in - # openapi-core -packaging==20.4 +packaging==24.2 # via + # -c requirements/_base.txt # pytest # pytest-sugar -pluggy==0.13.1 - # via pytest -prometheus-client==0.8.0 - # via - # -r requirements/_base.txt - # simcore-service-library -psycopg2-binary==2.8.6 - # via - # -r requirements/_base.txt - # aiopg - # simcore-service-library -ptvsd==4.3.2 - # via -r requirements/_test.in -py==1.9.0 +pluggy==1.5.0 # via pytest -pydantic==1.7.2 +propcache==0.2.0 # via - # -r requirements/_base.txt - # simcore-service-library -pylint==2.6.0 - # via -r requirements/_test.in -pyparsing==2.4.7 - # via packaging -pytest==6.1.2 + # -c requirements/_base.txt + # aiohttp + # yarl +pytest==8.3.3 # via # -r requirements/_test.in - # pytest-aiohttp + # pytest-asyncio # pytest-cov + # pytest-docker # pytest-instafail # pytest-mock - # pytest-simcore # pytest-sugar -pytest-aiohttp==0.3.0 - # via -r requirements/_test.in -pytest-cov==2.10.1 - # via -r requirements/_test.in -pytest-instafail==0.4.2 +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # 
-r requirements/_test.in +pytest-cov==6.0.0 # via -r requirements/_test.in -pytest-mock==3.3.1 +pytest-docker==3.1.1 # via -r requirements/_test.in -pytest-runner==5.2 +pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#subdirectory=packages/pytest-simcore +pytest-mock==3.14.0 # via -r requirements/_test.in -pytest-sugar==0.9.4 +pytest-runner==6.0.1 # via -r requirements/_test.in -python-dotenv==0.15.0 +pytest-sugar==1.0.0 # via -r requirements/_test.in -pyyaml==5.4 +python-dateutil==2.9.0.post0 # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -requests==2.27.1 + # -c requirements/_base.txt + # faker +requests==2.32.3 # via - # -r requirements/_base.txt - # codecov - # coveralls + # -c requirements/_base.txt # docker -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#subdirectory=packages/service-library - # via -r requirements/_base.txt -six==1.12.0 - # via - # -r requirements/_base.txt - # astroid - # docker - # isodate - # openapi-core - # openapi-spec-validator - # packaging - # tenacity - # websocket-client -sqlalchemy==1.3.20 - # via - # -r requirements/_base.txt - # simcore-service-library -strict-rfc3339==0.7 - # via - # -r requirements/_base.txt - # openapi-core -tenacity==6.0.0 - # via - # -r requirements/_base.txt - # simcore-service-library -termcolor==1.1.0 - # via pytest-sugar -toml==0.10.2 +respx==0.21.1 + # via -r requirements/_test.in +six==1.16.0 # via - # pylint - # pytest -trafaret==2.1.0 + # -c requirements/_base.txt + # python-dateutil +sniffio==1.3.1 # via - # -r requirements/_base.txt - # simcore-service-library -typed-ast==1.4.1 - # via astroid -ujson==4.0.1 + # -c requirements/_base.txt + # anyio + # asgi-lifespan + # httpx +termcolor==2.5.0 + # via pytest-sugar +typing-extensions==4.12.2 
# via - # -r requirements/_base.txt - # simcore-service-library -urllib3==1.26.5 + # -c requirements/_base.txt + # faker +urllib3==2.2.3 # via - # -r requirements/_base.txt + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker # requests -websocket-client==0.57.0 - # via docker -werkzeug==1.0.1 - # via - # -r requirements/_base.txt - # simcore-service-library -wrapt==1.12.1 - # via astroid -yarl==1.3.0 - # via - # -r requirements/_base.txt - # aiodocker +yarl==1.17.1 + # via + # -c requirements/_base.txt # aiohttp -zipp==3.4.0 - # via importlib-metadata diff --git a/services/director/requirements/_tools.in b/services/director/requirements/_tools.in index e69de29bb2d..52a9a39d162 100644 --- a/services/director/requirements/_tools.in +++ b/services/director/requirements/_tools.in @@ -0,0 +1,7 @@ +--constraint ../../../requirements/constraints.txt +--constraint _base.txt +--constraint _test.txt + +--requirement ../../../requirements/devenv.txt + +watchdog[watchmedo] diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index e69de29bb2d..815963069c8 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -0,0 +1,85 @@ +astroid==3.3.5 + # via pylint +black==24.10.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # -c requirements/_base.txt + # black + # pip-tools +dill==0.3.9 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.16.1 + # via virtualenv +identify==2.6.2 + # via pre-commit +isort==5.13.2 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.13.0 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.0.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via 
pre-commit +packaging==24.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==24.3.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt +platformdirs==4.3.6 + # via + # black + # pylint + # virtualenv +pre-commit==4.0.1 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.1 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pre-commit + # watchdog +ruff==0.7.3 + # via -r requirements/../../../requirements/devenv.txt +setuptools==75.5.0 + # via pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.27.1 + # via pre-commit +watchdog==6.0.0 + # via -r requirements/_tools.in +wheel==0.45.0 + # via pip-tools diff --git a/services/director/requirements/ci.txt b/services/director/requirements/ci.txt index 8edcd5f2bfe..e805fec3802 100644 --- a/services/director/requirements/ci.txt +++ b/services/director/requirements/ci.txt @@ -7,7 +7,15 @@ # # installs base + tests requirements +--requirement _base.txt --requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +pytest-simcore @ ../../packages/pytest-simcore/ +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ # installs current package -. +simcore-service-director @ . 
diff --git a/services/director/requirements/dev.txt b/services/director/requirements/dev.txt index dac3f0a494b..f278b7206fd 100644 --- a/services/director/requirements/dev.txt +++ b/services/director/requirements/dev.txt @@ -12,5 +12,12 @@ --requirement _test.txt --requirement _tools.txt + +# installs this repo's packages +--editable ../../packages/models-library +--editable ../../packages/pytest-simcore/ +--editable ../../packages/service-library[fastapi] +--editable ../../packages/settings-library/ + # installs current package --editable . diff --git a/services/director/requirements/prod.txt b/services/director/requirements/prod.txt index dc0ec561efe..8a8b1d29125 100644 --- a/services/director/requirements/prod.txt +++ b/services/director/requirements/prod.txt @@ -9,5 +9,10 @@ # installs base requirements --requirement _base.txt +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ + # installs current package -. +simcore-service-director @ . 
diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 8e7e8ea592f..eb3d7554b27 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -1,14 +1,19 @@ [bumpversion] -current_version = 0.1.0 +current_version = 1.0.0 commit = True message = director api version: {current_version} → {new_version} tag = False commit_args = --no-verify -[bumpversion:file:setup.py] -search = "{current_version}" -replace = "{new_version}" - [bumpversion:file:VERSION] -[bumpversion:file:../../api/specs/director/openapi.yaml] -[bumpversion:file:./src/simcore_service_director/api/v0/openapi.yaml] + + +[tool:pytest] +asyncio_mode = auto +markers = + testit: "marks test to run during development" + + +[mypy] +plugins = + pydantic.mypy diff --git a/services/director/setup.py b/services/director/setup.py index 8c12d36f5cb..9577a7ffc86 100644 --- a/services/director/setup.py +++ b/services/director/setup.py @@ -4,63 +4,65 @@ from setuptools import find_packages, setup -here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -if not (sys.version_info.major == 3 and sys.version_info.minor == 6): - raise RuntimeError( - "Requires <=3.6, got %s. Did you forget to activate virtualenv?" 
- % sys.version_info - ) +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } -def read_reqs(reqs_path: Path): - reqs = re.findall( - r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", reqs_path.read_text(), re.MULTILINE - ) - # TODO: temporary excluding requirements using git - # https://pip.pypa.io/en/stable/reference/pip_install/#vcs-support - return [r for r in reqs if not r.startswith("git")] +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -install_requirements = read_reqs(here / "requirements" / "_base.txt") + [ - "aiohttp-apiset", - "simcore-service-library", -] +NAME = "simcore-service-director" +VERSION = (CURRENT_DIR / "VERSION").read_text().strip() +AUTHORS = ("Sylvain Anderegg (sanderegg)",) +DESCRIPTION = "oSparc Director webserver service" +README = (CURRENT_DIR / "README.md").read_text() -test_requirements = read_reqs(here / "requirements" / "_test.txt") +PROD_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.txt") + | { + "simcore-models-library", + "simcore-service-library[fastapi]", + "simcore-settings-library", + } +) + +TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -_CONFIG = dict( - name="simcore-service-director", - version="0.1.0", - description="oSparc Director webserver service", - author="Sylvain Anderegg (sanderegg)", - python_requires="~=3.6", - packages=find_packages(where="src"), - package_dir={ + +SETUP = { + "name": NAME, + "version": VERSION, + "author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.11", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - install_requires=install_requirements, - tests_require=test_requirements, - setup_requires=["pytest-runner"], - package_data={ - 
"": ["api/v0/openapi.yaml", "api/v0/schemas/*.json"], - }, - entry_points={ + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ - "simcore-service-director = simcore_service_director.__main__:main", - "simcore-service = simcore_service_director.__main__:main", + "simcore-service-director = simcore_service_director.cli:main", + "simcore-service = simcore_service_director.cli:main", ], }, -) - - -def main(): - """Execute the setup commands.""" - setup(**_CONFIG) - return 0 # syccessful termination - +} if __name__ == "__main__": - raise SystemExit(main()) + setup(**SETUP) diff --git a/services/director/src/simcore_service_director/__main__.py b/services/director/src/simcore_service_director/__main__.py deleted file mode 100644 index 73227b1c129..00000000000 --- a/services/director/src/simcore_service_director/__main__.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python3 - -from .main import main - -main() diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py new file mode 100644 index 00000000000..5bf4218d678 --- /dev/null +++ b/services/director/src/simcore_service_director/_meta.py @@ -0,0 +1,43 @@ +""" Application's metadata + +""" + +from typing import Final + +from models_library.basic_types import VersionStr, VersionTag +from packaging.version import Version +from servicelib.utils_meta import PackageInfo + +info: Final = PackageInfo(package_name="simcore-service-director") +__version__: Final[VersionStr] = info.__version__ + + +PROJECT_NAME: Final[str] = info.project_name +VERSION: Final[Version] = info.version +API_VERSION: Final[VersionStr] = info.__version__ +APP_NAME: Final[str] = PROJECT_NAME +API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) +SUMMARY: Final[str] = info.get_summary() + + +# 
NOTE: https://patorjk.com/software/taag/#p=display&f=Electronic&t=Director-v0 +APP_STARTED_BANNER_MSG = r""" + + ▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄ ▄ ▄▄▄▄▄▄▄▄▄ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░▌▐░░░░░░░░░▌ +▐░█▀▀▀▀▀▀▀█░▌▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀█░▌ ▐░▌ ▐░▌▐░█░█▀▀▀▀▀█░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▄▄▄▄▄▄▄█░▌ ▄▄▄▄▄▄▄▄▄▄▄▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▀▀▀▀█░█▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▀▀▀▀█░█▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ +▐░█▄▄▄▄▄▄▄█░▌▄▄▄▄█░█▄▄▄▄ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░▌ ▐░▌ ▐░▐░▌ ▐░█▄▄▄▄▄█░█░▌ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░▌ + ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀ + {} +""".format( + f"v{__version__}" +) + + +APP_FINISHED_BANNER_MSG = info.get_finished_banner() diff --git a/services/director/tests/helpers/__init__.py b/services/director/src/simcore_service_director/api/__init__.py similarity index 100% rename from services/director/tests/helpers/__init__.py rename to services/director/src/simcore_service_director/api/__init__.py diff --git a/services/director/src/simcore_service_director/api/rest/__init__.py b/services/director/src/simcore_service_director/api/rest/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/api/rest/_health.py b/services/director/src/simcore_service_director/api/rest/_health.py new file mode 100644 index 00000000000..19a00014b33 --- /dev/null +++ 
b/services/director/src/simcore_service_director/api/rest/_health.py @@ -0,0 +1,16 @@ +import arrow +from fastapi import APIRouter +from fastapi.responses import PlainTextResponse + +router = APIRouter() + + +@router.api_route( + "/", + methods=["GET", "HEAD"], + include_in_schema=False, + response_class=PlainTextResponse, +) +async def health_check() -> str: + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py new file mode 100644 index 00000000000..61457413688 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -0,0 +1,134 @@ +import logging +from pathlib import Path +from typing import Annotated, Any +from uuid import UUID + +from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException, status +from models_library.generics import Envelope +from models_library.projects import ProjectID +from models_library.services_types import ServiceKey, ServiceVersion +from models_library.users import UserID +from servicelib.fastapi.dependencies import get_app + +from ... 
import producer +from ...core.errors import ( + RegistryConnectionError, + ServiceNotAvailableError, + ServiceUUIDInUseError, + ServiceUUIDNotFoundError, +) + +router = APIRouter() + +_logger = logging.getLogger(__name__) + + +@router.get("/running_interactive_services") +async def list_running_services( + the_app: Annotated[FastAPI, Depends(get_app)], + user_id: UserID | None = None, + project_id: ProjectID | None = None, +) -> Envelope[list[dict[str, Any]]]: + _logger.debug( + "Client does list_running_services request user_id %s, project_id %s", + user_id, + project_id, + ) + services = await producer.get_services_details( + the_app, + f"{user_id}" if user_id else None, + f"{project_id}" if project_id else None, + ) + return Envelope[list[dict[str, Any]]](data=services) + + +@router.post( + "/running_interactive_services", + status_code=status.HTTP_201_CREATED, +) +async def start_service( + the_app: Annotated[FastAPI, Depends(get_app)], + user_id: UserID, + project_id: ProjectID, + service_key: ServiceKey, + service_uuid: UUID, + service_basepath: Path = Path(), + service_tag: ServiceVersion | None = None, + x_simcore_user_agent: str = Header(...), +) -> Envelope[dict[str, Any]]: + _logger.debug( + "Client does start_service with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", + user_id, + project_id, + service_key, + service_tag, + service_uuid, + service_basepath, + x_simcore_user_agent, + ) + try: + service = await producer.start_service( + the_app, + f"{user_id}", + f"{project_id}", + service_key, + service_tag, + f"{service_uuid}", + f"{service_basepath}", + x_simcore_user_agent, + ) + return Envelope[dict[str, Any]](data=service) + except ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except ServiceUUIDInUseError as err: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail=f"{err}" + ) from err + 
except RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + + +@router.get("/running_interactive_services/{service_uuid}") +async def get_running_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, +) -> Envelope[dict[str, Any]]: + _logger.debug( + "Client does get_running_service with service_uuid %s", + service_uuid, + ) + try: + service = await producer.get_service_details(the_app, f"{service_uuid}") + return Envelope[dict[str, Any]](data=service) + except ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + + +@router.delete( + "/running_interactive_services/{service_uuid}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def stop_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, + save_state: bool = True, +) -> None: + _logger.debug( + "Client does stop_service with service_uuid %s", + service_uuid, + ) + try: + await producer.stop_service( + the_app, node_uuid=f"{service_uuid}", save_state=save_state + ) + + except ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py new file mode 100644 index 00000000000..ab61e8ac1ad --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -0,0 +1,40 @@ +import logging +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope +from models_library.services_types import ServiceKey, ServiceVersion +from servicelib.fastapi.dependencies import get_app + +from ... 
import registry_proxy +from ...core.errors import RegistryConnectionError, ServiceNotAvailableError + +router = APIRouter() + +_logger = logging.getLogger(__name__) + + +@router.get("/service_extras/{service_key:path}/{service_version}") +async def list_service_extras( + the_app: Annotated[FastAPI, Depends(get_app)], + service_key: ServiceKey, + service_version: ServiceVersion, +) -> Envelope[dict[str, Any]]: + _logger.debug( + "Client does service_extras_by_key_version_get request with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_extras = await registry_proxy.get_service_extras( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_extras) + except ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py new file mode 100644 index 00000000000..157f5305d1b --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -0,0 +1,123 @@ +import logging +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope +from models_library.services_enums import ServiceType +from models_library.services_types import ServiceKey, ServiceVersion +from pydantic import BaseModel +from servicelib.fastapi.dependencies import get_app + +from ... 
import registry_proxy +from ...core.errors import RegistryConnectionError, ServiceNotAvailableError + +router = APIRouter() + +_logger = logging.getLogger(__name__) + + +class _ErrorMessage(BaseModel): + message: str + + +@router.get( + "/services", + response_model=Envelope[list[dict[str, Any]]], + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": _ErrorMessage, + "description": "Could not connect with Docker Registry", + }, + status.HTTP_500_INTERNAL_SERVER_ERROR: { + "model": _ErrorMessage, + "description": "Unexpected error", + }, + }, +) +async def list_services( + the_app: Annotated[FastAPI, Depends(get_app)], + service_type: ServiceType | None = None, +) -> Envelope[list[dict[str, Any]]]: + _logger.debug( + "Client does list_services request with service_type %s", + service_type, + ) + try: + services: list[dict[str, Any]] = [] + if not service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.ALL + ) + elif service_type is ServiceType.COMPUTATIONAL: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.COMPUTATIONAL + ) + elif service_type is ServiceType.DYNAMIC: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.DYNAMIC + ) + # NOTE: the validation is done in the catalog. This entrypoint IS and MUST BE only used by the catalog!! 
+ # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] + # services = node_validator.validate_nodes(services) + return Envelope[list[dict[str, Any]]](data=services) + except RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + + +# NOTE: be careful that /labels must be defined before the more generic get_service +@router.get("/services/{service_key:path}/{service_version}/labels") +async def list_service_labels( + the_app: Annotated[FastAPI, Depends(get_app)], + service_key: ServiceKey, + service_version: ServiceVersion, +) -> Envelope[dict[str, Any]]: + _logger.debug( + "Retrieving service labels with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_labels, _ = await registry_proxy.get_image_labels( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_labels) + + except ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + + except RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + + +@router.get("/services/{service_key:path}/{service_version}") +async def get_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_key: ServiceKey, + service_version: ServiceVersion, +) -> Envelope[list[dict[str, Any]]]: + _logger.debug( + "Client does get_service with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + services = [ + await registry_proxy.get_image_details( + the_app, service_key, service_version + ) + ] + return Envelope[list[dict[str, Any]]](data=services) + except ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except RegistryConnectionError as err: + 
raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py new file mode 100644 index 00000000000..3d789ba02ef --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -0,0 +1,30 @@ +from typing import Final + +from fastapi import APIRouter, FastAPI, HTTPException +from servicelib.fastapi.exceptions_utils import ( + handle_errors_as_500, + http_exception_as_json_response, +) + +from . import _health, _running_interactive_services, _service_extras, _services + +_V0_VTAG: Final[str] = "v0" + + +def setup_api_routes(app: FastAPI): + """ + Composes resources/sub-resources routers + """ + + app.include_router(_health.router, tags=["operations"]) + app.include_router(_health.router, tags=["operations"], prefix=f"/{_V0_VTAG}") + + # include the rest under /vX + api_router = APIRouter(prefix=f"/{_V0_VTAG}") + api_router.include_router(_services.router, tags=["services"]) + api_router.include_router(_service_extras.router, tags=["services"]) + api_router.include_router(_running_interactive_services.router, tags=["services"]) + app.include_router(api_router) + + app.add_exception_handler(Exception, handle_errors_as_500) + app.add_exception_handler(HTTPException, http_exception_as_json_response) diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py deleted file mode 100644 index 431a7216e90..00000000000 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ /dev/null @@ -1,31 +0,0 @@ -from functools import wraps -from typing import Coroutine, Dict, Tuple - -from aiohttp import web -from simcore_service_director import config - - -def cache_requests(func: Coroutine, no_cache: bool = False): - @wraps(func) - async def wrapped( - 
app: web.Application, url: str, method: str, *args, **kwargs - ) -> Tuple[Dict, Dict]: - is_cache_enabled = config.DIRECTOR_REGISTRY_CACHING and method == "GET" - cache_key = f"{url}:{method}" - if is_cache_enabled and not no_cache: - cache_data = app[config.APP_REGISTRY_CACHE_DATA_KEY] - if cache_key in cache_data: - return cache_data[cache_key] - - resp_data, resp_headers = await func(app, url, method, *args, **kwargs) - - if is_cache_enabled and not no_cache: - cache_data = app[config.APP_REGISTRY_CACHE_DATA_KEY] - cache_data[cache_key] = (resp_data, resp_headers) - - return (resp_data, resp_headers) - - return wrapped - - -__all__ = ["cache_requests"] diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py new file mode 100644 index 00000000000..f2e16f6b97e --- /dev/null +++ b/services/director/src/simcore_service_director/cli.py @@ -0,0 +1,26 @@ +import logging + +import typer +from settings_library.utils_cli import create_settings_command, create_version_callback + +from ._meta import PROJECT_NAME, __version__ +from .core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +main = typer.Typer(name=PROJECT_NAME) + +main.command()( + create_settings_command(settings_cls=ApplicationSettings, logger=_logger) +) +main.callback()(create_version_callback(__version__)) + + +@main.command() +def run(): + """Runs application""" + typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") + typer.secho( + "$ uvicorn simcore_service_director.main:the_app", + fg=typer.colors.BLUE, + ) diff --git a/services/director/src/simcore_service_director/client_session.py b/services/director/src/simcore_service_director/client_session.py new file mode 100644 index 00000000000..74647f13822 --- /dev/null +++ b/services/director/src/simcore_service_director/client_session.py @@ -0,0 +1,41 @@ +from aiohttp import ClientSession, ClientTimeout +from fastapi import FastAPI +from models_library.utils.json_serialization import json_dumps +from servicelib.utils import ( + get_http_client_request_aiohttp_connect_timeout, + get_http_client_request_aiohttp_sock_connect_timeout, + get_http_client_request_total_timeout, +) + + +def setup_client_session(app: FastAPI) -> None: + async def on_startup() -> None: + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/4628 + + # ANE: it is important to have fast connection handshakes + # also requests should be as fast as possible + # some services are not that fast to reply + timeout_settings = ClientTimeout( + total=get_http_client_request_total_timeout(), + connect=get_http_client_request_aiohttp_connect_timeout(), + sock_connect=get_http_client_request_aiohttp_sock_connect_timeout(), + ) + session = ClientSession( + timeout=timeout_settings, + json_serialize=json_dumps, + ) + app.state.aiohttp_client_session = session + + async def on_shutdown() -> None: + session = app.state.aiohttp_client_session + assert isinstance(session, ClientSession) # nosec + await session.close() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_client_session(app: FastAPI) -> ClientSession: + session = app.state.aiohttp_client_session + assert isinstance(session, ClientSession) # nosec + return session diff --git a/services/director/src/simcore_service_director/config.py b/services/director/src/simcore_service_director/config.py deleted file mode 100644 index 
67a15cb05ac..00000000000 --- a/services/director/src/simcore_service_director/config.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Director service configuration -""" - -import json -import logging -import os -import warnings -from distutils.util import strtobool -from typing import Dict, Optional - -from servicelib.client_session import ( # pylint: disable=no-name-in-module - APP_CLIENT_SESSION_KEY, -) - -LOGLEVEL_STR = os.environ.get("LOGLEVEL", "WARNING").upper() -log_level = getattr(logging, LOGLEVEL_STR) -logging.basicConfig( - level=log_level, - format="%(levelname)s:%(name)s-%(lineno)d: %(message)s", -) -logging.root.setLevel(log_level) - -# TODO: debug mode is define by the LOG-LEVEL and not the other way around. I leave it like that for the moment ... -DEBUG_MODE = log_level == logging.DEBUG - -API_VERSION: str = "v0" -API_ROOT: str = "api" - - -def _from_env_with_default(env: str, python_type, default): - env_value = python_type(os.environ.get(env, default)) - - return default if env_value <= 0 else env_value - - -# NOTE: these settings must be in sync with settings-library: comp_services.py (since the director is frozen) -DEFAULT_MAX_NANO_CPUS: int = _from_env_with_default( - "DEFAULT_MAX_NANO_CPUS", int, 1 * pow(10, 9) -) -DEFAULT_MAX_MEMORY: int = _from_env_with_default( - "DEFAULT_MAX_MEMORY", int, 2 * pow(1024, 3) -) # 2 GiB - -SERVICE_RUNTIME_SETTINGS: str = "simcore.service.settings" -SERVICE_REVERSE_PROXY_SETTINGS: str = "simcore.service.reverse-proxy-settings" -SERVICE_RUNTIME_BOOTSETTINGS: str = "simcore.service.bootsettings" - -ORG_LABELS_TO_SCHEMA_LABELS = { - "org.label-schema.build-date": "build_date", - "org.label-schema.vcs-ref": "vcs_ref", - "org.label-schema.vcs-url": "vcs_url", -} - -DIRECTOR_REGISTRY_CACHING: bool = strtobool( - os.environ.get("DIRECTOR_REGISTRY_CACHING", "True") -) -DIRECTOR_REGISTRY_CACHING_TTL: int = int( - os.environ.get("DIRECTOR_REGISTRY_CACHING_TTL", 15 * 60) -) - -DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str = 
os.environ.get( - "DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS", "" -) - - -def _parse_placement_substitutions() -> Dict[str, str]: - str_env_var: str = os.environ.get( - "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", "{}" - ) - result: Dict[str, str] = json.loads(str_env_var) - - if len(result) > 0: - warnings.warn( # noqa: B028 - "Generic resources will be replaced by the following " - f"placement constraints {result}. This is a workaround " - "for https://github.com/moby/swarmkit/pull/3162", - UserWarning, - ) - if len(result) != len(set(result.values())): - msg = f"Dictionary values must be unique, provided: {result}" - raise ValueError(msg) - - return result - - -DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: Dict[ - str, str -] = _parse_placement_substitutions() - -# for passing self-signed certificate to spawned services -DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_SECRET_ID", "" -) -DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME", "" -) -DIRECTOR_SELF_SIGNED_SSL_FILENAME: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_FILENAME", "" -) - -DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = int( - os.environ.get("DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS", 10) -) -DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = int( - os.environ.get("DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S", 12) -) - -DIRECTOR_SERVICES_STATE_MONITOR_S: int = int( - os.environ.get("DIRECTOR_SERVICES_STATE_MONITOR_S", 8) -) - -TRAEFIK_SIMCORE_ZONE: str = os.environ.get( - "TRAEFIK_SIMCORE_ZONE", "internal_simcore_stack" -) -APP_REGISTRY_CACHE_DATA_KEY: str = __name__ + "_registry_cache_data" - -REGISTRY_AUTH: bool = strtobool(os.environ.get("REGISTRY_AUTH", "False")) -REGISTRY_USER: str = os.environ.get("REGISTRY_USER", "") -REGISTRY_PW: str = os.environ.get("REGISTRY_PW", "") -REGISTRY_URL: str = os.environ.get("REGISTRY_URL", "") -REGISTRY_PATH: str = 
os.environ.get("REGISTRY_PATH", None) or os.environ.get( - "REGISTRY_URL", "" -) # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine -REGISTRY_SSL: bool = strtobool(os.environ.get("REGISTRY_SSL", "True")) - -EXTRA_HOSTS_SUFFIX: str = os.environ.get("EXTRA_HOSTS_SUFFIX", "undefined") - -# these are the envs passed to the dynamic services by default -SERVICES_DEFAULT_ENVS: Dict[str, str] = { - "POSTGRES_ENDPOINT": os.environ.get( - "POSTGRES_ENDPOINT", "undefined postgres endpoint" - ), - "POSTGRES_USER": os.environ.get("POSTGRES_USER", "undefined postgres user"), - "POSTGRES_PASSWORD": os.environ.get( - "POSTGRES_PASSWORD", "undefined postgres password" - ), - "POSTGRES_DB": os.environ.get("POSTGRES_DB", "undefined postgres db"), - "STORAGE_ENDPOINT": os.environ.get( - "STORAGE_ENDPOINT", "undefined storage endpoint" - ), -} - -# some services need to know the published host to be functional (paraview) -# TODO: please review if needed -PUBLISHED_HOST_NAME: str = os.environ.get("PUBLISHED_HOST_NAME", "") - -SWARM_STACK_NAME: str = os.environ.get("SWARM_STACK_NAME", "undefined-please-check") - -# used when in devel mode vs release mode -NODE_SCHEMA_LOCATION: str = os.environ.get( - "NODE_SCHEMA_LOCATION", f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json" -) -# used to find the right network name -SIMCORE_SERVICES_NETWORK_NAME: Optional[str] = os.environ.get( - "SIMCORE_SERVICES_NETWORK_NAME" -) -# useful when developing with an alternative registry namespace -SIMCORE_SERVICES_PREFIX: str = os.environ.get( - "SIMCORE_SERVICES_PREFIX", "simcore/services" -) - -# monitoring -# NOTE: keep disabled for unit-testing otherwise mocks will not hold -MONITORING_ENABLED: bool = strtobool(os.environ.get("MONITORING_ENABLED", "False")) - -# resources: not taken from servicelib.resources since the director uses a fixed hash of that library -CPU_RESOURCE_LIMIT_KEY = "SIMCORE_NANO_CPUS_LIMIT" 
-MEM_RESOURCE_LIMIT_KEY = "SIMCORE_MEMORY_BYTES_LIMIT" - -__all__ = ["APP_CLIENT_SESSION_KEY"] diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py new file mode 100644 index 00000000000..bb11b71cec9 --- /dev/null +++ b/services/director/src/simcore_service_director/constants.py @@ -0,0 +1,23 @@ +from typing import Final + +SERVICE_RUNTIME_SETTINGS: Final[str] = "simcore.service.settings" +SERVICE_REVERSE_PROXY_SETTINGS: Final[str] = "simcore.service.reverse-proxy-settings" +SERVICE_RUNTIME_BOOTSETTINGS: Final[str] = "simcore.service.bootsettings" + +ORG_LABELS_TO_SCHEMA_LABELS: Final[dict[str, str]] = { + "org.label-schema.build-date": "build_date", + "org.label-schema.vcs-ref": "vcs_ref", + "org.label-schema.vcs-url": "vcs_url", +} + + +CPU_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_NANO_CPUS_LIMIT" +MEM_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_MEMORY_BYTES_LIMIT" + +APP_REGISTRY_CACHE_DATA_KEY: Final[str] = __name__ + "_registry_cache_data" + +API_ROOT: Final[str] = "api" + +DIRECTOR_SIMCORE_SERVICES_PREFIX: Final[str] = "simcore/services" + +DATETIME_FORMAT: Final[str] = "%Y-%m-%dT%H:%M:%S.%f" diff --git a/services/director/src/simcore_service_director/core/__init__.py b/services/director/src/simcore_service_director/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py new file mode 100644 index 00000000000..10fb32b5518 --- /dev/null +++ b/services/director/src/simcore_service_director/core/application.py @@ -0,0 +1,75 @@ +import logging +from typing import Final + +from fastapi import FastAPI +from servicelib.async_utils import cancel_sequential_workers +from servicelib.fastapi.tracing import setup_tracing + +from .._meta import ( + API_VERSION, + API_VTAG, + APP_FINISHED_BANNER_MSG, + APP_NAME, + 
APP_STARTED_BANNER_MSG, +) +from ..api.rest.routes import setup_api_routes +from ..client_session import setup_client_session +from ..instrumentation import setup as setup_instrumentation +from ..registry_proxy import setup as setup_registry +from .settings import ApplicationSettings + +_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR +_NOISY_LOGGERS: Final[tuple[str]] = ("werkzeug",) + +_logger = logging.getLogger(__name__) + + +def create_app(settings: ApplicationSettings) -> FastAPI: + # keep mostly quiet noisy loggers + quiet_level: int = max( + min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + ) + for name in _NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) + + _logger.info("app settings: %s", settings.json(indent=1)) + + app = FastAPI( + debug=settings.DIRECTOR_DEBUG, + title=APP_NAME, + description="Director-v0 service", + version=API_VERSION, + openapi_url=f"/api/{API_VTAG}/openapi.json", + docs_url="/dev/doc", + redoc_url=None, # default disabled + ) + # STATE + app.state.settings = settings + assert app.state.settings.API_VERSION == API_VERSION # nosec + + # PLUGINS SETUP + setup_api_routes(app) + + if app.state.settings.DIRECTOR_TRACING: + setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + + # replace by httpx client + setup_client_session(app) + setup_registry(app) + + setup_instrumentation(app) + + # ERROR HANDLERS + + # EVENTS + async def _on_startup() -> None: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + + async def _on_shutdown() -> None: + await cancel_sequential_workers() + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app diff --git a/services/director/src/simcore_service_director/core/errors.py b/services/director/src/simcore_service_director/core/errors.py new file mode 100644 index 00000000000..ebbf885451b --- /dev/null +++ 
b/services/director/src/simcore_service_director/core/errors.py @@ -0,0 +1,42 @@ +from typing import Any + +from models_library.errors_classes import OsparcErrorMixin + + +class DirectorRuntimeError(OsparcErrorMixin, RuntimeError): + def __init__(self, **ctx: Any) -> None: + super().__init__(**ctx) + + msg_template: str = "Director-v0 unexpected error: {msg}" + + +class ConfigurationError(DirectorRuntimeError): + msg_template: str = "Application misconfiguration: {msg}" + + +class GenericDockerError(DirectorRuntimeError): + msg_template: str = "Docker error: {err}" + + +class ServiceNotAvailableError(DirectorRuntimeError): + msg_template: str = "Service {service_name}:{service_tag} is not available" + + +class ServiceUUIDNotFoundError(DirectorRuntimeError): + msg_template: str = "Service with uuid {service_uuid} was not found" + + +class ServiceUUIDInUseError(DirectorRuntimeError): + msg_template: str = "Service with uuid {service_uuid} is already in use" + + +class ServiceStateSaveError(DirectorRuntimeError): + msg_template: str = "Failed to save state of service {service_uuid}: {reason}" + + +class RegistryConnectionError(DirectorRuntimeError): + msg_template: str = "Unexpected connection error while accessing registry: {msg}" + + +class ServiceStartTimeoutError(DirectorRuntimeError): + msg_template: str = "Service {service_name}:{service_uuid} failed to start in time" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py new file mode 100644 index 00000000000..e0a352aed82 --- /dev/null +++ b/services/director/src/simcore_service_director/core/settings.py @@ -0,0 +1,120 @@ +import datetime +import warnings +from typing import cast + +from fastapi import FastAPI +from models_library.basic_types import LogLevel, PortInt, VersionTag +from pydantic import Field, NonNegativeInt, validator +from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from 
settings_library.application import BaseApplicationSettings +from settings_library.docker_registry import RegistrySettings +from settings_library.postgres import PostgresSettings +from settings_library.tracing import TracingSettings +from settings_library.utils_logging import MixinLoggingSettings + +from .._meta import API_VERSION, API_VTAG, APP_NAME + + +class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): + API_VERSION: str = API_VERSION + APP_NAME: str = APP_NAME + API_VTAG: VersionTag = API_VTAG + + DIRECTOR_DEBUG: bool = Field( + default=False, description="Debug mode", env=["DIRECTOR_DEBUG", "DEBUG"] + ) + DIRECTOR_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) + + DIRECTOR_LOGLEVEL: LogLevel = Field( + ..., env=["DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + ) + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( + ..., + env=[ + "DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ], + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", + ) + DIRECTOR_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( + default_factory=dict, + env=["DIRECTOR_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"], + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ) + DIRECTOR_TRACING: TracingSettings | None = Field( + auto_default_from_env=True, description="settings for opentelemetry tracing" + ) + + DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = Field(default=0) + DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = Field(default=0) + DIRECTOR_REGISTRY_CACHING: bool = Field( + ..., description="cache the docker registry internally" + ) + DIRECTOR_REGISTRY_CACHING_TTL: datetime.timedelta = Field( + ..., description="cache time to live value (defaults to 15 minutes)" + ) + + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str | None + + 
DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[str, str] + + DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = 10 + DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = 12 + DIRECTOR_SERVICES_STATE_MONITOR_S: int = 8 + + DIRECTOR_TRAEFIK_SIMCORE_ZONE: str = Field( + ..., + env=["DIRECTOR_TRAEFIK_SIMCORE_ZONE", "TRAEFIK_SIMCORE_ZONE"], + ) + + DIRECTOR_REGISTRY: RegistrySettings = Field( + auto_default_from_env=True, + description="settings for the private registry deployed with the platform", + ) + + DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + STORAGE_ENDPOINT: str = Field(..., description="storage endpoint without scheme") + + DIRECTOR_PUBLISHED_HOST_NAME: str = Field( + ..., env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] + ) + + DIRECTOR_SWARM_STACK_NAME: str = Field( + ..., + env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], + ) + + DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( + # used to find the right network name + ..., + env=["DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME", "SIMCORE_SERVICES_NETWORK_NAME"], + ) + + DIRECTOR_MONITORING_ENABLED: bool = Field( + ..., env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] + ) + + @validator("DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @classmethod + def _validate_substitutions(cls, v): + if v: + warnings.warn( # noqa: B028 + "Generic resources will be replaced by the following " + f"placement constraints {v}. 
This is a workaround " + "for https://github.com/moby/swarmkit/pull/3162", + UserWarning, + ) + if len(v) != len(set(v.values())): + msg = f"Dictionary values must be unique, provided: {v}" + raise ValueError(msg) + + return v + + @validator("DIRECTOR_LOGLEVEL", pre=True) + @classmethod + def _valid_log_level(cls, value: str) -> str: + return cls.validate_log_level(value) + + +def get_application_settings(app: FastAPI) -> ApplicationSettings: + return cast(ApplicationSettings, app.state.settings) diff --git a/services/director/src/simcore_service_director/docker_utils.py b/services/director/src/simcore_service_director/docker_utils.py index 56dfba1bc3a..7c1a832141a 100644 --- a/services/director/src/simcore_service_director/docker_utils.py +++ b/services/director/src/simcore_service_director/docker_utils.py @@ -1,40 +1,37 @@ import logging +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager import aiodocker -from asyncio_extras import async_contextmanager -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -@async_contextmanager -async def docker_client() -> aiodocker.docker.Docker: +@asynccontextmanager +async def docker_client() -> AsyncIterator[aiodocker.docker.Docker]: try: client = aiodocker.Docker() yield client except aiodocker.exceptions.DockerError: - log.exception(msg="Unexpected error with docker client") + _logger.exception(msg="Unexpected error with docker client") raise finally: await client.close() async def swarm_get_number_nodes() -> int: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list() return len(nodes) async def swarm_has_manager_nodes() -> bool: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list(filters={"role": "manager"}) - if nodes: - return True - return False + return 
bool(nodes) async def swarm_has_worker_nodes() -> bool: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list(filters={"role": "worker"}) - if nodes: - return True - return False + return bool(nodes) diff --git a/services/director/src/simcore_service_director/exceptions.py b/services/director/src/simcore_service_director/exceptions.py deleted file mode 100644 index cdb25145cb2..00000000000 --- a/services/director/src/simcore_service_director/exceptions.py +++ /dev/null @@ -1,87 +0,0 @@ -""" Defines the different exceptions that may arise in the director - - -TODO: Exceptions should provide all info to create Error instances of the API model -For instance, assume there is a ficticious exception class FieldValidationError, then it would -translate into something like - -// response - 422 -{ - "error": { - "status": 422, - "error": "FIELDS_VALIDATION_ERROR", - "description": "One or more fields raised validation errors." - "fields": { - "email": "Invalid email address.", - "password": "Password too short." 
- } - } -} -""" - -from typing import Optional - -from aiodocker.exceptions import DockerError - - -class DirectorException(Exception): - """Basic exception""" - - def __init__(self, msg: Optional[str] = None): - super().__init__(msg or "Unexpected error was triggered") - - -class GenericDockerError(DirectorException): - """Generic docker library error""" - - def __init__(self, msg: str, original_exception: DockerError): - super().__init__(msg + f": {original_exception.message}") - self.original_exception = original_exception - - -class ServiceNotAvailableError(DirectorException): - """Service not found""" - - def __init__(self, service_name: str, service_tag: Optional[str] = None): - service_tag = service_tag or "UNDEFINED" - super().__init__(f"The service {service_name}:{service_tag} does not exist") - self.service_name = service_name - self.service_tag = service_tag - - -class ServiceUUIDNotFoundError(DirectorException): - """Service not found""" - - def __init__(self, service_uuid: str): - super().__init__(f"The service with uuid {service_uuid} was not found") - self.service_uuid = service_uuid - - -class ServiceUUIDInUseError(DirectorException): - """Service UUID is already in use""" - - def __init__(self, service_uuid: str): - super().__init__(f"The service uuid {service_uuid} is already in use") - self.service_uuid = service_uuid - - -class ServiceStateSaveError(DirectorException): - def __init__(self, service_uuid: str, reason: str): - super().__init__(f"Failed to save state of service {service_uuid}: {reason}") - self.service_uuid = service_uuid - - -class RegistryConnectionError(DirectorException): - """Error while connecting to the docker regitry""" - - def __init__(self, msg: str): - super().__init__(msg or "Unexpected connection error while accessing registry") - - -class ServiceStartTimeoutError(DirectorException): - """The service was created but never run (time-out)""" - - def __init__(self, service_name: str, service_uuid: str): - 
super().__init__(f"Service {service_name}:{service_uuid} failed to start ") - self.service_name = service_name - self.service_uuid = service_uuid diff --git a/services/director/src/simcore_service_director/instrumentation.py b/services/director/src/simcore_service_director/instrumentation.py new file mode 100644 index 00000000000..cb63d5f35f5 --- /dev/null +++ b/services/director/src/simcore_service_director/instrumentation.py @@ -0,0 +1,86 @@ +from dataclasses import dataclass, field +from typing import cast + +from fastapi import FastAPI +from prometheus_client import CollectorRegistry, Counter +from servicelib.fastapi.prometheus_instrumentation import ( + setup_prometheus_instrumentation, +) +from servicelib.instrumentation import MetricsBase, get_metrics_namespace + +from ._meta import APP_NAME +from .core.errors import ConfigurationError +from .core.settings import get_application_settings + +MONITOR_SERVICE_STARTED_LABELS: list[str] = [ + "service_key", + "service_tag", + "simcore_user_agent", +] + +MONITOR_SERVICE_STOPPED_LABELS: list[str] = [ + "service_key", + "service_tag", + "result", + "simcore_user_agent", +] + + +@dataclass(slots=True, kw_only=True) +class DirectorV0Instrumentation(MetricsBase): + registry: CollectorRegistry + + services_started: Counter = field(init=False) + services_stopped: Counter = field(init=False) + + def __post_init__(self) -> None: + self.services_started = Counter( + name="services_started_total", + documentation="Counts the services started", + labelnames=MONITOR_SERVICE_STARTED_LABELS, + namespace=get_metrics_namespace(APP_NAME), + subsystem=self.subsystem, + registry=self.registry, + ) + + self.services_stopped = Counter( + name="services_stopped_total", + documentation="Counts the services stopped", + labelnames=MONITOR_SERVICE_STOPPED_LABELS, + namespace=get_metrics_namespace(APP_NAME), + subsystem=self.subsystem, + registry=self.registry, + ) + + +def setup(app: FastAPI) -> None: + app_settings = 
get_application_settings(app) + if not app_settings.DIRECTOR_MONITORING_ENABLED: + return + + # NOTE: this must be setup before application startup + instrumentator = setup_prometheus_instrumentation(app) + + async def on_startup() -> None: + metrics_subsystem = "" + app.state.instrumentation = DirectorV0Instrumentation( + registry=instrumentator.registry, subsystem=metrics_subsystem + ) + + async def on_shutdown() -> None: + ... + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_instrumentation(app: FastAPI) -> DirectorV0Instrumentation: + if not app.state.instrumentation: + raise ConfigurationError( + msg="Instrumentation not setup. Please check the configuration." + ) + return cast(DirectorV0Instrumentation, app.state.instrumentation) + + +def has_instrumentation(app: FastAPI) -> bool: + return hasattr(app.state, "instrumentation") diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 0bf6edccc57..02636e753bf 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,42 +1,23 @@ -#!/usr/bin/env python3 -import logging - -from aiohttp import web - -# NOTE: notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 -# pylint: disable=no-name-in-module -from servicelib.client_session import persistent_client_session -from simcore_service_director import registry_cache_task, resources -from simcore_service_director.monitoring import setup_app_monitoring -from simcore_service_director.rest import routing - -from .registry_proxy import setup_registry - -log = logging.getLogger(__name__) - +"""Main application to be deployed by uvicorn (or equivalent) server -def setup_app() -> web.Application: - api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) - app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) +""" - # 
NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed - app.cleanup_ctx.append(persistent_client_session) - app.cleanup_ctx.append(setup_registry) - - registry_cache_task.setup(app) - - setup_app_monitoring(app, "simcore_service_director") - - # NOTE: removed tracing from director. Users old version of servicelib and - # in any case this service will be completely replaced - - return app +import logging +from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings -def main() -> None: - app = setup_app() - web.run_app(app, port=8080) +_the_settings = ApplicationSettings.create_from_envs() +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.DIRECTOR_LOGLEVEL) +logging.root.setLevel(_the_settings.DIRECTOR_LOGLEVEL) +config_all_loggers( + log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, +) -if __name__ == "__main__": - main() +# SINGLETON FastAPI app +the_app: FastAPI = create_app(_the_settings) diff --git a/services/director/src/simcore_service_director/models/__init__.py b/services/director/src/simcore_service_director/models/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/monitoring.py b/services/director/src/simcore_service_director/monitoring.py deleted file mode 100644 index 403e4ef10ad..00000000000 --- a/services/director/src/simcore_service_director/monitoring.py +++ /dev/null @@ -1,32 +0,0 @@ -import prometheus_client -from aiohttp import web -from prometheus_client import CONTENT_TYPE_LATEST -from prometheus_client.registry import CollectorRegistry - - -from servicelib.monitor_services import ( # pylint: 
disable=no-name-in-module - add_instrumentation as add_services_instrumentation, -) - -from . import config - -kCOLLECTOR_REGISTRY = f"{__name__}.collector_registry" - - -async def metrics_handler(request: web.Request): - # TODO: prometheus_client.generate_latest blocking! -> Consider https://github.com/claws/aioprometheus - reg = request.app[kCOLLECTOR_REGISTRY] - resp = web.Response(body=prometheus_client.generate_latest(registry=reg)) - resp.content_type = CONTENT_TYPE_LATEST - return resp - - -def setup_app_monitoring(app: web.Application, app_name: str) -> None: - if not config.MONITORING_ENABLED: - return - # app-scope registry - app[kCOLLECTOR_REGISTRY] = reg = CollectorRegistry(auto_describe=True) - - add_services_instrumentation(app, reg, app_name) - - app.router.add_get("/metrics", metrics_handler) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index b74da40c913..ff791a4066f 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -1,16 +1,18 @@ import asyncio +import contextlib import json import logging import re -from datetime import datetime, timedelta -from distutils.version import StrictVersion +from datetime import timedelta from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Dict, List, Optional, Set, Tuple +from typing import Any, Final, cast import aiodocker +import aiodocker.networks import aiohttp +import arrow import tenacity from aiohttp import ( ClientConnectionError, @@ -18,27 +20,41 @@ ClientResponse, ClientResponseError, ClientSession, - web, + ClientTimeout, ) +from fastapi import FastAPI +from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context -from servicelib.monitor_services import service_started, service_stopped +from servicelib.docker_utils import to_datetime +from 
settings_library.docker_registry import RegistrySettings from tenacity import retry from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed -from . import config, docker_utils, exceptions, registry_proxy -from .config import ( - APP_CLIENT_SESSION_KEY, +from . import docker_utils, registry_proxy +from .client_session import get_client_session +from .constants import ( CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY, + SERVICE_REVERSE_PROXY_SETTINGS, + SERVICE_RUNTIME_BOOTSETTINGS, + SERVICE_RUNTIME_SETTINGS, ) -from .exceptions import ServiceStateSaveError +from .core.errors import ( + DirectorRuntimeError, + GenericDockerError, + ServiceNotAvailableError, + ServiceStartTimeoutError, + ServiceStateSaveError, + ServiceUUIDInUseError, + ServiceUUIDNotFoundError, +) +from .core.settings import ApplicationSettings, get_application_settings +from .instrumentation import get_instrumentation from .services_common import ServicesCommonSettings -from .system_utils import get_system_extra_hosts_raw -from .utils import parse_as_datetime -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) class ServiceState(Enum): @@ -50,14 +66,17 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> Dict[str, str]: - return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} +async def _create_auth(registry_settings: RegistrySettings) -> dict[str, str]: + return { + "username": registry_settings.REGISTRY_USER, + "password": registry_settings.REGISTRY_PW.get_secret_value(), + } async def _check_node_uuid_available( client: aiodocker.docker.Docker, node_uuid: str ) -> None: - log.debug("Checked if UUID %s is already in use", node_uuid) + _logger.debug("Checked if UUID %s is already in use", node_uuid) # check if service with same uuid already exists try: # not filtering by "swarm_stack_name" label because it's safer @@ -66,25 +85,24 @@ async def 
_check_node_uuid_available( "label": f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}" } ) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while retrieving services list") - raise exceptions.GenericDockerError( - "Error while retrieving services", err - ) from err + except aiodocker.DockerError as err: + msg = "Error while retrieving services" + raise GenericDockerError(err=msg) from err if list_of_running_services_w_uuid: - raise exceptions.ServiceUUIDInUseError(node_uuid) - log.debug("UUID %s is free", node_uuid) + raise ServiceUUIDInUseError(service_uuid=node_uuid) + _logger.debug("UUID %s is free", node_uuid) -def _check_setting_correctness(setting: Dict) -> None: +def _check_setting_correctness(setting: dict) -> None: if "name" not in setting or "type" not in setting or "value" not in setting: - raise exceptions.DirectorException("Invalid setting in %s" % setting) + msg = f"Invalid setting in {setting}" + raise DirectorRuntimeError(msg=msg) -def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: +def _parse_mount_settings(settings: list[dict]) -> list[dict]: mounts = [] for s in settings: - log.debug("Retrieved mount settings %s", s) + _logger.debug("Retrieved mount settings %s", s) mount = {} mount["ReadOnly"] = True if "ReadOnly" in s and s["ReadOnly"] in ["false", "False", False]: @@ -94,40 +112,45 @@ def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: if field in s: mount[field] = s[field] else: - log.warning( + _logger.warning( "Mount settings have wrong format. 
Required keys [Source, Target, Type]" ) continue - log.debug("Append mount settings %s", mount) + _logger.debug("Append mount settings %s", mount) mounts.append(mount) return mounts -def _parse_env_settings(settings: List[str]) -> Dict: +_ENV_NUM_ELEMENTS: Final[int] = 2 + + +def _parse_env_settings(settings: list[str]) -> dict: envs = {} for s in settings: - log.debug("Retrieved env settings %s", s) + _logger.debug("Retrieved env settings %s", s) if "=" in s: parts = s.split("=") - if len(parts) == 2: + if len(parts) == _ENV_NUM_ELEMENTS: envs.update({parts[0]: parts[1]}) - log.debug("Parsed env settings %s", s) + _logger.debug("Parsed env settings %s", s) return envs async def _read_service_settings( - app: web.Application, key: str, tag: str, settings_name: str -) -> Dict: + app: FastAPI, key: str, tag: str, settings_name: str +) -> dict[str, Any] | list[Any] | None: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) - settings = ( - json.loads(image_labels[settings_name]) if settings_name in image_labels else {} + settings: dict[str, Any] | list[Any] | None = ( + json.loads(image_labels[settings_name]) + if settings_name in image_labels + else None ) - log.debug("Retrieved %s settings: %s", settings_name, pformat(settings)) + _logger.debug("Retrieved %s settings: %s", settings_name, pformat(settings)) return settings @@ -140,7 +163,8 @@ def _to_simcore_runtime_docker_label_key(key: str) -> str: # pylint: disable=too-many-branches async def _create_docker_service_params( - app: web.Application, + app: FastAPI, + *, client: aiodocker.docker.Docker, service_key: str, service_tag: str, @@ -149,29 +173,37 @@ async def _create_docker_service_params( node_uuid: str, project_id: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, request_simcore_user_agent: str, -) -> Dict: +) -> dict: # pylint: disable=too-many-statements + app_settings = get_application_settings(app) + service_parameters_labels = 
await _read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_SETTINGS ) reverse_proxy_settings = await _read_service_settings( - app, service_key, service_tag, config.SERVICE_REVERSE_PROXY_SETTINGS + app, service_key, service_tag, SERVICE_REVERSE_PROXY_SETTINGS ) service_name = registry_proxy.get_service_last_names(service_key) + "_" + node_uuid - log.debug("Converting labels to docker runtime parameters") - container_spec = { - "Image": f"{config.REGISTRY_PATH}/{service_key}:{service_tag}", + _logger.debug("Converting labels to docker runtime parameters") + service_default_envs = { + "POSTGRES_ENDPOINT": app_settings.DIRECTOR_POSTGRES.dsn, + "POSTGRES_USER": app_settings.DIRECTOR_POSTGRES.POSTGRES_USER, + "POSTGRES_PASSWORD": app_settings.DIRECTOR_POSTGRES.POSTGRES_PASSWORD.get_secret_value(), + "POSTGRES_DB": app_settings.DIRECTOR_POSTGRES.POSTGRES_DB, + "STORAGE_ENDPOINT": app_settings.STORAGE_ENDPOINT, + } + container_spec: dict[str, Any] = { + "Image": f"{app_settings.DIRECTOR_REGISTRY.resolved_registry_url}/{service_key}:{service_tag}", "Env": { - **config.SERVICES_DEFAULT_ENVS, + **service_default_envs, "SIMCORE_USER_ID": user_id, "SIMCORE_NODE_UUID": node_uuid, "SIMCORE_PROJECT_ID": project_id, "SIMCORE_NODE_BASEPATH": node_base_path or "", "SIMCORE_HOST_NAME": service_name, }, - "Hosts": get_system_extra_hosts_raw(config.EXTRA_HOSTS_SUFFIX), "Init": True, "Labels": { _to_simcore_runtime_docker_label_key("user_id"): user_id, @@ -179,7 +211,7 @@ async def _create_docker_service_params( _to_simcore_runtime_docker_label_key("node_id"): node_uuid, _to_simcore_runtime_docker_label_key( "swarm_stack_name" - ): config.SWARM_STACK_NAME, + ): app_settings.DIRECTOR_SWARM_STACK_NAME, _to_simcore_runtime_docker_label_key( "simcore_user_agent" ): request_simcore_user_agent, @@ -192,53 +224,42 @@ async def _create_docker_service_params( "Mounts": [], } - if ( - 
config.DIRECTOR_SELF_SIGNED_SSL_FILENAME - and config.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID - and config.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME - ): - # Note: this is useful for S3 client in case of self signed certificate - container_spec["Env"][ - "SSL_CERT_FILE" - ] = config.DIRECTOR_SELF_SIGNED_SSL_FILENAME - container_spec["Secrets"] = [ - { - "SecretID": config.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID, - "SecretName": config.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME, - "File": { - "Name": config.DIRECTOR_SELF_SIGNED_SSL_FILENAME, - "Mode": 444, - "UID": "0", - "GID": "0", - }, - } - ] - # SEE https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate - docker_params = { - "auth": await _create_auth() if config.REGISTRY_AUTH else {}, - "registry": config.REGISTRY_PATH if config.REGISTRY_AUTH else "", + docker_params: dict[str, Any] = { + "auth": ( + await _create_auth(app_settings.DIRECTOR_REGISTRY) + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + else {} + ), + "registry": ( + app_settings.DIRECTOR_REGISTRY.resolved_registry_url + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + else "" + ), "name": service_name, "task_template": { "ContainerSpec": container_spec, "Placement": { - "Constraints": ["node.role==worker"] - if await docker_utils.swarm_has_worker_nodes() - else [] + "Constraints": ( + ["node.role==worker"] + if await docker_utils.swarm_has_worker_nodes() + else [] + ) }, "RestartPolicy": { "Condition": "on-failure", - "Delay": config.DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S * pow(10, 6), - "MaxAttempts": config.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS, + "Delay": app_settings.DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S + * pow(10, 6), + "MaxAttempts": app_settings.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS, }, "Resources": { "Limits": { - "NanoCPUs": config.DEFAULT_MAX_NANO_CPUS, - "MemoryBytes": config.DEFAULT_MAX_MEMORY, + "NanoCPUs": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS, + "MemoryBytes": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, }, 
"Reservations": { - "NanoCPUs": config.DEFAULT_MAX_NANO_CPUS, - "MemoryBytes": config.DEFAULT_MAX_MEMORY, + "NanoCPUs": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS, + "MemoryBytes": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, }, }, }, @@ -249,7 +270,7 @@ async def _create_docker_service_params( _to_simcore_runtime_docker_label_key("node_id"): node_uuid, _to_simcore_runtime_docker_label_key( "swarm_stack_name" - ): config.SWARM_STACK_NAME, + ): app_settings.DIRECTOR_SWARM_STACK_NAME, _to_simcore_runtime_docker_label_key( "simcore_user_agent" ): request_simcore_user_agent, @@ -258,45 +279,46 @@ async def _create_docker_service_params( ): "osparc", # fixed no legacy available in other products _to_simcore_runtime_docker_label_key("cpu_limit"): "0", _to_simcore_runtime_docker_label_key("memory_limit"): "0", - _to_simcore_runtime_docker_label_key("type"): "main" - if main_service - else "dependency", - "io.simcore.zone": f"{config.TRAEFIK_SIMCORE_ZONE}", + _to_simcore_runtime_docker_label_key("type"): ( + "main" if main_service else "dependency" + ), + "io.simcore.zone": f"{app_settings.DIRECTOR_TRAEFIK_SIMCORE_ZONE}", "traefik.enable": "true" if main_service else "false", f"traefik.http.services.{service_name}.loadbalancer.server.port": "8080", f"traefik.http.routers.{service_name}.rule": f"PathPrefix(`/x/{node_uuid}`)", f"traefik.http.routers.{service_name}.entrypoints": "http", f"traefik.http.routers.{service_name}.priority": "10", - f"traefik.http.routers.{service_name}.middlewares": f"{config.SWARM_STACK_NAME}_gzip@swarm", + f"traefik.http.routers.{service_name}.middlewares": f"{app_settings.DIRECTOR_SWARM_STACK_NAME}_gzip@swarm", }, "networks": [internal_network_id] if internal_network_id else [], } - if config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: - log.debug( - "adding custom constraints %s ", config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS + if app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: + _logger.debug( + "adding custom constraints %s ", + 
app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS, ) docker_params["task_template"]["Placement"]["Constraints"] += [ - config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS + app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS ] - if reverse_proxy_settings: - # some services define strip_path:true if they need the path to be stripped away - if ( - "strip_path" in reverse_proxy_settings - and reverse_proxy_settings["strip_path"] - ): - docker_params["labels"][ - f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" - ] = f"^/x/{node_uuid}" - docker_params["labels"][ - f"traefik.http.routers.{service_name}.middlewares" - ] += f", {service_name}_stripprefixregex" - - placement_constraints_to_substitute: List[str] = [] - placement_substitutions: Dict[ + # some services define strip_path:true if they need the path to be stripped away + if ( + isinstance(reverse_proxy_settings, dict) + and reverse_proxy_settings + and reverse_proxy_settings.get("strip_path") + ): + docker_params["labels"][ + f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" + ] = f"^/x/{node_uuid}" + docker_params["labels"][ + f"traefik.http.routers.{service_name}.middlewares" + ] += f", {service_name}_stripprefixregex" + + placement_constraints_to_substitute: list[str] = [] + placement_substitutions: dict[ str, str - ] = config.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS - + ] = app_settings.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS + assert isinstance(service_parameters_labels, list) # nosec for param in service_parameters_labels: _check_setting_correctness(param) # replace %service_uuid% by the given uuid @@ -336,7 +358,7 @@ async def _create_docker_service_params( "GenericResources" ] - to_remove: Set[str] = set() + to_remove: set[str] = set() for generic_resource in generic_resources: kind = generic_resource["DiscreteResourceSpec"]["Kind"] if kind in placement_substitutions: @@ -375,34 +397,33 @@ async def 
_create_docker_service_params( ) # REST-API compatible elif param["type"] == "EndpointSpec": - if "Ports" in param["value"]: - if ( - isinstance(param["value"]["Ports"], list) - and "TargetPort" in param["value"]["Ports"][0] - ): - docker_params["labels"][ - _to_simcore_runtime_docker_label_key("port") - ] = docker_params["labels"][ - f"traefik.http.services.{service_name}.loadbalancer.server.port" - ] = str( - param["value"]["Ports"][0]["TargetPort"] - ) + if "Ports" in param["value"] and ( + isinstance(param["value"]["Ports"], list) + and "TargetPort" in param["value"]["Ports"][0] + ): + docker_params["labels"][ + _to_simcore_runtime_docker_label_key("port") + ] = docker_params["labels"][ + f"traefik.http.services.{service_name}.loadbalancer.server.port" + ] = str( + param["value"]["Ports"][0]["TargetPort"] + ) # placement constraints - elif param["name"] == "constraints": # python-API compatible - docker_params["task_template"]["Placement"]["Constraints"] += param["value"] - elif param["type"] == "Constraints": # REST-API compatible + elif ( + param["name"] == "constraints" or param["type"] == "Constraints" + ): # python-API compatible docker_params["task_template"]["Placement"]["Constraints"] += param["value"] elif param["name"] == "env": - log.debug("Found env parameter %s", param["value"]) + _logger.debug("Found env parameter %s", param["value"]) env_settings = _parse_env_settings(param["value"]) if env_settings: docker_params["task_template"]["ContainerSpec"]["Env"].update( env_settings ) elif param["name"] == "mount": - log.debug("Found mount parameter %s", param["value"]) - mount_settings: List[Dict] = _parse_mount_settings(param["value"]) + _logger.debug("Found mount parameter %s", param["value"]) + mount_settings: list[dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( mount_settings @@ -415,15 +436,11 @@ async def _create_docker_service_params( ] # attach the service to the 
swarm network dedicated to services - try: - swarm_network = await _get_swarm_network(client) - swarm_network_id = swarm_network["Id"] - swarm_network_name = swarm_network["Name"] - docker_params["networks"].append(swarm_network_id) - docker_params["labels"]["traefik.docker.network"] = swarm_network_name - - except exceptions.DirectorException: - log.exception("Could not find swarm network") + swarm_network = await _get_swarm_network(client, app_settings=app_settings) + swarm_network_id = swarm_network["Id"] + swarm_network_name = swarm_network["Name"] + docker_params["networks"].append(swarm_network_id) + docker_params["labels"]["traefik.docker.network"] = swarm_network_name # set labels for CPU and Memory limits nano_cpus_limit = str( @@ -450,26 +467,31 @@ async def _create_docker_service_params( } docker_params["task_template"]["ContainerSpec"]["Env"].update(resource_limits) - log.debug( + _logger.debug( "Converted labels to docker runtime parameters: %s", pformat(docker_params) ) return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: - log.debug("Getting service entrypoint") +def _get_service_entrypoint( + service_boot_parameters_labels: list[dict[str, Any]] +) -> str: + _logger.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) if param["name"] == "entry_point": - log.debug("Service entrypoint is %s", param["value"]) + _logger.debug("Service entrypoint is %s", param["value"]) + assert isinstance(param["value"], str) # nosec return param["value"] return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: +async def _get_swarm_network( + client: aiodocker.docker.Docker, app_settings: ApplicationSettings +) -> dict: network_name = "_default" - if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = "{}".format(config.SIMCORE_SERVICES_NETWORK_NAME) + if app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: + network_name = 
f"{app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME}" # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -477,20 +499,20 @@ async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: if "swarm" in x["Scope"] and network_name in x["Name"] ] if not networks or len(networks) > 1: - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg=( "Swarm network name is not configured, found following networks " "(if there is more then 1 network, remove the one which has no " - f"containers attached and all is fixed): {networks}" + f"containers attached and all is fixed): {networks if networks else 'no swarm network!'}" ) ) return networks[0] async def _get_docker_image_port_mapping( - service: Dict, -) -> Tuple[Optional[str], Optional[int]]: - log.debug("getting port published by service: %s", service["Spec"]["Name"]) + service: dict, +) -> tuple[str | None, int | None]: + _logger.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] target_ports = [] @@ -502,19 +524,18 @@ async def _get_docker_image_port_mapping( published_ports.append(port["PublishedPort"]) target_ports.append(port["TargetPort"]) - log.debug("Service %s publishes: %s ports", service["ID"], published_ports) + _logger.debug("Service %s publishes: %s ports", service["ID"], published_ports) published_port = None target_port = None if published_ports: published_port = published_ports[0] if target_ports: target_port = target_ports[0] - else: - # if empty no port is published but there might still be an internal port defined - if _to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: - target_port = int( - service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] - ) + # if empty no port is published but there might still be an internal port defined + elif _to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: + target_port = int( + 
service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] + ) return published_port, target_port @@ -525,30 +546,30 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: Dict, + service_boot_parameters_labels: list[Any], session: ClientSession, + app_settings: ApplicationSettings, ) -> None: for param in service_boot_parameters_labels: _check_setting_correctness(param) if param["name"] == "published_host": - # time.sleep(5) route = param["value"] - log.debug( + _logger.debug( "Service needs to get published host %s:%s using route %s", - config.PUBLISHED_HOST_NAME, + app_settings.DIRECTOR_PUBLISHED_HOST_NAME, port, route, ) service_url = "http://" + service_name + "/" + route # NOSONAR query_string = { - "hostname": str(config.PUBLISHED_HOST_NAME), + "hostname": app_settings.DIRECTOR_PUBLISHED_HOST_NAME, "port": str(port), } - log.debug("creating request %s and query %s", service_url, query_string) + _logger.debug("creating request %s and query %s", service_url, query_string) async with session.post(service_url, data=query_string) as response: - log.debug("query response: %s", await response.text()) + _logger.debug("query response: %s", await response.text()) return - log.debug("service %s does not need to know its external port", service_name) + _logger.debug("service %s does not need to know its external port", service_name) async def _create_network_name(service_name: str, node_uuid: str) -> str: @@ -558,7 +579,7 @@ async def _create_network_name(service_name: str, node_uuid: str) -> str: async def _create_overlay_network_in_swarm( client: aiodocker.docker.Docker, service_name: str, node_uuid: str ) -> str: - log.debug( + _logger.debug( "Creating overlay network for service %s with uuid %s", service_name, node_uuid ) network_name = await _create_network_name(service_name, node_uuid) @@ -569,24 +590,22 @@ async def _create_overlay_network_in_swarm( "Labels": 
{_to_simcore_runtime_docker_label_key("node_id"): node_uuid}, } docker_network = await client.networks.create(network_config) - log.debug( + _logger.debug( "Network %s created for service %s with uuid %s", network_name, service_name, node_uuid, ) - return docker_network.id - except aiodocker.exceptions.DockerError as err: - log.exception("Error while creating network for service %s", service_name) - raise exceptions.GenericDockerError( - "Error while creating network", err - ) from err + return cast(str, docker_network.id) + except aiodocker.DockerError as err: + msg = "Error while creating network" + raise GenericDockerError(err=msg) from err async def _remove_overlay_network_of_swarm( client: aiodocker.docker.Docker, node_uuid: str ) -> None: - log.debug("Removing overlay network for service with uuid %s", node_uuid) + _logger.debug("Removing overlay network for service with uuid %s", node_uuid) try: networks = await client.networks.list() networks = [ @@ -597,32 +616,28 @@ async def _remove_overlay_network_of_swarm( and x["Labels"][_to_simcore_runtime_docker_label_key("node_id")] == node_uuid ] - log.debug("Found %s networks with uuid %s", len(networks), node_uuid) + _logger.debug("Found %s networks with uuid %s", len(networks), node_uuid) # remove any network in the list (should be only one) for network in networks: docker_network = aiodocker.networks.DockerNetwork(client, network["Id"]) await docker_network.delete() - log.debug("Removed %s networks with uuid %s", len(networks), node_uuid) - except aiodocker.exceptions.DockerError as err: - log.exception( - "Error while removing networks for service with uuid: %s", node_uuid - ) - raise exceptions.GenericDockerError( - "Error while removing networks", err - ) from err + _logger.debug("Removed %s networks with uuid %s", len(networks), node_uuid) + except aiodocker.DockerError as err: + msg = "Error while removing networks" + raise GenericDockerError(err=msg) from err async def _get_service_state( - client: 
aiodocker.docker.Docker, service: Dict -) -> Tuple[ServiceState, str]: + client: aiodocker.docker.Docker, service: dict, app_settings: ApplicationSettings +) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] - log.debug("Getting service %s state", service_name) + _logger.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) # wait for tasks - task_started_time = datetime.utcnow() - while (datetime.utcnow() - task_started_time) < timedelta(seconds=20): + task_started_time = arrow.utcnow().datetime + while (arrow.utcnow().datetime - task_started_time) < timedelta(seconds=20): tasks = await client.tasks.list(filters={"service": service_name}) # only keep the ones with the right service ID (we're being a bit picky maybe) tasks = [x for x in tasks if x["ServiceID"] == service["ID"]] @@ -637,19 +652,17 @@ async def _get_service_state( last_task = sorted(tasks, key=lambda task: task["UpdatedAt"])[-1] task_state = last_task["Status"]["State"] - log.debug("%s %s", service["ID"], task_state) + _logger.debug("%s %s", service["ID"], task_state) last_task_state = ServiceState.STARTING # default - last_task_error_msg = ( - last_task["Status"]["Err"] if "Err" in last_task["Status"] else "" - ) + last_task_error_msg = last_task["Status"].get("Err", "") if task_state in ("failed"): # check if it failed already the max number of attempts we allow for - if len(tasks) < config.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: - log.debug("number of tasks: %s", len(tasks)) + if len(tasks) < app_settings.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: + _logger.debug("number of tasks: %s", len(tasks)) last_task_state = ServiceState.STARTING else: - log.error( + _logger.error( "service %s failed with %s after %s trials", service_name, last_task["Status"], @@ -657,7 +670,7 @@ async def _get_service_state( ) last_task_state = ServiceState.FAILED elif 
task_state in ("rejected"): - log.error("service %s failed with %s", service_name, last_task["Status"]) + _logger.error("service %s failed with %s", service_name, last_task["Status"]) last_task_state = ServiceState.FAILED elif task_state in ("pending"): last_task_state = ServiceState.PENDING @@ -666,16 +679,16 @@ async def _get_service_state( elif task_state in ("ready", "starting"): last_task_state = ServiceState.STARTING elif task_state in ("running"): - now = datetime.utcnow() + now = arrow.utcnow().datetime # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' - task_state_update_time = parse_as_datetime( - last_task["Status"]["Timestamp"], default=now - ) + task_state_update_time = to_datetime(last_task["Status"]["Timestamp"]) time_since_running = now - task_state_update_time - log.debug("Now is %s, time since running mode is %s", now, time_since_running) + _logger.debug( + "Now is %s, time since running mode is %s", now, time_since_running + ) if time_since_running > timedelta( - seconds=config.DIRECTOR_SERVICES_STATE_MONITOR_S + seconds=app_settings.DIRECTOR_SERVICES_STATE_MONITOR_S ): last_task_state = ServiceState.RUNNING else: @@ -683,16 +696,16 @@ async def _get_service_state( elif task_state in ("complete", "shutdown"): last_task_state = ServiceState.COMPLETE - log.debug("service running state is %s", last_task_state) + _logger.debug("service running state is %s", last_task_state) return (last_task_state, last_task_error_msg) async def _wait_until_service_running_or_failed( - client: aiodocker.docker.Docker, service: Dict, node_uuid: str + client: aiodocker.docker.Docker, service: dict, node_uuid: str ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] - log.debug("Waiting for service %s to start", service_name) + _logger.debug("Waiting for service %s to start", service_name) while True: tasks = await client.tasks.list(filters={"service": service_name}) # 
only keep the ones with the right service ID (we're being a bit picky maybe) @@ -701,31 +714,31 @@ async def _wait_until_service_running_or_failed( if tasks: last_task = tasks[0] task_state = last_task["Status"]["State"] - log.debug("%s %s", service["ID"], task_state) + _logger.debug("%s %s", service["ID"], task_state) if task_state in ("failed", "rejected"): - log.error( + _logger.error( "Error while waiting for service with %s", last_task["Status"] ) - raise exceptions.ServiceStartTimeoutError(service_name, node_uuid) + raise ServiceStartTimeoutError( + service_name=service_name, service_uuid=node_uuid + ) if task_state in ("running", "complete"): break # allows dealing with other events instead of wasting time here await asyncio.sleep(1) # 1s - log.debug("Waited for service %s to start", service_name) + _logger.debug("Waited for service %s to start", service_name) -async def _get_repos_from_key( - app: web.Application, service_key: str -) -> Dict[str, List[Dict]]: +async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[str]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) } - log.debug("entries %s", list_of_images) + _logger.debug("entries %s", list_of_images) if not list_of_images[service_key]: - raise exceptions.ServiceNotAvailableError(service_key) + raise ServiceNotAvailableError(service_name=service_key) - log.debug( + _logger.debug( "Service %s has the following list of images available: %s", service_key, list_of_images, @@ -735,15 +748,14 @@ async def _get_repos_from_key( async def _get_dependant_repos( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> list[dict]: list_of_images = await _get_repos_from_key(app, service_key) tag = await _find_service_tag(list_of_images, service_key, service_tag) # look for dependencies - 
dependent_repositories = await registry_proxy.list_interactive_service_dependencies( + return await registry_proxy.list_interactive_service_dependencies( app, service_key, tag ) - return dependent_repositories _TAG_REGEX = re.compile(r"^\d+\.\d+\.\d+$") @@ -758,34 +770,38 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: Dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str | None ) -> str: - if not service_key in list_of_images: - raise exceptions.ServiceNotAvailableError( + if service_key not in list_of_images: + raise ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) # filter incorrect chars filtered_tags_list = filter(_TAG_REGEX.search, list_of_images[service_key]) # sort them now - available_tags_list = sorted(filtered_tags_list, key=StrictVersion) + available_tags_list = sorted(filtered_tags_list, key=Version) # not tags available... probably an undefined service there... if not available_tags_list: - raise exceptions.ServiceNotAvailableError(service_key, service_tag) + raise ServiceNotAvailableError( + service_name=service_key, service_tag=service_tag + ) tag = service_tag if not service_tag or service_tag == "latest": # get latest tag tag = available_tags_list[len(available_tags_list) - 1] elif available_tags_list.count(service_tag) != 1: - raise exceptions.ServiceNotAvailableError( + raise ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) - log.debug("Service tag found is %s ", service_tag) + _logger.debug("Service tag found is %s ", service_tag) + assert tag is not None # nosec return tag async def _start_docker_service( - app: web.Application, + app: FastAPI, + *, client: aiodocker.docker.Docker, user_id: str, project_id: str, @@ -794,23 +810,24 @@ async def _start_docker_service( main_service: bool, node_uuid: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, 
request_simcore_user_agent: str, -) -> Dict: # pylint: disable=R0913 +) -> dict: # pylint: disable=R0913 + app_settings = get_application_settings(app) service_parameters = await _create_docker_service_params( app, - client, - service_key, - service_tag, - main_service, - user_id, - node_uuid, - project_id, - node_base_path, - internal_network_id, - request_simcore_user_agent, + client=client, + service_key=service_key, + service_tag=service_tag, + main_service=main_service, + user_id=user_id, + node_uuid=node_uuid, + project_id=project_id, + node_base_path=node_base_path, + internal_network_id=internal_network_id, + request_simcore_user_agent=request_simcore_user_agent, ) - log.debug( + _logger.debug( "Starting docker service %s:%s using parameters %s", service_key, service_tag, @@ -821,34 +838,42 @@ async def _start_docker_service( service = await client.services.create(**service_parameters) if "ID" not in service: # error while starting service - raise exceptions.DirectorException( - "Error while starting service: {}".format(str(service)) - ) - log.debug("Service started now waiting for it to run") + msg = f"Error while starting service: {service!s}" + raise DirectorRuntimeError(msg=msg) + _logger.debug("Service started now waiting for it to run") # get the full info from docker service = await client.services.inspect(service["ID"]) service_name = service["Spec"]["Name"] - service_state, service_msg = await _get_service_state(client, service) + service_state, service_msg = await _get_service_state( + client, dict(service), app_settings=app_settings + ) # wait for service to start - # await _wait_until_service_running_or_failed(client, service, node_uuid) - log.debug("Service %s successfully started", service_name) + _logger.debug("Service %s successfully started", service_name) # the docker swarm maybe opened some random port to access the service, get the latest version of the service service = await client.services.inspect(service["ID"]) - published_port, 
target_port = await _get_docker_image_port_mapping(service) + published_port, target_port = await _get_docker_image_port_mapping( + dict(service) + ) # now pass boot parameters service_boot_parameters_labels = await _read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_BOOTSETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ) - service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) - if published_port: - session = app[APP_CLIENT_SESSION_KEY] - await _pass_port_to_service( - service_name, published_port, service_boot_parameters_labels, session - ) + service_entrypoint = "" + if isinstance(service_boot_parameters_labels, list): + service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) + if published_port: + session = get_client_session(app) + await _pass_port_to_service( + service_name, + published_port, + service_boot_parameters_labels, + session, + app_settings=app_settings, + ) - container_meta_data = { + return { "published_port": published_port, "entry_point": service_entrypoint, "service_uuid": node_uuid, @@ -862,43 +887,42 @@ async def _start_docker_service( "user_id": user_id, "project_id": project_id, } - return container_meta_data - except exceptions.ServiceStartTimeoutError: - log.exception("Service failed to start") + except ServiceStartTimeoutError: + _logger.exception("Service failed to start") await _silent_service_cleanup(app, node_uuid) raise - except aiodocker.exceptions.DockerError as err: - log.exception("Unexpected error") + except aiodocker.DockerError as err: + _logger.exception("Unexpected error") await _silent_service_cleanup(app, node_uuid) - raise exceptions.ServiceNotAvailableError(service_key, service_tag) from err + raise ServiceNotAvailableError( + service_name=service_key, service_tag=service_tag + ) from err -async def _silent_service_cleanup(app: web.Application, node_uuid: str) -> None: - try: - await stop_service(app, node_uuid, False) - except 
exceptions.DirectorException: - pass +async def _silent_service_cleanup(app: FastAPI, node_uuid: str) -> None: + with contextlib.suppress(DirectorRuntimeError): + await stop_service(app, node_uuid=node_uuid, save_state=False) async def _create_node( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, user_id: str, project_id: str, - list_of_services: List[Dict], + list_of_services: list[dict], node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> List[Dict]: # pylint: disable=R0913, R0915 - log.debug( +) -> list[dict]: # pylint: disable=R0913, R0915 + _logger.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), node_uuid, node_base_path, user_id, ) - log.debug("Services %s will be started", list_of_services) + _logger.debug("Services %s will be started", list_of_services) # if the service uses several docker images, a network needs to be setup to connect them together inter_docker_network_id = None @@ -907,22 +931,22 @@ async def _create_node( inter_docker_network_id = await _create_overlay_network_in_swarm( client, service_name, node_uuid ) - log.debug("Created docker network in swarm for service %s", service_name) + _logger.debug("Created docker network in swarm for service %s", service_name) containers_meta_data = [] for service in list_of_services: service_meta_data = await _start_docker_service( app, - client, - user_id, - project_id, - service["key"], - service["tag"], - list_of_services.index(service) == 0, - node_uuid, - node_base_path, - inter_docker_network_id, - request_simcore_user_agent, + client=client, + user_id=user_id, + project_id=project_id, + service_key=service["key"], + service_tag=service["tag"], + main_service=list_of_services.index(service) == 0, + node_uuid=node_uuid, + node_base_path=node_base_path, + internal_network_id=inter_docker_network_id, + request_simcore_user_agent=request_simcore_user_agent, ) 
containers_meta_data.append(service_meta_data) @@ -930,18 +954,20 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: Dict, -) -> Tuple[str, str]: + service: dict, registry_settings: RegistrySettings +) -> tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) - if not service_full_name.startswith(config.REGISTRY_PATH): - raise exceptions.DirectorException( - msg=f"Invalid service '{service_full_name}', it is missing {config.REGISTRY_PATH}" + if not service_full_name.startswith(registry_settings.resolved_registry_url): + raise DirectorRuntimeError( + msg=f"Invalid service '{service_full_name}', it is missing {registry_settings.resolved_registry_url}" ) - service_full_name = service_full_name[len(config.REGISTRY_PATH) :].strip("/") + service_full_name = service_full_name[ + len(registry_settings.resolved_registry_url) : + ].strip("/") service_re_match = _SERVICE_KEY_REGEX.match(service_full_name) if not service_re_match: - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg=f"Invalid service '{service_full_name}', it does not follow pattern '{_SERVICE_KEY_REGEX.pattern}'" ) service_key = service_re_match.group("key") @@ -949,24 +975,24 @@ async def _get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: Dict) -> str: - envs_list = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] +async def _get_service_basepath_from_docker_service(service: dict[str, Any]) -> str: + envs_list: list[str] = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict = dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] async def start_service( - app: web.Application, + app: FastAPI, user_id: str, project_id: str, service_key: str, - service_tag: str, + service_tag: str | None, node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> Dict: - 
# pylint: disable=C0103 - log.debug( +) -> dict: + app_settings = get_application_settings(app) + _logger.debug( "starting service %s:%s using uuid %s, basepath %s", service_key, service_tag, @@ -978,11 +1004,11 @@ async def start_service( await _check_node_uuid_available(client, node_uuid) list_of_images = await _get_repos_from_key(app, service_key) service_tag = await _find_service_tag(list_of_images, service_key, service_tag) - log.debug("Found service to start %s:%s", service_key, service_tag) + _logger.debug("Found service to start %s:%s", service_key, service_tag) list_of_services_to_start = [{"key": service_key, "tag": service_tag}] # find the service dependencies list_of_dependencies = await _get_dependant_repos(app, service_key, service_tag) - log.debug("Found service dependencies: %s", list_of_dependencies) + _logger.debug("Found service dependencies: %s", list_of_dependencies) if list_of_dependencies: list_of_services_to_start.extend(list_of_dependencies) @@ -997,36 +1023,40 @@ async def start_service( request_simcore_user_agent, ) node_details = containers_meta_data[0] - if config.MONITORING_ENABLED: - service_started( - app, - "undefined_user", # NOTE: to prevent high cardinality metrics this is disabled - service_key, - service_tag, - "DYNAMIC", - ) + if app_settings.DIRECTOR_MONITORING_ENABLED: + get_instrumentation(app).services_started.labels( + service_key=service_key, + service_tag=service_tag, + simcore_user_agent="undefined_user", + ).inc() + # we return only the info of the main service return node_details async def _get_node_details( - app: web.Application, client: aiodocker.docker.Docker, service: Dict -) -> Dict: + app: FastAPI, client: aiodocker.docker.Docker, service: dict +) -> dict: + app_settings = get_application_settings(app) service_key, service_tag = await _get_service_key_version_from_docker_service( - service + service, registry_settings=app_settings.DIRECTOR_REGISTRY ) # get boot parameters results = await asyncio.gather( 
_read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_BOOTSETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ), _get_service_basepath_from_docker_service(service), - _get_service_state(client, service), + _get_service_state(client, service, app_settings=app_settings), ) service_boot_parameters_labels = results[0] - service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) + service_entrypoint = "" + if service_boot_parameters_labels and isinstance( + service_boot_parameters_labels, list + ): + service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) service_basepath = results[1] service_state, service_msg = results[2] service_name = service["Spec"]["Name"] @@ -1040,7 +1070,7 @@ async def _get_node_details( # get the published port published_port, target_port = await _get_docker_image_port_mapping(service) - node_details = { + return { "published_port": published_port, "entry_point": service_entrypoint, "service_uuid": service_uuid, @@ -1054,17 +1084,17 @@ async def _get_node_details( "user_id": user_id, "project_id": project_id, } - return node_details async def get_services_details( - app: web.Application, user_id: Optional[str], study_id: Optional[str] -) -> List[Dict]: + app: FastAPI, user_id: str | None, study_id: str | None +) -> list[dict]: + app_settings = get_application_settings(app) async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = [ f"{_to_simcore_runtime_docker_label_key('type')}=main", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] if user_id: filters.append( @@ -1078,53 +1108,44 @@ async def get_services_details( filters={"label": filters} ) - services_details = [ - await _get_node_details(app, client, service) + return [ + await _get_node_details(app, client, 
dict(service)) for service in list_running_services ] - return services_details - except aiodocker.exceptions.DockerError as err: - log.exception( - "Error while listing services with user_id, study_id %s, %s", - user_id, - study_id, - ) - raise exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + except aiodocker.DockerError as err: + msg = f"Error while accessing container for {user_id=}, {study_id=}" + raise GenericDockerError(err=msg) from err -async def get_service_details(app: web.Application, node_uuid: str) -> Dict: - async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager +async def get_service_details(app: FastAPI, node_uuid: str) -> dict: + app_settings = get_application_settings(app) + async with docker_utils.docker_client() as client: try: list_running_services_with_uuid = await client.services.list( filters={ "label": [ f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}", f"{_to_simcore_runtime_docker_label_key('type')}=main", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] } ) # error if no service with such an id exists if not list_running_services_with_uuid: - raise exceptions.ServiceUUIDNotFoundError(node_uuid) + raise ServiceUUIDNotFoundError(service_uuid=node_uuid) if len(list_running_services_with_uuid) > 1: # someone did something fishy here - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg="More than one docker service is labeled as main service" ) - node_details = await _get_node_details( - app, client, list_running_services_with_uuid[0] + return await _get_node_details( + app, client, dict(list_running_services_with_uuid[0]) ) - return node_details - except aiodocker.exceptions.DockerError as err: - log.exception("Error while accessing container with uuid: %s", node_uuid) - raise 
exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + except aiodocker.DockerError as err: + msg = f"Error while accessing container {node_uuid=}" + raise GenericDockerError(err=msg) from err @retry( @@ -1133,11 +1154,15 @@ async def get_service_details(app: web.Application, node_uuid: str) -> Dict: reraise=True, retry=retry_if_exception_type(ClientConnectionError), ) -async def _save_service_state(service_host_name: str, session: aiohttp.ClientSession): +async def _save_service_state( + service_host_name: str, session: aiohttp.ClientSession +) -> None: response: ClientResponse async with session.post( url=f"http://{service_host_name}/state", # NOSONAR - timeout=ServicesCommonSettings().director_dynamic_service_save_timeout, + timeout=ClientTimeout( + ServicesCommonSettings().director_dynamic_service_save_timeout + ), ) as response: try: response.raise_for_status() @@ -1154,7 +1179,7 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes # METHOD NOT ALLOWED https://httpstatuses.com/405 # NOT FOUND https://httpstatuses.com/404 # - log.warning( + _logger.warning( "Service '%s' does not seem to implement save state functionality: %s. Skipping save", service_host_name, err, @@ -1163,7 +1188,7 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes # upss ... 
could service had troubles saving, reraise raise else: - log.info( + _logger.info( "Service '%s' successfully saved its state: %s", service_host_name, f"{response}", @@ -1171,8 +1196,9 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes @run_sequentially_in_context(target_args=["node_uuid"]) -async def stop_service(app: web.Application, node_uuid: str, save_state: bool) -> None: - log.debug( +async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> None: + app_settings = get_application_settings(app) + _logger.debug( "stopping service with node_uuid=%s, save_state=%s", node_uuid, save_state ) @@ -1183,52 +1209,53 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - filters={ "label": [ f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] } ) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while stopping container with uuid: %s", node_uuid) - raise exceptions.GenericDockerError( - "Error while stopping container", err - ) from err + except aiodocker.DockerError as err: + msg = f"Error while stopping container {node_uuid=}" + raise GenericDockerError(err=msg) from err # error if no service with such an id exists if not list_running_services_with_uuid: - raise exceptions.ServiceUUIDNotFoundError(node_uuid) + raise ServiceUUIDNotFoundError(service_uuid=node_uuid) - log.debug("found service(s) with uuid %s", list_running_services_with_uuid) + _logger.debug("found service(s) with uuid %s", list_running_services_with_uuid) # save the state of the main service if it can service_details = await get_service_details(app, node_uuid) - # FIXME: the exception for the 3d-viewer shall be removed once the dy-sidecar comes in service_host_name = "{}:{}{}".format( 
service_details["service_host"], - service_details["service_port"] - if service_details["service_port"] - else "80", - service_details["service_basepath"] - if not "3d-viewer" in service_details["service_host"] - else "", + ( + service_details["service_port"] + if service_details["service_port"] + else "80" + ), + ( + service_details["service_basepath"] + if "3d-viewer" not in service_details["service_host"] + else "" + ), ) # If state save is enforced if save_state: - log.debug("saving state of service %s...", service_host_name) + _logger.debug("saving state of service %s...", service_host_name) try: await _save_service_state( - service_host_name, session=app[APP_CLIENT_SESSION_KEY] + service_host_name, session=get_client_session(app) ) except ClientResponseError as err: raise ServiceStateSaveError( - node_uuid, + service_uuid=node_uuid, reason=f"service {service_host_name} rejected to save state, " f"responded {err.message} (status {err.status})." "Aborting stop service to prevent data loss.", ) from err except ClientError as err: - log.warning( + _logger.warning( "Could not save state because %s is unreachable [%s]." 
"Resuming stop_service.", service_host_name, @@ -1237,27 +1264,24 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - # remove the services try: - log.debug("removing services ...") + _logger.debug("removing services ...") for service in list_running_services_with_uuid: - log.debug("removing %s", service["Spec"]["Name"]) + _logger.debug("removing %s", service["Spec"]["Name"]) await client.services.delete(service["Spec"]["Name"]) - except aiodocker.exceptions.DockerError as err: - raise exceptions.GenericDockerError( - "Error while removing services", err - ) from err + except aiodocker.DockerError as err: + msg = f"Error while removing services {node_uuid=}" + raise GenericDockerError(err=msg) from err # remove network(s) - log.debug("removed services, now removing network...") + _logger.debug("removed services, now removing network...") await _remove_overlay_network_of_swarm(client, node_uuid) - log.debug("removed network") - - if config.MONITORING_ENABLED: - service_stopped( - app, - "undefined_user", - service_details["service_key"], - service_details["service_version"], - "DYNAMIC", - "SUCCESS", - ) + _logger.debug("removed network") + + if app_settings.DIRECTOR_MONITORING_ENABLED: + get_instrumentation(app).services_stopped.labels( + service_key=service_details["service_key"], + service_tag=service_details["service_version"], + simcore_user_agent="undefined_user", + result="SUCCESS", + ).inc() diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py deleted file mode 100644 index 10eca38b2b7..00000000000 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ /dev/null @@ -1,79 +0,0 @@ -import asyncio -import logging -from typing import AsyncIterator - -from aiohttp import web -from servicelib.utils import logged_gather -from simcore_service_director import config, exceptions, registry_proxy -from 
simcore_service_director.config import APP_REGISTRY_CACHE_DATA_KEY - -_logger = logging.getLogger(__name__) - -TASK_NAME: str = __name__ + "_registry_caching_task" - - -async def registry_caching_task(app: web.Application) -> None: - try: - - _logger.info("%s: initializing cache...", TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() - await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) - _logger.info("%s: initialisation completed", TASK_NAME) - while True: - _logger.info("%s: waking up, refreshing cache...", TASK_NAME) - try: - keys = [] - refresh_tasks = [] - for key in app[APP_REGISTRY_CACHE_DATA_KEY]: - path, method = key.split(":") - _logger.debug("refresh %s:%s", method, path) - refresh_tasks.append( - registry_proxy.registry_request( - app, path, method, no_cache=True - ) - ) - keys = list(app[APP_REGISTRY_CACHE_DATA_KEY].keys()) - results = await logged_gather(*refresh_tasks) - - for key, result in zip(keys, results): - app[APP_REGISTRY_CACHE_DATA_KEY][key] = result - - except exceptions.DirectorException: - # if the registry is temporarily not available this might happen - _logger.exception( - "%s: exception while refreshing cache, clean cache...", TASK_NAME - ) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() - - _logger.info( - "cache refreshed %s: sleeping for %ss...", - TASK_NAME, - config.DIRECTOR_REGISTRY_CACHING_TTL, - ) - await asyncio.sleep(config.DIRECTOR_REGISTRY_CACHING_TTL) - except asyncio.CancelledError: - _logger.info("%s: cancelling task...", TASK_NAME) - except Exception: # pylint: disable=broad-except - _logger.exception("%s: Unhandled exception while refreshing cache", TASK_NAME) - finally: - _logger.info("%s: finished task...clearing cache...", TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() - - -async def setup_registry_caching_task(app: web.Application) -> AsyncIterator[None]: - app[APP_REGISTRY_CACHE_DATA_KEY] = {} - app[TASK_NAME] = asyncio.get_event_loop().create_task(registry_caching_task(app)) - - yield - 
- task = app[TASK_NAME] - task.cancel() - await task - - -def setup(app: web.Application) -> None: - if config.DIRECTOR_REGISTRY_CACHING: - app.cleanup_ctx.append(setup_registry_caching_task) - - -__all__ = ["setup", "APP_REGISTRY_CACHE_DATA_KEY"] diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 2c4591aefb6..f45f3b96348 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -1,36 +1,52 @@ -# pylint: disable=C0111 -import asyncio import enum import json import logging import re +from collections.abc import Mapping from http import HTTPStatus from pprint import pformat -from typing import Any, AsyncIterator, Dict, List, Optional, Tuple +from typing import Any, Final, cast -from aiohttp import BasicAuth, ClientSession, client_exceptions, web +from aiocache import Cache, SimpleMemoryCache # type: ignore[import-untyped] +from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout -from simcore_service_director import config, exceptions -from simcore_service_director.cache_request_decorator import cache_requests +from fastapi import FastAPI +from servicelib.utils import limited_gather from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_result from tenacity.wait import wait_fixed from yarl import URL -from .config import APP_CLIENT_SESSION_KEY +from .client_session import get_client_session +from .constants import ( + DIRECTOR_SIMCORE_SERVICES_PREFIX, + ORG_LABELS_TO_SCHEMA_LABELS, + SERVICE_RUNTIME_SETTINGS, +) +from .core.errors import ( + DirectorRuntimeError, + RegistryConnectionError, + ServiceNotAvailableError, +) +from .core.settings import ApplicationSettings, get_application_settings DEPENDENCIES_LABEL_KEY: str = "simcore.service.dependencies" 
NUMBER_OF_RETRIEVED_REPOS: int = 50 NUMBER_OF_RETRIEVED_TAGS: int = 50 - +_MAX_CONCURRENT_CALLS: Final[int] = 50 VERSION_REG = re.compile( r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$" ) logger = logging.getLogger(__name__) +# +# NOTE: if you are refactoring this module, +# please consider reusing packages/pytest-simcore/src/pytest_simcore/helpers/docker_registry.py +# + class ServiceType(enum.Enum): ALL = "" @@ -39,25 +55,32 @@ class ServiceType(enum.Enum): async def _basic_auth_registry_request( - app: web.Application, path: str, method: str, **session_kwargs -) -> Tuple[Dict, Dict]: - if not config.REGISTRY_URL: - raise exceptions.DirectorException("URL to registry is not defined") + app: FastAPI, path: str, method: str, **session_kwargs +) -> tuple[dict, Mapping]: + app_settings = get_application_settings(app) + if not app_settings.DIRECTOR_REGISTRY.REGISTRY_URL: + msg = "URL to registry is not defined" + raise DirectorRuntimeError(msg=msg) url = URL( - f"{'https' if config.REGISTRY_SSL else 'http'}://{config.REGISTRY_URL}{path}" + f"{'https' if app_settings.DIRECTOR_REGISTRY.REGISTRY_SSL else 'http'}://{app_settings.DIRECTOR_REGISTRY.REGISTRY_URL}{path}" ) logger.debug("Requesting registry using %s", url) # try the registry with basic authentication first, spare 1 call - resp_data: Dict = {} - resp_headers: Dict = {} + resp_data: dict = {} + resp_headers: Mapping = {} auth = ( - BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) - if config.REGISTRY_AUTH and config.REGISTRY_USER and config.REGISTRY_PW + BasicAuth( + login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, + password=app_settings.DIRECTOR_REGISTRY.REGISTRY_PW.get_secret_value(), + ) + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + and app_settings.DIRECTOR_REGISTRY.REGISTRY_USER + and app_settings.DIRECTOR_REGISTRY.REGISTRY_PW else None ) - session = 
app[APP_CLIENT_SESSION_KEY] + session = get_client_session(app) try: async with session.request( method.lower(), url, auth=auth, **session_kwargs @@ -66,18 +89,22 @@ async def _basic_auth_registry_request( logger.debug("Registry unauthorized request: %s", await response.text()) # basic mode failed, test with other auth mode resp_data, resp_headers = await _auth_registry_request( - url, method, response.headers, session, **session_kwargs + app_settings, + url, + method, + response.headers, + session, + **session_kwargs, ) elif response.status == HTTPStatus.NOT_FOUND: - logger.exception("Path to registry not found: %s", url) - raise exceptions.ServiceNotAvailableError(str(path)) + raise ServiceNotAvailableError(service_name=path) elif response.status > 399: logger.exception( "Unknown error while accessing registry: %s", str(response) ) - raise exceptions.RegistryConnectionError(str(response)) + raise RegistryConnectionError(msg=str(response)) else: # registry that does not need an auth @@ -86,22 +113,29 @@ async def _basic_auth_registry_request( return (resp_data, resp_headers) except client_exceptions.ClientError as exc: - logger.exception("Unknown error while accessing registry: %s", str(exc)) - raise exceptions.DirectorException( - f"Unknown error while accessing registry: {str(exc)}" - ) + logger.exception("Unknown error while accessing registry") + msg = f"Unknown error while accessing registry: {exc!s}" + raise DirectorRuntimeError(msg=msg) from exc async def _auth_registry_request( - url: URL, method: str, auth_headers: Dict, session: ClientSession, **kwargs -) -> Tuple[Dict, Dict]: - if not config.REGISTRY_AUTH or not config.REGISTRY_USER or not config.REGISTRY_PW: - raise exceptions.RegistryConnectionError( - "Wrong configuration: Authentication to registry is needed!" 
- ) + app_settings: ApplicationSettings, + url: URL, + method: str, + auth_headers: Mapping, + session: ClientSession, + **kwargs, +) -> tuple[dict, Mapping]: + if ( + not app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + or not app_settings.DIRECTOR_REGISTRY.REGISTRY_USER + or not app_settings.DIRECTOR_REGISTRY.REGISTRY_PW + ): + msg = "Wrong configuration: Authentication to registry is needed!" + raise RegistryConnectionError(msg=msg) # auth issue let's try some authentication get the auth type auth_type = None - auth_details: Dict[str, str] = {} + auth_details: dict[str, str] = {} for key in auth_headers: if str(key).lower() == "www-authenticate": auth_type, auth_value = str(auth_headers[key]).split(" ", 1) @@ -111,10 +145,12 @@ async def _auth_registry_request( } break if not auth_type: - raise exceptions.RegistryConnectionError( - "Unknown registry type: cannot deduce authentication method!" - ) - auth = BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) + msg = "Unknown registry type: cannot deduce authentication method!" 
+ raise RegistryConnectionError(msg=msg) + auth = BasicAuth( + login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, + password=app_settings.DIRECTOR_REGISTRY.REGISTRY_PW.get_secret_value(), + ) # bearer type, it needs a token with all communications if auth_type == "Bearer": @@ -123,26 +159,23 @@ async def _auth_registry_request( service=auth_details["service"], scope=auth_details["scope"] ) async with session.get(token_url, auth=auth, **kwargs) as token_resp: - if not token_resp.status == HTTPStatus.OK: - raise exceptions.RegistryConnectionError( - "Unknown error while authentifying with registry: {}".format( - str(token_resp) - ) - ) + if token_resp.status != HTTPStatus.OK: + msg = f"Unknown error while authentifying with registry: {token_resp!s}" + raise RegistryConnectionError(msg=msg) bearer_code = (await token_resp.json())["token"] - headers = {"Authorization": "Bearer {}".format(bearer_code)} + headers = {"Authorization": f"Bearer {bearer_code}"} async with getattr(session, method.lower())( url, headers=headers, **kwargs ) as resp_wtoken: if resp_wtoken.status == HTTPStatus.NOT_FOUND: logger.exception("path to registry not found: %s", url) - raise exceptions.ServiceNotAvailableError(str(url)) + raise ServiceNotAvailableError(service_name=f"{url}") if resp_wtoken.status > 399: logger.exception( "Unknown error while accessing with token authorized registry: %s", str(resp_wtoken), ) - raise exceptions.RegistryConnectionError(str(resp_wtoken)) + raise RegistryConnectionError(msg=f"{resp_wtoken}") resp_data = await resp_wtoken.json(content_type=None) resp_headers = resp_wtoken.headers return (resp_data, resp_headers) @@ -153,70 +186,99 @@ async def _auth_registry_request( ) as resp_wbasic: if resp_wbasic.status == HTTPStatus.NOT_FOUND: logger.exception("path to registry not found: %s", url) - raise exceptions.ServiceNotAvailableError(str(url)) + raise ServiceNotAvailableError(service_name=f"{url}") if resp_wbasic.status > 399: logger.exception( "Unknown error 
while accessing with token authorized registry: %s", str(resp_wbasic), ) - raise exceptions.RegistryConnectionError(str(resp_wbasic)) + raise RegistryConnectionError(msg=f"{resp_wbasic}") resp_data = await resp_wbasic.json(content_type=None) resp_headers = resp_wbasic.headers return (resp_data, resp_headers) - raise exceptions.RegistryConnectionError( - f"Unknown registry authentification type: {url}" - ) + msg = f"Unknown registry authentification type: {url}" + raise RegistryConnectionError(msg=msg) async def registry_request( - app: web.Application, + app: FastAPI, path: str, method: str = "GET", no_cache: bool = False, **session_kwargs, -) -> Tuple[Dict, Dict]: +) -> tuple[dict, Mapping]: logger.debug( "Request to registry: path=%s, method=%s. no_cache=%s", path, method, no_cache ) - return await cache_requests(_basic_auth_registry_request, no_cache)( + cache: SimpleMemoryCache = app.state.registry_cache_memory + cache_key = f"{method}_{path}" + if not no_cache and (cached_response := await cache.get(cache_key)): + assert isinstance(cached_response, tuple) # nosec + return cast(tuple[dict, Mapping], cached_response) + + app_settings = get_application_settings(app) + response, response_headers = await _basic_auth_registry_request( app, path, method, **session_kwargs ) + if not no_cache and app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET": + await cache.set( + cache_key, + (response, response_headers), + ttl=app_settings.DIRECTOR_REGISTRY_CACHING_TTL.total_seconds(), + ) + + return response, response_headers -async def is_registry_responsive(app: web.Application) -> bool: + +async def _is_registry_responsive(app: FastAPI) -> bool: path = "/v2/" try: await registry_request( app, path, no_cache=True, timeout=ClientTimeout(total=1.0) ) return True - except (exceptions.DirectorException, asyncio.TimeoutError) as exc: + except (TimeoutError, DirectorRuntimeError) as exc: logger.debug("Registry not responsive: %s", exc) return False -async def 
setup_registry(app: web.Application) -> AsyncIterator[None]: +async def _setup_registry(app: FastAPI) -> None: logger.debug("pinging registry...") @retry( wait=wait_fixed(2), before_sleep=before_sleep_log(logger, logging.WARNING), - retry=retry_if_result(lambda result: result == False), + retry=retry_if_result(lambda result: result is False), reraise=True, ) - async def wait_until_registry_responsive(app: web.Application) -> bool: - return await is_registry_responsive(app) + async def wait_until_registry_responsive(app: FastAPI) -> bool: + return await _is_registry_responsive(app) await wait_until_registry_responsive(app) logger.info("Connected to docker registry") - yield -async def _list_repositories(app: web.Application) -> List[str]: +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + cache = Cache(Cache.MEMORY) + assert isinstance(cache, SimpleMemoryCache) # nosec + app.state.registry_cache_memory = cache + await _setup_registry(app) + + async def on_shutdown() -> None: + # nothing to do here + ... 
+ + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +async def _list_repositories(app: FastAPI) -> list[str]: logger.debug("listing repositories") # if there are more repos, the Link will be available in the response headers until none available path = f"/v2/_catalog?n={NUMBER_OF_RETRIEVED_REPOS}" - repos_list: List = [] + repos_list: list = [] while True: result, headers = await registry_request(app, path) if result["repositories"]: @@ -228,9 +290,9 @@ async def _list_repositories(app: web.Application) -> List[str]: return repos_list -async def list_image_tags(app: web.Application, image_key: str) -> List[str]: +async def list_image_tags(app: FastAPI, image_key: str) -> list[str]: logger.debug("listing image tags in %s", image_key) - image_tags: List = [] + image_tags: list = [] # get list of image tags path = f"/v2/{image_key}/tags/list?n={NUMBER_OF_RETRIEVED_TAGS}" while True: @@ -243,10 +305,12 @@ async def list_image_tags(app: web.Application, image_key: str) -> List[str]: logger.debug("Found %s image tags in %s", len(image_tags), image_key) return image_tags + _DOCKER_CONTENT_DIGEST_HEADER = "Docker-Content-Digest" -async def get_image_digest(app: web.Application, image: str, tag: str) -> Optional[str]: - """ Returns image manifest digest number or None if fails to obtain it + +async def get_image_digest(app: FastAPI, image: str, tag: str) -> str | None: + """Returns image manifest digest number or None if fails to obtain it The manifest digest is essentially a SHA256 hash of the image manifest @@ -259,20 +323,22 @@ async def get_image_digest(app: web.Application, image: str, tag: str) -> Option return headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) -async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[Dict, Optional[str]]: - """ Returns image labels and the image manifest digest """ +async def get_image_labels( + app: FastAPI, image: str, tag: str +) -> tuple[dict[str, str], str | 
None]: + """Returns image labels and the image manifest digest""" logger.debug("getting image labels of %s:%s", image, tag) path = f"/v2/{image}/manifests/{tag}" request_result, headers = await registry_request(app, path) v1_compatibility_key = json.loads(request_result["history"][0]["v1Compatibility"]) - container_config = v1_compatibility_key.get( + container_config: dict[str, Any] = v1_compatibility_key.get( "container_config", v1_compatibility_key["config"] ) - labels = container_config["Labels"] + labels: dict[str, str] = container_config["Labels"] headers = headers or {} - manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) + manifest_digest: str | None = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) logger.debug("retrieved labels of image %s:%s", image, tag) @@ -280,15 +346,15 @@ async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[ async def get_image_details( - app: web.Application, image_key: str, image_tag: str -) -> Dict: - image_details: Dict = {} + app: FastAPI, image_key: str, image_tag: str +) -> dict[str, Any]: + image_details: dict = {} labels, image_manifest_digest = await get_image_labels(app, image_key, image_tag) if image_manifest_digest: # Adds manifest as extra key in the response similar to org.opencontainers.image.base.digest # SEE https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys - image_details.update({"image_digest":image_manifest_digest}) + image_details.update({"image_digest": image_manifest_digest}) if not labels: return image_details @@ -297,7 +363,7 @@ async def get_image_details( continue try: label_data = json.loads(labels[key]) - for label_key in label_data.keys(): + for label_key in label_data: image_details[label_key] = label_data[label_key] except json.decoder.JSONDecodeError: logging.exception( @@ -311,18 +377,20 @@ async def get_image_details( return image_details -async def get_repo_details(app: web.Application, image_key: str) -> 
List[Dict]: - repo_details = [] +async def get_repo_details(app: FastAPI, image_key: str) -> list[dict[str, Any]]: + image_tags = await list_image_tags(app, image_key) - tasks = [get_image_details(app, image_key, tag) for tag in image_tags] - results = await asyncio.gather(*tasks) - for image_details in results: - if image_details: - repo_details.append(image_details) - return repo_details + + results = await limited_gather( + *[get_image_details(app, image_key, tag) for tag in image_tags], + reraise=False, + log=logger, + limit=_MAX_CONCURRENT_CALLS, + ) + return [result for result in results if not isinstance(result, BaseException)] -async def list_services(app: web.Application, service_type: ServiceType) -> List[Dict]: +async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: logger.debug("getting list of services") repos = await _list_repositories(app) # get the services repos @@ -335,29 +403,34 @@ async def list_services(app: web.Application, service_type: ServiceType) -> List logger.debug("retrieved list of repos : %s", repos) # only list as service if it actually contains the necessary labels - tasks = [get_repo_details(app, repo) for repo in repos] - results = await asyncio.gather(*tasks, return_exceptions=True) - services = [] - for repo_details in results: - if repo_details and isinstance(repo_details, list): - services.extend(repo_details) - elif isinstance(repo_details, Exception): - logger.error("Exception occured while listing services %s", repo_details) - return services + results = await limited_gather( + *[get_repo_details(app, repo) for repo in repos], + reraise=False, + log=logger, + limit=_MAX_CONCURRENT_CALLS, + ) + + return [ + service + for repo_details in results + if isinstance(repo_details, list) + for service in repo_details + ] async def list_interactive_service_dependencies( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> 
list[dict]: image_labels, _ = await get_image_labels(app, service_key, service_tag) dependency_keys = [] if DEPENDENCIES_LABEL_KEY in image_labels: try: dependencies = json.loads(image_labels[DEPENDENCIES_LABEL_KEY]) - for dependency in dependencies: - dependency_keys.append( - {"key": dependency["key"], "tag": dependency["tag"]} - ) + dependency_keys = [ + {"key": dependency["key"], "tag": dependency["tag"]} + for dependency in dependencies + ] + except json.decoder.JSONDecodeError: logging.exception( "Incorrect json formatting in %s, skipping...", @@ -368,7 +441,7 @@ async def list_interactive_service_dependencies( def _get_prefix(service_type: ServiceType) -> str: - return "{}/{}/".format(config.SIMCORE_SERVICES_PREFIX, service_type.value) + return f"{DIRECTOR_SIMCORE_SERVICES_PREFIX}/{service_type.value}/" def get_service_first_name(image_key: str) -> str: @@ -407,7 +480,7 @@ def get_service_last_names(image_key: str) -> str: RESOURCES_ENTRY_NAME = "Resources".lower() -def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): +def _validate_kind(entry_to_validate: dict[str, Any], kind_name: str): for element in ( entry_to_validate.get("value", {}) .get("Reservations", {}) @@ -419,22 +492,25 @@ def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): async def get_service_extras( - app: web.Application, image_key: str, image_tag: str -) -> Dict[str, Any]: + app: FastAPI, image_key: str, image_tag: str +) -> dict[str, Any]: # check physical node requirements # all nodes require "CPU" - result = { + app_settings = get_application_settings(app) + result: dict[str, Any] = { "node_requirements": { - "CPU": config.DEFAULT_MAX_NANO_CPUS / 1.0e09, - "RAM": config.DEFAULT_MAX_MEMORY, + "CPU": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS / 1.0e09, + "RAM": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, } } labels, _ = await get_image_labels(app, image_key, image_tag) logger.debug("Compiling service extras from labels %s", pformat(labels)) - if 
config.SERVICE_RUNTIME_SETTINGS in labels: - service_settings = json.loads(labels[config.SERVICE_RUNTIME_SETTINGS]) + if SERVICE_RUNTIME_SETTINGS in labels: + service_settings: list[dict[str, Any]] = json.loads( + labels[SERVICE_RUNTIME_SETTINGS] + ) for entry in service_settings: entry_name = entry.get("name", "").lower() entry_value = entry.get("value") @@ -448,19 +524,18 @@ async def get_service_extras( result["node_requirements"]["CPU"] = ( float(res_limit.get("NanoCPUs", 0)) or float(res_reservation.get("NanoCPUs", 0)) - or config.DEFAULT_MAX_NANO_CPUS + or app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS ) / 1.0e09 # RAM result["node_requirements"]["RAM"] = ( res_limit.get("MemoryBytes", 0) or res_reservation.get("MemoryBytes", 0) - or config.DEFAULT_MAX_MEMORY + or app_settings.DIRECTOR_DEFAULT_MAX_MEMORY ) else: invalid_with_msg = f"invalid type for resource [{entry_value}]" # discrete resources (custom made ones) --- - # TODO: this could be adjusted to separate between GPU and/or VRAM # check if the service requires GPU support if not invalid_with_msg and _validate_kind(entry, "VRAM"): @@ -493,7 +568,7 @@ async def get_service_extras( result.update( { sl: labels[dl] - for dl, sl in config.ORG_LABELS_TO_SCHEMA_LABELS.items() + for dl, sl in ORG_LABELS_TO_SCHEMA_LABELS.items() if dl in labels } ) diff --git a/services/director/src/simcore_service_director/resources.py b/services/director/src/simcore_service_director/resources.py deleted file mode 100644 index d5471ce39d0..00000000000 --- a/services/director/src/simcore_service_director/resources.py +++ /dev/null @@ -1,36 +0,0 @@ -import functools -from pathlib import Path - -import pkg_resources -from simcore_service_director import config - - -RESOURCE_OPENAPI_ROOT: str = "api" -RESOURCE_OPEN_API: str = f"{RESOURCE_OPENAPI_ROOT}/{config.API_VERSION}/openapi.yaml" -RESOURCE_NODE_SCHEMA: str = config.NODE_SCHEMA_LOCATION - -""" - List of pkg_resources functions *bound* to current package with the following 
signature - - function(resource_name) - - Note that resource names must be /-separated paths and - cannot be absolute (i.e. no leading /) or contain relative names like "..". - Do not use os.path routines to manipulate resource paths, as they are not filesystem paths. - - Resources are read/only files/folders -""" -exists = functools.partial(pkg_resources.resource_exists, __name__) -stream = functools.partial(pkg_resources.resource_stream, __name__) -listdir = functools.partial(pkg_resources.resource_listdir, __name__) -isdir = functools.partial(pkg_resources.resource_isdir, __name__) - - -def get_path(resource_name: str) -> Path: - """ Returns a path to a resource - - WARNING: existence of file is not guaranteed. Use resources.exists - WARNING: resource files are supposed to be used as read-only! - """ - resource_path = Path(pkg_resources.resource_filename(__name__, resource_name)) - return resource_path diff --git a/services/director/src/simcore_service_director/rest/__init__.py b/services/director/src/simcore_service_director/rest/__init__.py deleted file mode 100644 index a7048f43474..00000000000 --- a/services/director/src/simcore_service_director/rest/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. 
-""" -from .generated_code import ( - models, - util, - routing -) diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py b/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py deleted file mode 100644 index c758f7a5a10..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -# import models into model package -from .error_enveloped import ErrorEnveloped -from .health_check_enveloped import HealthCheckEnveloped -from .inline_response200 import InlineResponse200 -from .inline_response2001 import InlineResponse2001 -from .inline_response2001_authors import InlineResponse2001Authors -from .inline_response2001_badges import InlineResponse2001Badges -from .inline_response2002 import InlineResponse2002 -from .inline_response2002_data import InlineResponse2002Data -from .inline_response2002_data_container_spec import InlineResponse2002DataContainerSpec -from .inline_response2002_data_node_requirements import InlineResponse2002DataNodeRequirements -from .inline_response2002_data_service_build_details import InlineResponse2002DataServiceBuildDetails -from .inline_response2003 import InlineResponse2003 -from .inline_response2003_data import InlineResponse2003Data -from .inline_response200_data import InlineResponse200Data -from .inline_response201 import InlineResponse201 -from .inline_response_default import InlineResponseDefault -from .inline_response_default_error import InlineResponseDefaultError -from .running_service_enveloped import RunningServiceEnveloped -from .running_services_enveloped import RunningServicesEnveloped -from .service_extras_enveloped import ServiceExtrasEnveloped -from .services_enveloped import ServicesEnveloped -from .simcore_node import SimcoreNode diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py 
b/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py deleted file mode 100644 index 5d67f4e0a8e..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py +++ /dev/null @@ -1,66 +0,0 @@ -import pprint - -import typing - -from .. import util - -T = typing.TypeVar('T') - - -class Model(object): - # openapiTypes: The key is attribute name and the - # value is attribute type. - openapi_types = {} - - # attributeMap: The key is attribute name and the - # value is json key in definition. - attribute_map = {} - - @classmethod - def from_dict(cls: T, dikt: dict) -> T: - """Returns the dict as a model""" - return util.deserialize_model(dikt, cls) - - def to_dict(self) -> dict: - """Returns the model properties as a dict - """ - result = {} - - for attr_key, json_key in self.attribute_map.items(): - value = getattr(self, attr_key) - if value is None: - continue - if isinstance(value, list): - result[json_key] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[json_key] = value.to_dict() - elif isinstance(value, dict): - result[json_key] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[json_key] = value - - return result - - def to_str(self) -> str: - """Returns the string representation of the model - """ - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py 
b/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py deleted file mode 100644 index 80829e28b9e..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response_default_error import InlineResponseDefaultError -from .. import util - - -class ErrorEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: object=None, error: InlineResponseDefaultError=None): - """ErrorEnveloped - a model defined in OpenAPI - - :param data: The data of this ErrorEnveloped. - :param error: The error of this ErrorEnveloped. - """ - self.openapi_types = { - 'data': object, - 'error': InlineResponseDefaultError - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ErrorEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ErrorEnveloped of this ErrorEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ErrorEnveloped. - - - :return: The data of this ErrorEnveloped. - :rtype: object - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ErrorEnveloped. - - - :param data: The data of this ErrorEnveloped. - :type data: object - """ - - self._data = data - - @property - def error(self): - """Gets the error of this ErrorEnveloped. - - - :return: The error of this ErrorEnveloped. - :rtype: InlineResponseDefaultError - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ErrorEnveloped. 
- - - :param error: The error of this ErrorEnveloped. - :type error: InlineResponseDefaultError - """ - if error is None: - raise ValueError("Invalid value for `error`, must not be `None`") - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py deleted file mode 100644 index 3906d343690..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response200_data import InlineResponse200Data -from .. import util - - -class HealthCheckEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse200Data=None, error: object=None): - """HealthCheckEnveloped - a model defined in OpenAPI - - :param data: The data of this HealthCheckEnveloped. - :param error: The error of this HealthCheckEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse200Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'HealthCheckEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The HealthCheckEnveloped of this HealthCheckEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this HealthCheckEnveloped. - - - :return: The data of this HealthCheckEnveloped. - :rtype: InlineResponse200Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this HealthCheckEnveloped. 
- - - :param data: The data of this HealthCheckEnveloped. - :type data: InlineResponse200Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this HealthCheckEnveloped. - - - :return: The error of this HealthCheckEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this HealthCheckEnveloped. - - - :param error: The error of this HealthCheckEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py deleted file mode 100644 index 007a500aced..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response200_data import InlineResponse200Data -from .. import util - - -class InlineResponse200(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse200Data=None, error: object=None): - """InlineResponse200 - a model defined in OpenAPI - - :param data: The data of this InlineResponse200. - :param error: The error of this InlineResponse200. - """ - self.openapi_types = { - 'data': InlineResponse200Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse200': - """Returns the dict as a model - - :param dikt: A dict. 
- :return: The inline_response_200 of this InlineResponse200. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse200. - - - :return: The data of this InlineResponse200. - :rtype: InlineResponse200Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse200. - - - :param data: The data of this InlineResponse200. - :type data: InlineResponse200Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse200. - - - :return: The error of this InlineResponse200. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse200. - - - :param error: The error of this InlineResponse200. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py deleted file mode 100644 index 86c64116439..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .simcore_node import SimcoreNode -from .. import util - - -class InlineResponse2001(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[SimcoreNode]=None, error: object=None): - """InlineResponse2001 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2001. - :param error: The error of this InlineResponse2001. 
- """ - self.openapi_types = { - 'data': List[SimcoreNode], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1 of this InlineResponse2001. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2001. - - - :return: The data of this InlineResponse2001. - :rtype: List[SimcoreNode] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2001. - - - :param data: The data of this InlineResponse2001. - :type data: List[SimcoreNode] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2001. - - - :return: The error of this InlineResponse2001. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2001. - - - :param error: The error of this InlineResponse2001. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py deleted file mode 100644 index 2fd9d6d8983..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2001Authors(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, email: str=None, affiliation: str=None): - """InlineResponse2001Authors - a model defined in OpenAPI - - :param name: The name of this InlineResponse2001Authors. - :param email: The email of this InlineResponse2001Authors. - :param affiliation: The affiliation of this InlineResponse2001Authors. - """ - self.openapi_types = { - 'name': str, - 'email': str, - 'affiliation': str - } - - self.attribute_map = { - 'name': 'name', - 'email': 'email', - 'affiliation': 'affiliation' - } - - self._name = name - self._email = email - self._affiliation = affiliation - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Authors': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_authors of this InlineResponse2001Authors. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse2001Authors. - - Name of the author - - :return: The name of this InlineResponse2001Authors. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2001Authors. - - Name of the author - - :param name: The name of this InlineResponse2001Authors. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def email(self): - """Gets the email of this InlineResponse2001Authors. - - Email address - - :return: The email of this InlineResponse2001Authors. - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this InlineResponse2001Authors. - - Email address - - :param email: The email of this InlineResponse2001Authors. 
- :type email: str - """ - if email is None: - raise ValueError("Invalid value for `email`, must not be `None`") - - self._email = email - - @property - def affiliation(self): - """Gets the affiliation of this InlineResponse2001Authors. - - Affiliation of the author - - :return: The affiliation of this InlineResponse2001Authors. - :rtype: str - """ - return self._affiliation - - @affiliation.setter - def affiliation(self, affiliation): - """Sets the affiliation of this InlineResponse2001Authors. - - Affiliation of the author - - :param affiliation: The affiliation of this InlineResponse2001Authors. - :type affiliation: str - """ - - self._affiliation = affiliation diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py deleted file mode 100644 index 94121a17f3a..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2001Badges(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, image: str=None, url: str=None): - """InlineResponse2001Badges - a model defined in OpenAPI - - :param name: The name of this InlineResponse2001Badges. - :param image: The image of this InlineResponse2001Badges. - :param url: The url of this InlineResponse2001Badges. 
- """ - self.openapi_types = { - 'name': str, - 'image': str, - 'url': str - } - - self.attribute_map = { - 'name': 'name', - 'image': 'image', - 'url': 'url' - } - - self._name = name - self._image = image - self._url = url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Badges': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_badges of this InlineResponse2001Badges. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse2001Badges. - - Name of the subject - - :return: The name of this InlineResponse2001Badges. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2001Badges. - - Name of the subject - - :param name: The name of this InlineResponse2001Badges. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def image(self): - """Gets the image of this InlineResponse2001Badges. - - Url to the shield - - :return: The image of this InlineResponse2001Badges. - :rtype: str - """ - return self._image - - @image.setter - def image(self, image): - """Sets the image of this InlineResponse2001Badges. - - Url to the shield - - :param image: The image of this InlineResponse2001Badges. - :type image: str - """ - if image is None: - raise ValueError("Invalid value for `image`, must not be `None`") - - self._image = image - - @property - def url(self): - """Gets the url of this InlineResponse2001Badges. - - Link to status - - :return: The url of this InlineResponse2001Badges. - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this InlineResponse2001Badges. - - Link to status - - :param url: The url of this InlineResponse2001Badges. 
- :type url: str - """ - if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") - - self._url = url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py deleted file mode 100644 index fcfa3b0bf69..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py +++ /dev/null @@ -1,332 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -import re -from .. import util - - -class InlineResponse2001Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, entry_point: str=None, published_port: int=None, service_basepath: str='', service_host: str=None, service_key: str=None, service_message: str=None, service_port: int=None, service_state: str=None, service_uuid: str=None, service_version: str=None): - """InlineResponse2001Data - a model defined in OpenAPI - - :param entry_point: The entry_point of this InlineResponse2001Data. - :param published_port: The published_port of this InlineResponse2001Data. - :param service_basepath: The service_basepath of this InlineResponse2001Data. - :param service_host: The service_host of this InlineResponse2001Data. - :param service_key: The service_key of this InlineResponse2001Data. - :param service_message: The service_message of this InlineResponse2001Data. - :param service_port: The service_port of this InlineResponse2001Data. - :param service_state: The service_state of this InlineResponse2001Data. - :param service_uuid: The service_uuid of this InlineResponse2001Data. - :param service_version: The service_version of this InlineResponse2001Data. 
- """ - self.openapi_types = { - 'entry_point': str, - 'published_port': int, - 'service_basepath': str, - 'service_host': str, - 'service_key': str, - 'service_message': str, - 'service_port': int, - 'service_state': str, - 'service_uuid': str, - 'service_version': str - } - - self.attribute_map = { - 'entry_point': 'entry_point', - 'published_port': 'published_port', - 'service_basepath': 'service_basepath', - 'service_host': 'service_host', - 'service_key': 'service_key', - 'service_message': 'service_message', - 'service_port': 'service_port', - 'service_state': 'service_state', - 'service_uuid': 'service_uuid', - 'service_version': 'service_version' - } - - self._entry_point = entry_point - self._published_port = published_port - self._service_basepath = service_basepath - self._service_host = service_host - self._service_key = service_key - self._service_message = service_message - self._service_port = service_port - self._service_state = service_state - self._service_uuid = service_uuid - self._service_version = service_version - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_data of this InlineResponse2001Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def entry_point(self): - """Gets the entry_point of this InlineResponse2001Data. - - The entry point where the service provides its interface if specified - - :return: The entry_point of this InlineResponse2001Data. - :rtype: str - """ - return self._entry_point - - @entry_point.setter - def entry_point(self, entry_point): - """Sets the entry_point of this InlineResponse2001Data. - - The entry point where the service provides its interface if specified - - :param entry_point: The entry_point of this InlineResponse2001Data. 
- :type entry_point: str - """ - - self._entry_point = entry_point - - @property - def published_port(self): - """Gets the published_port of this InlineResponse2001Data. - - The ports where the service provides its interface - - :return: The published_port of this InlineResponse2001Data. - :rtype: int - """ - return self._published_port - - @published_port.setter - def published_port(self, published_port): - """Sets the published_port of this InlineResponse2001Data. - - The ports where the service provides its interface - - :param published_port: The published_port of this InlineResponse2001Data. - :type published_port: int - """ - if published_port is None: - raise ValueError("Invalid value for `published_port`, must not be `None`") - if published_port is not None and published_port < 1: - raise ValueError("Invalid value for `published_port`, must be a value greater than or equal to `1`") - - self._published_port = published_port - - @property - def service_basepath(self): - """Gets the service_basepath of this InlineResponse2001Data. - - different base path where current service is mounted otherwise defaults to root - - :return: The service_basepath of this InlineResponse2001Data. - :rtype: str - """ - return self._service_basepath - - @service_basepath.setter - def service_basepath(self, service_basepath): - """Sets the service_basepath of this InlineResponse2001Data. - - different base path where current service is mounted otherwise defaults to root - - :param service_basepath: The service_basepath of this InlineResponse2001Data. - :type service_basepath: str - """ - - self._service_basepath = service_basepath - - @property - def service_host(self): - """Gets the service_host of this InlineResponse2001Data. - - service host name within the network - - :return: The service_host of this InlineResponse2001Data. 
- :rtype: str - """ - return self._service_host - - @service_host.setter - def service_host(self, service_host): - """Sets the service_host of this InlineResponse2001Data. - - service host name within the network - - :param service_host: The service_host of this InlineResponse2001Data. - :type service_host: str - """ - if service_host is None: - raise ValueError("Invalid value for `service_host`, must not be `None`") - - self._service_host = service_host - - @property - def service_key(self): - """Gets the service_key of this InlineResponse2001Data. - - distinctive name for the node based on the docker registry path - - :return: The service_key of this InlineResponse2001Data. - :rtype: str - """ - return self._service_key - - @service_key.setter - def service_key(self, service_key): - """Sets the service_key of this InlineResponse2001Data. - - distinctive name for the node based on the docker registry path - - :param service_key: The service_key of this InlineResponse2001Data. - :type service_key: str - """ - if service_key is None: - raise ValueError("Invalid value for `service_key`, must not be `None`") - if service_key is not None and not re.search(r'^(simcore)\/(services)\/(comp|dynamic)(\/[^\s\/]+)+$', service_key): - raise ValueError("Invalid value for `service_key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic)(\/[^\s\/]+)+$/`") - - self._service_key = service_key - - @property - def service_message(self): - """Gets the service_message of this InlineResponse2001Data. - - the service message - - :return: The service_message of this InlineResponse2001Data. - :rtype: str - """ - return self._service_message - - @service_message.setter - def service_message(self, service_message): - """Sets the service_message of this InlineResponse2001Data. - - the service message - - :param service_message: The service_message of this InlineResponse2001Data. 
- :type service_message: str - """ - - self._service_message = service_message - - @property - def service_port(self): - """Gets the service_port of this InlineResponse2001Data. - - port to access the service within the network - - :return: The service_port of this InlineResponse2001Data. - :rtype: int - """ - return self._service_port - - @service_port.setter - def service_port(self, service_port): - """Sets the service_port of this InlineResponse2001Data. - - port to access the service within the network - - :param service_port: The service_port of this InlineResponse2001Data. - :type service_port: int - """ - if service_port is None: - raise ValueError("Invalid value for `service_port`, must not be `None`") - if service_port is not None and service_port < 1: - raise ValueError("Invalid value for `service_port`, must be a value greater than or equal to `1`") - - self._service_port = service_port - - @property - def service_state(self): - """Gets the service_state of this InlineResponse2001Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :return: The service_state of this InlineResponse2001Data. - :rtype: str - """ - return self._service_state - - @service_state.setter - def service_state(self, service_state): - """Sets the service_state of this InlineResponse2001Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :param service_state: The service_state of this InlineResponse2001Data. 
- :type service_state: str - """ - allowed_values = ["pending", "pulling", "starting", "running", "complete", "failed"] - if service_state not in allowed_values: - raise ValueError( - "Invalid value for `service_state` ({0}), must be one of {1}" - .format(service_state, allowed_values) - ) - - self._service_state = service_state - - @property - def service_uuid(self): - """Gets the service_uuid of this InlineResponse2001Data. - - The UUID attached to this service - - :return: The service_uuid of this InlineResponse2001Data. - :rtype: str - """ - return self._service_uuid - - @service_uuid.setter - def service_uuid(self, service_uuid): - """Sets the service_uuid of this InlineResponse2001Data. - - The UUID attached to this service - - :param service_uuid: The service_uuid of this InlineResponse2001Data. - :type service_uuid: str - """ - if service_uuid is None: - raise ValueError("Invalid value for `service_uuid`, must not be `None`") - - self._service_uuid = service_uuid - - @property - def service_version(self): - """Gets the service_version of this InlineResponse2001Data. - - semantic version number - - :return: The service_version of this InlineResponse2001Data. - :rtype: str - """ - return self._service_version - - @service_version.setter - def service_version(self, service_version): - """Sets the service_version of this InlineResponse2001Data. - - semantic version number - - :param service_version: The service_version of this InlineResponse2001Data. 
- :type service_version: str - """ - if service_version is None: - raise ValueError("Invalid value for `service_version`, must not be `None`") - if service_version is not None and not re.search(r'^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$', service_version): - raise ValueError("Invalid value for `service_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`") - - self._service_version = service_version diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py deleted file mode 100644 index ffeb93d434d..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data import InlineResponse2002Data -from .. import util - - -class InlineResponse2002(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2002Data=None, error: object=None): - """InlineResponse2002 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2002. - :param error: The error of this InlineResponse2002. 
- """ - self.openapi_types = { - 'data': InlineResponse2002Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2 of this InlineResponse2002. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2002. - - - :return: The data of this InlineResponse2002. - :rtype: InlineResponse2002Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2002. - - - :param data: The data of this InlineResponse2002. - :type data: InlineResponse2002Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2002. - - - :return: The error of this InlineResponse2002. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2002. - - - :param error: The error of this InlineResponse2002. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py deleted file mode 100644 index 5a6d37c0b68..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2002Authors(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, affiliation: str=None, email: str=None, name: str=None): - """InlineResponse2002Authors - a model defined in OpenAPI - - :param affiliation: The affiliation of this InlineResponse2002Authors. - :param email: The email of this InlineResponse2002Authors. - :param name: The name of this InlineResponse2002Authors. - """ - self.openapi_types = { - 'affiliation': str, - 'email': str, - 'name': str - } - - self.attribute_map = { - 'affiliation': 'affiliation', - 'email': 'email', - 'name': 'name' - } - - self._affiliation = affiliation - self._email = email - self._name = name - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Authors': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_authors of this InlineResponse2002Authors. - """ - return util.deserialize_model(dikt, cls) - - @property - def affiliation(self): - """Gets the affiliation of this InlineResponse2002Authors. - - Affiliation of the author - - :return: The affiliation of this InlineResponse2002Authors. - :rtype: str - """ - return self._affiliation - - @affiliation.setter - def affiliation(self, affiliation): - """Sets the affiliation of this InlineResponse2002Authors. - - Affiliation of the author - - :param affiliation: The affiliation of this InlineResponse2002Authors. - :type affiliation: str - """ - - self._affiliation = affiliation - - @property - def email(self): - """Gets the email of this InlineResponse2002Authors. - - Email address - - :return: The email of this InlineResponse2002Authors. - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this InlineResponse2002Authors. - - Email address - - :param email: The email of this InlineResponse2002Authors. 
- :type email: str - """ - if email is None: - raise ValueError("Invalid value for `email`, must not be `None`") - - self._email = email - - @property - def name(self): - """Gets the name of this InlineResponse2002Authors. - - Name of the author - - :return: The name of this InlineResponse2002Authors. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2002Authors. - - Name of the author - - :param name: The name of this InlineResponse2002Authors. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py deleted file mode 100644 index 20fb1cf7741..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002Badges(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, image: str=None, name: str=None, url: str=None): - """InlineResponse2002Badges - a model defined in OpenAPI - - :param image: The image of this InlineResponse2002Badges. - :param name: The name of this InlineResponse2002Badges. - :param url: The url of this InlineResponse2002Badges. 
- """ - self.openapi_types = { - 'image': str, - 'name': str, - 'url': str - } - - self.attribute_map = { - 'image': 'image', - 'name': 'name', - 'url': 'url' - } - - self._image = image - self._name = name - self._url = url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Badges': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_badges of this InlineResponse2002Badges. - """ - return util.deserialize_model(dikt, cls) - - @property - def image(self): - """Gets the image of this InlineResponse2002Badges. - - Url to the shield - - :return: The image of this InlineResponse2002Badges. - :rtype: str - """ - return self._image - - @image.setter - def image(self, image): - """Sets the image of this InlineResponse2002Badges. - - Url to the shield - - :param image: The image of this InlineResponse2002Badges. - :type image: str - """ - if image is None: - raise ValueError("Invalid value for `image`, must not be `None`") - - self._image = image - - @property - def name(self): - """Gets the name of this InlineResponse2002Badges. - - Name of the subject - - :return: The name of this InlineResponse2002Badges. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2002Badges. - - Name of the subject - - :param name: The name of this InlineResponse2002Badges. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def url(self): - """Gets the url of this InlineResponse2002Badges. - - Link to status - - :return: The url of this InlineResponse2002Badges. - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this InlineResponse2002Badges. - - Link to status - - :param url: The url of this InlineResponse2002Badges. 
- :type url: str - """ - if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") - - self._url = url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py deleted file mode 100644 index 2fbc19bdc16..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data_container_spec import InlineResponse2002DataContainerSpec -from .inline_response2002_data_node_requirements import InlineResponse2002DataNodeRequirements -from .inline_response2002_data_service_build_details import InlineResponse2002DataServiceBuildDetails -from .. import util - - -class InlineResponse2002Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, node_requirements: InlineResponse2002DataNodeRequirements=None, service_build_details: InlineResponse2002DataServiceBuildDetails=None, container_spec: InlineResponse2002DataContainerSpec=None): - """InlineResponse2002Data - a model defined in OpenAPI - - :param node_requirements: The node_requirements of this InlineResponse2002Data. - :param service_build_details: The service_build_details of this InlineResponse2002Data. - :param container_spec: The container_spec of this InlineResponse2002Data. 
- """ - self.openapi_types = { - 'node_requirements': InlineResponse2002DataNodeRequirements, - 'service_build_details': InlineResponse2002DataServiceBuildDetails, - 'container_spec': InlineResponse2002DataContainerSpec - } - - self.attribute_map = { - 'node_requirements': 'node_requirements', - 'service_build_details': 'service_build_details', - 'container_spec': 'container_spec' - } - - self._node_requirements = node_requirements - self._service_build_details = service_build_details - self._container_spec = container_spec - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data of this InlineResponse2002Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def node_requirements(self): - """Gets the node_requirements of this InlineResponse2002Data. - - - :return: The node_requirements of this InlineResponse2002Data. - :rtype: InlineResponse2002DataNodeRequirements - """ - return self._node_requirements - - @node_requirements.setter - def node_requirements(self, node_requirements): - """Sets the node_requirements of this InlineResponse2002Data. - - - :param node_requirements: The node_requirements of this InlineResponse2002Data. - :type node_requirements: InlineResponse2002DataNodeRequirements - """ - if node_requirements is None: - raise ValueError("Invalid value for `node_requirements`, must not be `None`") - - self._node_requirements = node_requirements - - @property - def service_build_details(self): - """Gets the service_build_details of this InlineResponse2002Data. - - - :return: The service_build_details of this InlineResponse2002Data. - :rtype: InlineResponse2002DataServiceBuildDetails - """ - return self._service_build_details - - @service_build_details.setter - def service_build_details(self, service_build_details): - """Sets the service_build_details of this InlineResponse2002Data. 
- - - :param service_build_details: The service_build_details of this InlineResponse2002Data. - :type service_build_details: InlineResponse2002DataServiceBuildDetails - """ - - self._service_build_details = service_build_details - - @property - def container_spec(self): - """Gets the container_spec of this InlineResponse2002Data. - - - :return: The container_spec of this InlineResponse2002Data. - :rtype: InlineResponse2002DataContainerSpec - """ - return self._container_spec - - @container_spec.setter - def container_spec(self, container_spec): - """Sets the container_spec of this InlineResponse2002Data. - - - :param container_spec: The container_spec of this InlineResponse2002Data. - :type container_spec: InlineResponse2002DataContainerSpec - """ - - self._container_spec = container_spec diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py deleted file mode 100644 index 6309f0fe94a..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002DataContainerSpec(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, command: List[str]=None): - """InlineResponse2002DataContainerSpec - a model defined in OpenAPI - - :param command: The command of this InlineResponse2002DataContainerSpec. 
- """ - self.openapi_types = { - 'command': List[str] - } - - self.attribute_map = { - 'command': 'command' - } - - self._command = command - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataContainerSpec': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_container_spec of this InlineResponse2002DataContainerSpec. - """ - return util.deserialize_model(dikt, cls) - - @property - def command(self): - """Gets the command of this InlineResponse2002DataContainerSpec. - - - :return: The command of this InlineResponse2002DataContainerSpec. - :rtype: List[str] - """ - return self._command - - @command.setter - def command(self, command): - """Sets the command of this InlineResponse2002DataContainerSpec. - - - :param command: The command of this InlineResponse2002DataContainerSpec. - :type command: List[str] - """ - - self._command = command diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py deleted file mode 100644 index 3a1f8bb6e08..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py +++ /dev/null @@ -1,147 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002DataNodeRequirements(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, cpu: float=1, gpu: int=None, ram: int=None, mpi: int=None): - """InlineResponse2002DataNodeRequirements - a model defined in OpenAPI - - :param cpu: The cpu of this InlineResponse2002DataNodeRequirements. 
- :param gpu: The gpu of this InlineResponse2002DataNodeRequirements. - :param ram: The ram of this InlineResponse2002DataNodeRequirements. - :param mpi: The mpi of this InlineResponse2002DataNodeRequirements. - """ - self.openapi_types = { - 'cpu': float, - 'gpu': int, - 'ram': int, - 'mpi': int - } - - self.attribute_map = { - 'cpu': 'CPU', - 'gpu': 'GPU', - 'ram': 'RAM', - 'mpi': 'MPI' - } - - self._cpu = cpu - self._gpu = gpu - self._ram = ram - self._mpi = mpi - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataNodeRequirements': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_node_requirements of this InlineResponse2002DataNodeRequirements. - """ - return util.deserialize_model(dikt, cls) - - @property - def cpu(self): - """Gets the cpu of this InlineResponse2002DataNodeRequirements. - - - :return: The cpu of this InlineResponse2002DataNodeRequirements. - :rtype: float - """ - return self._cpu - - @cpu.setter - def cpu(self, cpu): - """Sets the cpu of this InlineResponse2002DataNodeRequirements. - - - :param cpu: The cpu of this InlineResponse2002DataNodeRequirements. - :type cpu: float - """ - if cpu is None: - raise ValueError("Invalid value for `cpu`, must not be `None`") - if cpu is not None and cpu < 1: - raise ValueError("Invalid value for `cpu`, must be a value greater than or equal to `1`") - - self._cpu = cpu - - @property - def gpu(self): - """Gets the gpu of this InlineResponse2002DataNodeRequirements. - - - :return: The gpu of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._gpu - - @gpu.setter - def gpu(self, gpu): - """Sets the gpu of this InlineResponse2002DataNodeRequirements. - - - :param gpu: The gpu of this InlineResponse2002DataNodeRequirements. 
- :type gpu: int - """ - if gpu is not None and gpu < 0: - raise ValueError("Invalid value for `gpu`, must be a value greater than or equal to `0`") - - self._gpu = gpu - - @property - def ram(self): - """Gets the ram of this InlineResponse2002DataNodeRequirements. - - - :return: The ram of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._ram - - @ram.setter - def ram(self, ram): - """Sets the ram of this InlineResponse2002DataNodeRequirements. - - - :param ram: The ram of this InlineResponse2002DataNodeRequirements. - :type ram: int - """ - if ram is None: - raise ValueError("Invalid value for `ram`, must not be `None`") - if ram is not None and ram < 1024: - raise ValueError("Invalid value for `ram`, must be a value greater than or equal to `1024`") - - self._ram = ram - - @property - def mpi(self): - """Gets the mpi of this InlineResponse2002DataNodeRequirements. - - - :return: The mpi of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._mpi - - @mpi.setter - def mpi(self, mpi): - """Sets the mpi of this InlineResponse2002DataNodeRequirements. - - - :param mpi: The mpi of this InlineResponse2002DataNodeRequirements. - :type mpi: int - """ - if mpi is not None and mpi > 1: - raise ValueError("Invalid value for `mpi`, must be a value less than or equal to `1`") - - self._mpi = mpi diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py deleted file mode 100644 index 35ab8473235..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py +++ /dev/null @@ -1,110 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2002DataServiceBuildDetails(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, build_date: str=None, vcs_ref: str=None, vcs_url: str=None): - """InlineResponse2002DataServiceBuildDetails - a model defined in OpenAPI - - :param build_date: The build_date of this InlineResponse2002DataServiceBuildDetails. - :param vcs_ref: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. - :param vcs_url: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - """ - self.openapi_types = { - 'build_date': str, - 'vcs_ref': str, - 'vcs_url': str - } - - self.attribute_map = { - 'build_date': 'build_date', - 'vcs_ref': 'vcs_ref', - 'vcs_url': 'vcs_url' - } - - self._build_date = build_date - self._vcs_ref = vcs_ref - self._vcs_url = vcs_url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataServiceBuildDetails': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_service_build_details of this InlineResponse2002DataServiceBuildDetails. - """ - return util.deserialize_model(dikt, cls) - - @property - def build_date(self): - """Gets the build_date of this InlineResponse2002DataServiceBuildDetails. - - - :return: The build_date of this InlineResponse2002DataServiceBuildDetails. - :rtype: str - """ - return self._build_date - - @build_date.setter - def build_date(self, build_date): - """Sets the build_date of this InlineResponse2002DataServiceBuildDetails. - - - :param build_date: The build_date of this InlineResponse2002DataServiceBuildDetails. - :type build_date: str - """ - - self._build_date = build_date - - @property - def vcs_ref(self): - """Gets the vcs_ref of this InlineResponse2002DataServiceBuildDetails. - - - :return: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. 
- :rtype: str - """ - return self._vcs_ref - - @vcs_ref.setter - def vcs_ref(self, vcs_ref): - """Sets the vcs_ref of this InlineResponse2002DataServiceBuildDetails. - - - :param vcs_ref: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. - :type vcs_ref: str - """ - - self._vcs_ref = vcs_ref - - @property - def vcs_url(self): - """Gets the vcs_url of this InlineResponse2002DataServiceBuildDetails. - - - :return: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - :rtype: str - """ - return self._vcs_url - - @vcs_url.setter - def vcs_url(self, vcs_url): - """Sets the vcs_url of this InlineResponse2002DataServiceBuildDetails. - - - :param vcs_url: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - :type vcs_url: str - """ - - self._vcs_url = vcs_url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py deleted file mode 100644 index 3c527146f9d..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class InlineResponse2003(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[InlineResponse2003Data]=None, error: object=None): - """InlineResponse2003 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2003. - :param error: The error of this InlineResponse2003. 
- """ - self.openapi_types = { - 'data': List[InlineResponse2003Data], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2003': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_3 of this InlineResponse2003. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2003. - - - :return: The data of this InlineResponse2003. - :rtype: List[InlineResponse2003Data] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2003. - - - :param data: The data of this InlineResponse2003. - :type data: List[InlineResponse2003Data] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2003. - - - :return: The error of this InlineResponse2003. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2003. - - - :param error: The error of this InlineResponse2003. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py deleted file mode 100644 index bb3c3804dda..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py +++ /dev/null @@ -1,399 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -import re -from .. 
import util - - -class InlineResponse2003Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - published_port: int = None, - entry_point: str = None, - service_uuid: str = None, - service_key: str = None, - service_version: str = None, - service_host: str = None, - service_port: int = None, - service_basepath: str = "", - service_state: str = None, - service_message: str = None, - user_id: str = None, - ): - """InlineResponse2003Data - a model defined in OpenAPI - - :param published_port: The published_port of this InlineResponse2003Data. - :param entry_point: The entry_point of this InlineResponse2003Data. - :param service_uuid: The service_uuid of this InlineResponse2003Data. - :param service_key: The service_key of this InlineResponse2003Data. - :param service_version: The service_version of this InlineResponse2003Data. - :param service_host: The service_host of this InlineResponse2003Data. - :param service_port: The service_port of this InlineResponse2003Data. - :param service_basepath: The service_basepath of this InlineResponse2003Data. - :param service_state: The service_state of this InlineResponse2003Data. - :param service_message: The service_message of this InlineResponse2003Data. - :param user_id: The user_id of this InlineResponse2003Data. 
- """ - self.openapi_types = { - "published_port": int, - "entry_point": str, - "service_uuid": str, - "service_key": str, - "service_version": str, - "service_host": str, - "service_port": int, - "service_basepath": str, - "service_state": str, - "service_message": str, - "user_id": str, - } - - self.attribute_map = { - "published_port": "published_port", - "entry_point": "entry_point", - "service_uuid": "service_uuid", - "service_key": "service_key", - "service_version": "service_version", - "service_host": "service_host", - "service_port": "service_port", - "service_basepath": "service_basepath", - "service_state": "service_state", - "service_message": "service_message", - "user_id": "user_id", - } - - self._published_port = published_port - self._entry_point = entry_point - self._service_uuid = service_uuid - self._service_key = service_key - self._service_version = service_version - self._service_host = service_host - self._service_port = service_port - self._service_basepath = service_basepath - self._service_state = service_state - self._service_message = service_message - self._user_id = user_id - - @classmethod - def from_dict(cls, dikt: dict) -> "InlineResponse2003Data": - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_3_data of this InlineResponse2003Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def published_port(self): - """Gets the published_port of this InlineResponse2003Data. - - The ports where the service provides its interface - - :return: The published_port of this InlineResponse2003Data. - :rtype: int - """ - return self._published_port - - @published_port.setter - def published_port(self, published_port): - """Sets the published_port of this InlineResponse2003Data. - - The ports where the service provides its interface - - :param published_port: The published_port of this InlineResponse2003Data. 
- :type published_port: int - """ - if published_port is None: - raise ValueError("Invalid value for `published_port`, must not be `None`") - if published_port is not None and published_port < 1: - raise ValueError( - "Invalid value for `published_port`, must be a value greater than or equal to `1`" - ) - - self._published_port = published_port - - @property - def entry_point(self): - """Gets the entry_point of this InlineResponse2003Data. - - The entry point where the service provides its interface if specified - - :return: The entry_point of this InlineResponse2003Data. - :rtype: str - """ - return self._entry_point - - @entry_point.setter - def entry_point(self, entry_point): - """Sets the entry_point of this InlineResponse2003Data. - - The entry point where the service provides its interface if specified - - :param entry_point: The entry_point of this InlineResponse2003Data. - :type entry_point: str - """ - - self._entry_point = entry_point - - @property - def service_uuid(self): - """Gets the service_uuid of this InlineResponse2003Data. - - The UUID attached to this service - - :return: The service_uuid of this InlineResponse2003Data. - :rtype: str - """ - return self._service_uuid - - @service_uuid.setter - def service_uuid(self, service_uuid): - """Sets the service_uuid of this InlineResponse2003Data. - - The UUID attached to this service - - :param service_uuid: The service_uuid of this InlineResponse2003Data. - :type service_uuid: str - """ - if service_uuid is None: - raise ValueError("Invalid value for `service_uuid`, must not be `None`") - - self._service_uuid = service_uuid - - @property - def service_key(self): - """Gets the service_key of this InlineResponse2003Data. - - distinctive name for the node based on the docker registry path - - :return: The service_key of this InlineResponse2003Data. 
- :rtype: str - """ - return self._service_key - - @service_key.setter - def service_key(self, service_key): - """Sets the service_key of this InlineResponse2003Data. - - distinctive name for the node based on the docker registry path - - :param service_key: The service_key of this InlineResponse2003Data. - :type service_key: str - """ - if service_key is None: - raise ValueError("Invalid value for `service_key`, must not be `None`") - if service_key is not None and not re.search( - r"^simcore/services/" - r"(?P(comp|dynamic|frontend))/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$", - service_key, - ): - raise ValueError( - r"Invalid value for `service_key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic)(\/[\w\/-]+)+$/`" - ) - - self._service_key = service_key - - @property - def service_version(self): - """Gets the service_version of this InlineResponse2003Data. - - semantic version number - - :return: The service_version of this InlineResponse2003Data. - :rtype: str - """ - return self._service_version - - @service_version.setter - def service_version(self, service_version): - """Sets the service_version of this InlineResponse2003Data. - - semantic version number - - :param service_version: The service_version of this InlineResponse2003Data. 
- :type service_version: str - """ - if service_version is None: - raise ValueError("Invalid value for `service_version`, must not be `None`") - if service_version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[\da-zA-Z-]+)*)?$", - service_version, - ): - raise ValueError( - r"Invalid value for `service_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._service_version = service_version - - @property - def service_host(self): - """Gets the service_host of this InlineResponse2003Data. - - service host name within the network - - :return: The service_host of this InlineResponse2003Data. - :rtype: str - """ - return self._service_host - - @service_host.setter - def service_host(self, service_host): - """Sets the service_host of this InlineResponse2003Data. - - service host name within the network - - :param service_host: The service_host of this InlineResponse2003Data. - :type service_host: str - """ - if service_host is None: - raise ValueError("Invalid value for `service_host`, must not be `None`") - - self._service_host = service_host - - @property - def service_port(self): - """Gets the service_port of this InlineResponse2003Data. - - port to access the service within the network - - :return: The service_port of this InlineResponse2003Data. - :rtype: int - """ - return self._service_port - - @service_port.setter - def service_port(self, service_port): - """Sets the service_port of this InlineResponse2003Data. - - port to access the service within the network - - :param service_port: The service_port of this InlineResponse2003Data. 
- :type service_port: int - """ - if service_port is None: - raise ValueError("Invalid value for `service_port`, must not be `None`") - if service_port is not None and service_port < 1: - raise ValueError( - "Invalid value for `service_port`, must be a value greater than or equal to `1`" - ) - - self._service_port = service_port - - @property - def service_basepath(self): - """Gets the service_basepath of this InlineResponse2003Data. - - different base path where current service is mounted otherwise defaults to root - - :return: The service_basepath of this InlineResponse2003Data. - :rtype: str - """ - return self._service_basepath - - @service_basepath.setter - def service_basepath(self, service_basepath): - """Sets the service_basepath of this InlineResponse2003Data. - - different base path where current service is mounted otherwise defaults to root - - :param service_basepath: The service_basepath of this InlineResponse2003Data. - :type service_basepath: str - """ - - self._service_basepath = service_basepath - - @property - def service_state(self): - """Gets the service_state of this InlineResponse2003Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :return: The service_state of this InlineResponse2003Data. - :rtype: str - """ - return self._service_state - - @service_state.setter - def service_state(self, service_state): - """Sets the service_state of this InlineResponse2003Data. 
- - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :param service_state: The service_state of this InlineResponse2003Data. - :type service_state: str - """ - allowed_values = [ - "pending", - "pulling", - "starting", - "running", - "complete", - "failed", - ] - if service_state not in allowed_values: - raise ValueError( - "Invalid value for `service_state` ({0}), must be one of {1}".format( - service_state, allowed_values - ) - ) - - self._service_state = service_state - - @property - def service_message(self): - """Gets the service_message of this InlineResponse2003Data. - - the service message - - :return: The service_message of this InlineResponse2003Data. - :rtype: str - """ - return self._service_message - - @service_message.setter - def service_message(self, service_message): - """Sets the service_message of this InlineResponse2003Data. - - the service message - - :param service_message: The service_message of this InlineResponse2003Data. - :type service_message: str - """ - - self._service_message = service_message - - @property - def user_id(self): - """Gets the user_id of this InlineResponse2003Data. - - the user that started the service - - :return: The user_id of this InlineResponse2003Data. - :rtype: str - """ - return self._user_id - - @user_id.setter - def user_id(self, user_id): - """Sets the user_id of this InlineResponse2003Data. - - the user that started the service - - :param user_id: The user_id of this InlineResponse2003Data. 
- :type user_id: str - """ - if user_id is None: - raise ValueError("Invalid value for `user_id`, must not be `None`") - - self._user_id = user_id diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py deleted file mode 100644 index 1cc495dbbbd..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py +++ /dev/null @@ -1,135 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse200Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, status: str=None, api_version: str=None, version: str=None): - """InlineResponse200Data - a model defined in OpenAPI - - :param name: The name of this InlineResponse200Data. - :param status: The status of this InlineResponse200Data. - :param api_version: The api_version of this InlineResponse200Data. - :param version: The version of this InlineResponse200Data. - """ - self.openapi_types = { - 'name': str, - 'status': str, - 'api_version': str, - 'version': str - } - - self.attribute_map = { - 'name': 'name', - 'status': 'status', - 'api_version': 'api_version', - 'version': 'version' - } - - self._name = name - self._status = status - self._api_version = api_version - self._version = version - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse200Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_data of this InlineResponse200Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse200Data. 
- - - :return: The name of this InlineResponse200Data. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse200Data. - - - :param name: The name of this InlineResponse200Data. - :type name: str - """ - - self._name = name - - @property - def status(self): - """Gets the status of this InlineResponse200Data. - - - :return: The status of this InlineResponse200Data. - :rtype: str - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this InlineResponse200Data. - - - :param status: The status of this InlineResponse200Data. - :type status: str - """ - - self._status = status - - @property - def api_version(self): - """Gets the api_version of this InlineResponse200Data. - - - :return: The api_version of this InlineResponse200Data. - :rtype: str - """ - return self._api_version - - @api_version.setter - def api_version(self, api_version): - """Sets the api_version of this InlineResponse200Data. - - - :param api_version: The api_version of this InlineResponse200Data. - :type api_version: str - """ - - self._api_version = api_version - - @property - def version(self): - """Gets the version of this InlineResponse200Data. - - - :return: The version of this InlineResponse200Data. - :rtype: str - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this InlineResponse200Data. - - - :param version: The version of this InlineResponse200Data. 
- :type version: str - """ - - self._version = version diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py deleted file mode 100644 index 221a60352b6..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class InlineResponse201(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2003Data=None, error: object=None): - """InlineResponse201 - a model defined in OpenAPI - - :param data: The data of this InlineResponse201. - :param error: The error of this InlineResponse201. - """ - self.openapi_types = { - 'data': InlineResponse2003Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse201': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_201 of this InlineResponse201. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse201. - - - :return: The data of this InlineResponse201. - :rtype: InlineResponse2003Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse201. - - - :param data: The data of this InlineResponse201. 
- :type data: InlineResponse2003Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse201. - - - :return: The error of this InlineResponse201. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse201. - - - :param error: The error of this InlineResponse201. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py deleted file mode 100644 index 3dd0b09b399..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response_default_error import InlineResponseDefaultError -from .. import util - - -class InlineResponseDefault(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: object=None, error: InlineResponseDefaultError=None): - """InlineResponseDefault - a model defined in OpenAPI - - :param data: The data of this InlineResponseDefault. - :param error: The error of this InlineResponseDefault. - """ - self.openapi_types = { - 'data': object, - 'error': InlineResponseDefaultError - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponseDefault': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_default of this InlineResponseDefault. 
- """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponseDefault. - - - :return: The data of this InlineResponseDefault. - :rtype: object - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponseDefault. - - - :param data: The data of this InlineResponseDefault. - :type data: object - """ - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponseDefault. - - - :return: The error of this InlineResponseDefault. - :rtype: InlineResponseDefaultError - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponseDefault. - - - :param error: The error of this InlineResponseDefault. - :type error: InlineResponseDefaultError - """ - if error is None: - raise ValueError("Invalid value for `error`, must not be `None`") - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py deleted file mode 100644 index 95b5cf26175..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py +++ /dev/null @@ -1,118 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponseDefaultError(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, message: str=None, errors: List[object]=None, status: int=None): - """InlineResponseDefaultError - a model defined in OpenAPI - - :param message: The message of this InlineResponseDefaultError. - :param errors: The errors of this InlineResponseDefaultError. 
- :param status: The status of this InlineResponseDefaultError. - """ - self.openapi_types = { - 'message': str, - 'errors': List[object], - 'status': int - } - - self.attribute_map = { - 'message': 'message', - 'errors': 'errors', - 'status': 'status' - } - - self._message = message - self._errors = errors - self._status = status - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponseDefaultError': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_default_error of this InlineResponseDefaultError. - """ - return util.deserialize_model(dikt, cls) - - @property - def message(self): - """Gets the message of this InlineResponseDefaultError. - - Error message - - :return: The message of this InlineResponseDefaultError. - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this InlineResponseDefaultError. - - Error message - - :param message: The message of this InlineResponseDefaultError. - :type message: str - """ - if message is None: - raise ValueError("Invalid value for `message`, must not be `None`") - - self._message = message - - @property - def errors(self): - """Gets the errors of this InlineResponseDefaultError. - - - :return: The errors of this InlineResponseDefaultError. - :rtype: List[object] - """ - return self._errors - - @errors.setter - def errors(self, errors): - """Sets the errors of this InlineResponseDefaultError. - - - :param errors: The errors of this InlineResponseDefaultError. - :type errors: List[object] - """ - - self._errors = errors - - @property - def status(self): - """Gets the status of this InlineResponseDefaultError. - - Error code - - :return: The status of this InlineResponseDefaultError. - :rtype: int - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this InlineResponseDefaultError. - - Error code - - :param status: The status of this InlineResponseDefaultError. 
- :type status: int - """ - if status is None: - raise ValueError("Invalid value for `status`, must not be `None`") - - self._status = status diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py deleted file mode 100644 index 2075fb9fd91..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class RunningServiceEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2003Data=None, error: object=None): - """RunningServiceEnveloped - a model defined in OpenAPI - - :param data: The data of this RunningServiceEnveloped. - :param error: The error of this RunningServiceEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse2003Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'RunningServiceEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The RunningServiceEnveloped of this RunningServiceEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this RunningServiceEnveloped. - - - :return: The data of this RunningServiceEnveloped. - :rtype: InlineResponse2003Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this RunningServiceEnveloped. 
- - - :param data: The data of this RunningServiceEnveloped. - :type data: InlineResponse2003Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this RunningServiceEnveloped. - - - :return: The error of this RunningServiceEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this RunningServiceEnveloped. - - - :param error: The error of this RunningServiceEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py deleted file mode 100644 index 104508d8de4..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class RunningServicesEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[InlineResponse2003Data]=None, error: object=None): - """RunningServicesEnveloped - a model defined in OpenAPI - - :param data: The data of this RunningServicesEnveloped. - :param error: The error of this RunningServicesEnveloped. 
- """ - self.openapi_types = { - 'data': List[InlineResponse2003Data], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'RunningServicesEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The RunningServicesEnveloped of this RunningServicesEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this RunningServicesEnveloped. - - - :return: The data of this RunningServicesEnveloped. - :rtype: List[InlineResponse2003Data] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this RunningServicesEnveloped. - - - :param data: The data of this RunningServicesEnveloped. - :type data: List[InlineResponse2003Data] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this RunningServicesEnveloped. - - - :return: The error of this RunningServicesEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this RunningServicesEnveloped. - - - :param error: The error of this RunningServicesEnveloped. 
- :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py deleted file mode 100644 index dcb444a8725..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data import InlineResponse2002Data -from .. import util - - -class ServiceExtrasEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2002Data=None, error: object=None): - """ServiceExtrasEnveloped - a model defined in OpenAPI - - :param data: The data of this ServiceExtrasEnveloped. - :param error: The error of this ServiceExtrasEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse2002Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ServiceExtrasEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ServiceExtrasEnveloped of this ServiceExtrasEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ServiceExtrasEnveloped. - - - :return: The data of this ServiceExtrasEnveloped. - :rtype: InlineResponse2002Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ServiceExtrasEnveloped. - - - :param data: The data of this ServiceExtrasEnveloped. 
- :type data: InlineResponse2002Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this ServiceExtrasEnveloped. - - - :return: The error of this ServiceExtrasEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ServiceExtrasEnveloped. - - - :param error: The error of this ServiceExtrasEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py deleted file mode 100644 index b101b17ecb8..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .simcore_node import SimcoreNode -from .. import util - - -class ServicesEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[SimcoreNode]=None, error: object=None): - """ServicesEnveloped - a model defined in OpenAPI - - :param data: The data of this ServicesEnveloped. - :param error: The error of this ServicesEnveloped. - """ - self.openapi_types = { - 'data': List[SimcoreNode], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ServicesEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ServicesEnveloped of this ServicesEnveloped. 
- """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ServicesEnveloped. - - - :return: The data of this ServicesEnveloped. - :rtype: List[SimcoreNode] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ServicesEnveloped. - - - :param data: The data of this ServicesEnveloped. - :type data: List[SimcoreNode] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this ServicesEnveloped. - - - :return: The error of this ServicesEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ServicesEnveloped. - - - :param error: The error of this ServicesEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py b/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py deleted file mode 100644 index 2601375f1b8..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py +++ /dev/null @@ -1,447 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2001_authors import InlineResponse2001Authors -from .inline_response2001_badges import InlineResponse2001Badges -import re -from .. import util - - -class SimcoreNode(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- """ - - def __init__( - self, - key: str = None, - integration_version: str = None, - version: str = None, - type: str = None, - name: str = None, - thumbnail: str = None, - badges: List[InlineResponse2001Badges] = None, - description: str = None, - authors: List[InlineResponse2001Authors] = None, - contact: str = None, - inputs: Dict[str, object] = None, - outputs: Dict[str, object] = None, - boot_options: Dict[str, object] = None, - ): - """SimcoreNode - a model defined in OpenAPI - - :param key: The key of this SimcoreNode. - :param integration_version: The integration_version of this SimcoreNode. - :param version: The version of this SimcoreNode. - :param type: The type of this SimcoreNode. - :param name: The name of this SimcoreNode. - :param thumbnail: The thumbnail of this SimcoreNode. - :param badges: The badges of this SimcoreNode. - :param description: The description of this SimcoreNode. - :param authors: The authors of this SimcoreNode. - :param contact: The contact of this SimcoreNode. - :param inputs: The inputs of this SimcoreNode. - :param outputs: The outputs of this SimcoreNode. - :param boot_options: The boot_options of this SimcoreNode. 
- """ - self.openapi_types = { - "key": str, - "integration_version": str, - "version": str, - "type": str, - "name": str, - "thumbnail": str, - "badges": List[InlineResponse2001Badges], - "description": str, - "authors": List[InlineResponse2001Authors], - "contact": str, - "inputs": Dict[str, object], - "outputs": Dict[str, object], - "boot_options": Dict[str, object], - } - - self.attribute_map = { - "key": "key", - "integration_version": "integration-version", - "version": "version", - "type": "type", - "name": "name", - "thumbnail": "thumbnail", - "badges": "badges", - "description": "description", - "authors": "authors", - "contact": "contact", - "inputs": "inputs", - "outputs": "outputs", - "boot_options": "boot-options", - } - - self._key = key - self._integration_version = integration_version - self._version = version - self._type = type - self._name = name - self._thumbnail = thumbnail - self._badges = badges - self._description = description - self._authors = authors - self._contact = contact - self._inputs = inputs - self._outputs = outputs - self._boot_options = boot_options - - @classmethod - def from_dict(cls, dikt: dict) -> "SimcoreNode": - """Returns the dict as a model - - :param dikt: A dict. - :return: The simcore_node of this SimcoreNode. - """ - return util.deserialize_model(dikt, cls) - - @property - def key(self): - """Gets the key of this SimcoreNode. - - distinctive name for the node based on the docker registry path - - :return: The key of this SimcoreNode. - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this SimcoreNode. - - distinctive name for the node based on the docker registry path - - :param key: The key of this SimcoreNode. 
- :type key: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") - if key is not None and not re.search( - r"^simcore/services/" - r"(?P(comp|dynamic|frontend))/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$", - key, - ): - raise ValueError( - r"Invalid value for `key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic|frontend)(\/[\w\/-]+)+$/`" - ) - - self._key = key - - @property - def integration_version(self): - """Gets the integration_version of this SimcoreNode. - - integration version number - - :return: The integration_version of this SimcoreNode. - :rtype: str - """ - return self._integration_version - - @integration_version.setter - def integration_version(self, integration_version): - """Sets the integration_version of this SimcoreNode. - - integration version number - - :param integration_version: The integration_version of this SimcoreNode. - :type integration_version: str - """ - if integration_version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$", - integration_version, - ): - raise ValueError( - r"Invalid value for `integration_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._integration_version = integration_version - - @property - def version(self): - """Gets the version of this SimcoreNode. - - service version number - - :return: The version of this SimcoreNode. - :rtype: str - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this SimcoreNode. - - service version number - - :param version: The version of this SimcoreNode. 
- :type version: str - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") - if version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$", - version, - ): - raise ValueError( - r"Invalid value for `version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._version = version - - @property - def type(self): - """Gets the type of this SimcoreNode. - - service type - - :return: The type of this SimcoreNode. - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this SimcoreNode. - - service type - - :param type: The type of this SimcoreNode. - :type type: str - """ - allowed_values = ["frontend", "computational", "dynamic"] - if type not in allowed_values: - raise ValueError( - "Invalid value for `type` ({0}), must be one of {1}".format( - type, allowed_values - ) - ) - - self._type = type - - @property - def name(self): - """Gets the name of this SimcoreNode. - - short, human readable name for the node - - :return: The name of this SimcoreNode. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this SimcoreNode. - - short, human readable name for the node - - :param name: The name of this SimcoreNode. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def thumbnail(self): - """Gets the thumbnail of this SimcoreNode. - - url to the thumbnail - - :return: The thumbnail of this SimcoreNode. - :rtype: str - """ - return self._thumbnail - - @thumbnail.setter - def thumbnail(self, thumbnail): - """Sets the thumbnail of this SimcoreNode. 
- - url to the thumbnail - - :param thumbnail: The thumbnail of this SimcoreNode. - :type thumbnail: str - """ - - self._thumbnail = thumbnail - - @property - def badges(self): - """Gets the badges of this SimcoreNode. - - - :return: The badges of this SimcoreNode. - :rtype: List[InlineResponse2001Badges] - """ - return self._badges - - @badges.setter - def badges(self, badges): - """Sets the badges of this SimcoreNode. - - - :param badges: The badges of this SimcoreNode. - :type badges: List[InlineResponse2001Badges] - """ - - self._badges = badges - - @property - def description(self): - """Gets the description of this SimcoreNode. - - human readable description of the purpose of the node - - :return: The description of this SimcoreNode. - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this SimcoreNode. - - human readable description of the purpose of the node - - :param description: The description of this SimcoreNode. - :type description: str - """ - if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") - - self._description = description - - @property - def authors(self): - """Gets the authors of this SimcoreNode. - - - :return: The authors of this SimcoreNode. - :rtype: List[InlineResponse2001Authors] - """ - return self._authors - - @authors.setter - def authors(self, authors): - """Sets the authors of this SimcoreNode. - - - :param authors: The authors of this SimcoreNode. - :type authors: List[InlineResponse2001Authors] - """ - if authors is None: - raise ValueError("Invalid value for `authors`, must not be `None`") - - self._authors = authors - - @property - def contact(self): - """Gets the contact of this SimcoreNode. - - email to correspond to the authors about the node - - :return: The contact of this SimcoreNode. 
- :rtype: str - """ - return self._contact - - @contact.setter - def contact(self, contact): - """Sets the contact of this SimcoreNode. - - email to correspond to the authors about the node - - :param contact: The contact of this SimcoreNode. - :type contact: str - """ - if contact is None: - raise ValueError("Invalid value for `contact`, must not be `None`") - - self._contact = contact - - @property - def inputs(self): - """Gets the inputs of this SimcoreNode. - - definition of the inputs of this node - - :return: The inputs of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._inputs - - @inputs.setter - def inputs(self, inputs): - """Sets the inputs of this SimcoreNode. - - definition of the inputs of this node - - :param inputs: The inputs of this SimcoreNode. - :type inputs: Dict[str, object] - """ - if inputs is None: - raise ValueError("Invalid value for `inputs`, must not be `None`") - - self._inputs = inputs - - @property - def outputs(self): - """Gets the outputs of this SimcoreNode. - - definition of the outputs of this node - - :return: The outputs of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._outputs - - @outputs.setter - def outputs(self, outputs): - """Sets the outputs of this SimcoreNode. - - definition of the outputs of this node - - :param outputs: The outputs of this SimcoreNode. - :type outputs: Dict[str, object] - """ - if outputs is None: - raise ValueError("Invalid value for `outputs`, must not be `None`") - - self._outputs = outputs - - @property - def boot_options(self): - """Gets the boot_options of this SimcoreNode. - - Service defined boot options. These get injected in the service as env variables. - - :return: The boot_options of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._boot_options - - @boot_options.setter - def boot_options(self, boot_options): - """Sets the boot_options of this SimcoreNode. - - Service defined boot options. 
These get injected in the service as env variables. - - :param boot_options: The boot_options of this SimcoreNode. - :type boot_options: Dict[str, object] - """ - - self._boot_options = boot_options diff --git a/services/director/src/simcore_service_director/rest/generated_code/routing.py b/services/director/src/simcore_service_director/rest/generated_code/routing.py deleted file mode 100644 index 3cf2a4d57b7..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/routing.py +++ /dev/null @@ -1,94 +0,0 @@ -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. - -use create_web_app to initialise the web application using the specification file. -The base folder is the root of the package. -""" - - -import logging -from pathlib import Path - -from aiohttp import hdrs, web -from aiohttp_apiset import SwaggerRouter -from aiohttp_apiset.exceptions import ValidationError -from aiohttp_apiset.middlewares import Jsonify, jsonify -from aiohttp_apiset.swagger.loader import ExtendedSchemaFile -from aiohttp_apiset.swagger.operations import OperationIdMapping - -from .. 
import handlers -from .models.base_model_ import Model - -log = logging.getLogger(__name__) - -@web.middleware -async def __handle_errors(request, handler): - try: - log.debug("error middleware handling request %s to handler %s", request, handler) - response = await handler(request) - return response - except ValidationError as ex: - # aiohttp apiset errors - log.exception("error happened in handling route") - error = dict(status=ex.status, message=ex.to_tree()) - error_enveloped = dict(error=error) - return web.json_response(error_enveloped, status=ex.status) - except web.HTTPError as ex: - log.exception("error happened in handling route") - error = dict(status=ex.status, message=str(ex.reason)) - error_enveloped = dict(data=error) - return web.json_response(error_enveloped, status=ex.status) - - -def create_web_app(base_folder, spec_file, additional_middlewares = None): - # create the default mapping of the operationId to the implementation code in handlers - opmap = __create_default_operation_mapping(Path(base_folder / spec_file)) - - # generate a version 3 of the API documentation - router = SwaggerRouter( - swagger_ui='/apidoc/', - version_ui=3, # forces the use of version 3 by default - search_dirs=[base_folder], - default_validate=True, - ) - - # add automatic jsonification of the models located in generated code - jsonify.singleton = Jsonify(indent=3, ensure_ascii=False) - jsonify.singleton.add_converter(Model, lambda o: o.to_dict(), score=0) - - middlewares = [jsonify, __handle_errors] - if additional_middlewares: - middlewares.extend(additional_middlewares) - # create the web application using the API - app = web.Application( - router=router, - middlewares=middlewares, - ) - router.set_cors(app, domains='*', headers=( - (hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, hdrs.AUTHORIZATION), - )) - - # Include our specifications in a router, - # is now available in the swagger-ui to the address http://localhost:8080/swagger/?spec=v1 - router.include( - 
spec=Path(base_folder / spec_file), - operationId_mapping=opmap, - name='v0', # name to access in swagger-ui, - basePath="/v0" # BUG: in apiset with openapi 3.0.0 [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) - ) - - return app - -def __create_default_operation_mapping(specs_file): - operation_mapping = {} - yaml_specs = ExtendedSchemaFile(specs_file) - paths = yaml_specs['paths'] - for path in paths.items(): - for method in path[1].items(): # can be get, post, patch, put, delete... - op_str = "operationId" - if op_str not in method[1]: - raise Exception("The API %s does not contain the operationId tag for route %s %s" % (specs_file, path[0], method[0])) - operation_id = method[1][op_str] - operation_mapping[operation_id] = getattr(handlers, operation_id) - return OperationIdMapping(**operation_mapping) diff --git a/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py b/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py deleted file mode 100644 index 0563f81fd53..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding: utf-8 - -import sys - -if sys.version_info < (3, 7): - import typing - - def is_generic(klass): - """ Determine whether klass is a generic class """ - return type(klass) == typing.GenericMeta - - def is_dict(klass): - """ Determine whether klass is a Dict """ - return klass.__extra__ == dict - - def is_list(klass): - """ Determine whether klass is a List """ - return klass.__extra__ == list - -else: - - def is_generic(klass): - """ Determine whether klass is a generic class """ - return hasattr(klass, '__origin__') - - def is_dict(klass): - """ Determine whether klass is a Dict """ - return klass.__origin__ == dict - - def is_list(klass): - """ Determine whether klass is a List """ - return klass.__origin__ == list diff --git 
a/services/director/src/simcore_service_director/rest/generated_code/util.py b/services/director/src/simcore_service_director/rest/generated_code/util.py deleted file mode 100644 index a9ab1e81939..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/util.py +++ /dev/null @@ -1,131 +0,0 @@ -import datetime - -import typing -from typing import Union -from . import typing_utils - -T = typing.TypeVar('T') -Class = typing.Type[T] - - -def _deserialize(data: Union[dict, list, str], klass: Union[Class, str]) -> Union[dict, list, Class, int, float, str, bool, datetime.date, datetime.datetime]: - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. - """ - if data is None: - return None - - if klass in (int, float, str, bool): - return _deserialize_primitive(data, klass) - elif klass == object: - return _deserialize_object(data) - elif klass == datetime.date: - return deserialize_date(data) - elif klass == datetime.datetime: - return deserialize_datetime(data) - elif typing_utils.is_generic(klass): - if typing_utils.is_list(klass): - return _deserialize_list(data, klass.__args__[0]) - if typing_utils.is_dict(klass): - return _deserialize_dict(data, klass.__args__[1]) - else: - return deserialize_model(data, klass) - - -def _deserialize_primitive(data, klass: Class) -> Union[Class, int, float, str, bool]: - """Deserializes to primitive type. - - :param data: data to deserialize. - :param klass: class literal. - - :return: int, float, str, bool. - """ - try: - value = klass(data) - except (UnicodeEncodeError, TypeError): - value = data - return value - - -def _deserialize_object(value: T) -> T: - """Return an original value. - - :return: object. - """ - return value - - -def deserialize_date(string: str) -> datetime.date: - """Deserializes string to date. - - :param string: str. - :return: date. 
- """ - try: - from dateutil.parser import parse - return parse(string).date() - except ImportError: - return string - - -def deserialize_datetime(string: str) -> datetime.datetime: - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :return: datetime. - """ - try: - from dateutil.parser import parse - return parse(string) - except ImportError: - return string - - -def deserialize_model(data: Union[dict, list], klass: T) -> T: - """Deserializes list or dict to model. - - :param data: dict, list. - :param klass: class literal. - :return: model object. - """ - instance = klass() - - if not instance.openapi_types: - return data - - if data is not None and isinstance(data, (list, dict)): - for attr, attr_type in instance.openapi_types.items(): - attr_key = instance.attribute_map[attr] - if attr_key in data: - value = data[attr_key] - setattr(instance, attr, _deserialize(value, attr_type)) - - return instance - - -def _deserialize_list(data: list, boxed_type) -> list: - """Deserializes a list and its elements. - - :param data: list to deserialize. - :param boxed_type: class literal. - - :return: deserialized list. - """ - return [_deserialize(sub_data, boxed_type) for sub_data in data] - - -def _deserialize_dict(data: dict, boxed_type) -> dict: - """Deserializes a dict and its elements. - - :param data: dict to deserialize. - :param boxed_type: class literal. - - :return: deserialized dict. 
- """ - return {k: _deserialize(v, boxed_type) for k, v in data.items()} diff --git a/services/director/src/simcore_service_director/rest/handlers.py b/services/director/src/simcore_service_director/rest/handlers.py deleted file mode 100644 index 151f4e4299a..00000000000 --- a/services/director/src/simcore_service_director/rest/handlers.py +++ /dev/null @@ -1,238 +0,0 @@ -# pylint:disable=too-many-arguments - -import logging -from typing import Optional - -import pkg_resources -import yaml -from aiohttp import web, web_exceptions -from simcore_service_director import exceptions, producer, registry_proxy, resources - -log = logging.getLogger(__name__) - - -async def root_get( - request: web.Request, -) -> web.Response: - log.debug("Client does root_get request %s", request) - distb = pkg_resources.get_distribution("simcore-service-director") - with resources.stream(resources.RESOURCE_OPEN_API) as file_ptr: - api_dict = yaml.safe_load(file_ptr) - - service_health = dict( - name=distb.project_name, - status="SERVICE_RUNNING", - api_version=api_dict["info"]["version"], - version=distb.version, - ) - return web.json_response(data=dict(data=service_health)) - - -async def services_get( - request: web.Request, service_type: Optional[str] = None -) -> web.Response: - log.debug( - "Client does services_get request %s with service_type %s", - request, - service_type, - ) - try: - services = [] - if not service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.ALL - ) - elif "computational" in service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.COMPUTATIONAL - ) - elif "interactive" in service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.DYNAMIC - ) - # NOTE: the validation is done in the catalog. This entrypoint IS and MUST BE only used by the catalog!! 
- # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] - # services = node_validator.validate_nodes(services) - return web.json_response(data=dict(data=services)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def services_by_key_version_get( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - log.debug( - "Client does services_get request %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - services = [ - await registry_proxy.get_image_details( - request.app, service_key, service_version - ) - ] - return web.json_response(data=dict(data=services)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def get_service_labels( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - # GET /services/{service_key}/{service_version}/labels - - log.debug( - "Retrieving service labels %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - service_labels, _ = await registry_proxy.get_image_labels( - request.app, service_key, service_version - ) - return web.json_response(data=dict(data=service_labels)) - - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async 
def service_extras_by_key_version_get( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - # GET /service_extras/{service_key}/{service_version} - log.debug( - "Client does service_extras_by_key_version_get request %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - service_extras = await registry_proxy.get_service_extras( - request.app, service_key, service_version - ) - return web.json_response(data=dict(data=service_extras)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_list_get( - request: web.Request, user_id: str, project_id: str -) -> web.Response: - log.debug( - "Client does running_interactive_services_list_get request %s, user_id %s, project_id %s", - request, - user_id, - project_id, - ) - try: - service = await producer.get_services_details(request.app, user_id, project_id) - return web.json_response(data=dict(data=service), status=200) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_post( - request: web.Request, - user_id: str, - project_id: str, - service_key: str, - service_uuid: str, - service_tag: str, - service_basepath: str, -) -> web.Response: - # NOTE: servicelib is not present here - request_simcore_user_agent = request.headers.get("X-Simcore-User-Agent", "") - log.debug( - "Client does running_interactive_services_post request %s with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", - request, - user_id, - project_id, - service_key, - service_tag, - service_uuid, - service_basepath, - request_simcore_user_agent, - 
) - try: - service = await producer.start_service( - request.app, - user_id, - project_id, - service_key, - service_tag, - service_uuid, - service_basepath, - request_simcore_user_agent, - ) - return web.json_response(data=dict(data=service), status=201) - except exceptions.ServiceStartTimeoutError as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.ServiceUUIDInUseError as err: - raise web_exceptions.HTTPConflict(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_get( - request: web.Request, service_uuid: str -) -> web.Response: - log.debug( - "Client does running_interactive_services_get request %s with service_uuid %s", - request, - service_uuid, - ) - try: - service = await producer.get_service_details(request.app, service_uuid) - return web.json_response(data=dict(data=service), status=200) - except exceptions.ServiceUUIDNotFoundError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_delete( - request: web.Request, service_uuid: str, save_state: Optional[bool] = True -) -> web.Response: - log.debug( - "Client does running_interactive_services_delete request %s with service_uuid %s", - request, - service_uuid, - ) - try: - await producer.stop_service(request.app, service_uuid, save_state) - - except exceptions.ServiceUUIDNotFoundError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except Exception as err: - # server errors are logged (>=500) - log.exception( - "Failed to delete dynamic service %s (save_state=%s)", - service_uuid, - save_state, - ) - 
raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - return web.json_response(status=204) diff --git a/services/director/src/simcore_service_director/services_common.py b/services/director/src/simcore_service_director/services_common.py index f1aef5ac668..b92bbd1ca80 100644 --- a/services/director/src/simcore_service_director/services_common.py +++ b/services/director/src/simcore_service_director/services_common.py @@ -11,14 +11,14 @@ class ServicesCommonSettings(BaseSettings): # set this interval to 1 hour director_dynamic_service_save_timeout: PositiveInt = Field( - _BASE_TIMEOUT_FOR_STOPPING_SERVICES, + default=_BASE_TIMEOUT_FOR_STOPPING_SERVICES, description=( "When stopping a dynamic service, if it has " "big payloads it is important to have longer timeouts." ), ) webserver_director_stop_service_timeout: PositiveInt = Field( - _BASE_TIMEOUT_FOR_STOPPING_SERVICES + 10, + default=_BASE_TIMEOUT_FOR_STOPPING_SERVICES + 10, description=( "When the webserver invokes the director API to stop " "a service which has a very long timeout, it also " @@ -26,7 +26,7 @@ class ServicesCommonSettings(BaseSettings): ), ) storage_service_upload_download_timeout: PositiveInt = Field( - 60 * 60, + default=60 * 60, description=( "When dynamic services upload and download data from storage, " "sometimes very big payloads are involved. 
In order to handle " diff --git a/services/director/src/simcore_service_director/system_utils.py b/services/director/src/simcore_service_director/system_utils.py deleted file mode 100644 index cc3ee25c114..00000000000 --- a/services/director/src/simcore_service_director/system_utils.py +++ /dev/null @@ -1,13 +0,0 @@ -from pathlib import Path -from typing import List - - -def get_system_extra_hosts_raw(extra_host_domain: str) -> List[str]: - extra_hosts = [] - hosts_path = Path("/etc/hosts") - if hosts_path.exists() and extra_host_domain != "undefined": - with hosts_path.open() as hosts: - for line in hosts: - if extra_host_domain in line: - extra_hosts.append(line.strip().replace("\t", " ")) - return extra_hosts diff --git a/services/director/src/simcore_service_director/utils.py b/services/director/src/simcore_service_director/utils.py deleted file mode 100644 index a05e0bdbb39..00000000000 --- a/services/director/src/simcore_service_director/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging -from datetime import datetime -from typing import Optional - -log = logging.getLogger(__name__) - -DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" -_MAXLEN = len("2020-10-09T12:28:14.7710") - - -def parse_as_datetime(timestr: str, *, default: Optional[datetime] = None) -> datetime: - """ - default: if parsing is not possible, it returs default - - """ - # datetime_str is typically '2020-10-09T12:28:14.771034099Z' - # - The T separates the date portion from the time-of-day portion - # - The Z on the end means UTC, that is, an offset-from-UTC - # The 099 before the Z is not clear, therefore we will truncate the last part - - try: - timestr = timestr.strip("Z ")[:_MAXLEN] - dt = datetime.strptime(timestr, DATETIME_FORMAT) - return dt - except ValueError as err: - log.debug("Failed to parse %s: %s", timestr, err) - if default is not None: - return default - raise diff --git a/services/director/temp_generate_openapi.sh b/services/director/temp_generate_openapi.sh deleted file mode 
100755 index 533053087ef..00000000000 --- a/services/director/temp_generate_openapi.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -set -e - -cd $(dirname $0) -usage() -{ - echo "usage: temp_generate_openapi.sh [[[-i input]] | [-h help]]" -} - -apihub_specs_dir= -# process arguments -while [ "$1" != "" ]; do - case $1 in - -i | --input ) shift - apihub_specs_dir=$1 - ;; - -h | --help ) usage - exit - ;; - * ) usage - exit 1 - esac - shift -done - -if [ -z "$apihub_specs_dir" ]; then - echo "please define an apihub specs directory..." - usage - exit 1 -fi - -docker run \ - -v $apihub_specs_dir:/input \ - -v ${PWD}/src/simcore_service_director/api/v0:/output \ - itisfoundation/oas_resolver \ - /input/director/v0/openapi.yaml \ - /output/openapi.yaml diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py deleted file mode 100644 index eecb693e0de..00000000000 --- a/services/director/tests/conftest.py +++ /dev/null @@ -1,115 +0,0 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except -# pylint: disable=redefined-outer-name - -import os -from pathlib import Path - -import pytest -import simcore_service_director -from aiohttp import ClientSession -from simcore_service_director import config, resources - -# NOTE: that all the changes in these pytest-plugins MUST by py3.6 compatible! 
-pytest_plugins = [ - "fixtures.fake_services", - "pytest_simcore.docker_compose", - "pytest_simcore.docker_registry", - "pytest_simcore.docker_swarm", - "pytest_simcore.repository_paths", - "pytest_simcore.pytest_global_environs", -] - - -@pytest.fixture -def configure_swarm_stack_name(): - config.SWARM_STACK_NAME = "test_stack" - - -@pytest.fixture(scope="session") -def common_schemas_specs_dir(osparc_simcore_root_dir): - specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" - assert specs_dir.exists() - return specs_dir - - -@pytest.fixture(scope="session") -def package_dir(): - dirpath = Path(simcore_service_director.__file__).resolve().parent - assert dirpath.exists() - return dirpath - - -@pytest.fixture -def configure_schemas_location(package_dir, common_schemas_specs_dir): - config.NODE_SCHEMA_LOCATION = str( - common_schemas_specs_dir / "node-meta-v0.0.1.json" - ) - resources.RESOURCE_NODE_SCHEMA = os.path.relpath( - config.NODE_SCHEMA_LOCATION, package_dir - ) - - -@pytest.fixture -def configure_registry_access(docker_registry): - config.REGISTRY_URL = docker_registry - config.REGISTRY_PATH = docker_registry - config.REGISTRY_SSL = False - config.DIRECTOR_REGISTRY_CACHING = False - - -@pytest.fixture -def user_id(): - yield "some_user_id" - - -@pytest.fixture -def project_id(): - yield "some_project_id" - - -def pytest_addoption(parser): - parser.addoption("--registry_url", action="store", default="default url") - parser.addoption("--registry_user", action="store", default="default user") - parser.addoption("--registry_pw", action="store", default="default pw") - - -@pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig): - # to set these values call - # pytest --registry_url myregistry --registry_user username --registry_pw password - config.REGISTRY_URL = pytestconfig.getoption("registry_url") - config.REGISTRY_AUTH = True - config.REGISTRY_USER = pytestconfig.getoption("registry_user") - config.REGISTRY_PW 
= pytestconfig.getoption("registry_pw") - config.DIRECTOR_REGISTRY_CACHING = False - - -@pytest.fixture -async def aiohttp_mock_app(loop, mocker): - print("client session started ...") - session = ClientSession() - - mock_app_storage = { - config.APP_CLIENT_SESSION_KEY: session, - config.APP_REGISTRY_CACHE_DATA_KEY: {}, - } - - def _get_item(self, key): - return mock_app_storage[key] - - aiohttp_app = mocker.patch("aiohttp.web.Application") - aiohttp_app.__getitem__ = _get_item - - yield aiohttp_app - - # cleanup session - await session.close() - print("client session closed") - - -@pytest.fixture -def api_version_prefix() -> str: - assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - return "v0" diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py deleted file mode 100644 index e58f547f729..00000000000 --- a/services/director/tests/fixtures/fake_services.py +++ /dev/null @@ -1,242 +0,0 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - - -import asyncio -import json -import logging -import random -from io import BytesIO -from pathlib import Path - -import pytest -import requests -from aiodocker import utils -from aiodocker.docker import Docker -from aiodocker.exceptions import DockerError -from simcore_service_director.config import DEFAULT_MAX_MEMORY, DEFAULT_MAX_NANO_CPUS - -_logger = logging.getLogger(__name__) - - -@pytest.fixture(scope="function") -def push_services(docker_registry, tmpdir): - registry_url = docker_registry - tmp_dir = Path(tmpdir) - - list_of_pushed_images_tags = [] - dependent_images = [] - - async def build_push_images( - number_of_computational_services, - number_of_interactive_services, - inter_dependent_services=False, - bad_json_format=False, - version="1.0.", - ): - try: - dependent_image = None - if inter_dependent_services: - dependent_image = await _build_push_image( - tmp_dir, - registry_url, - "computational", - "dependency", - 
"10.52.999999", - None, - bad_json_format=bad_json_format, - ) - dependent_images.append(dependent_image) - - images_to_build = [] - - for image_index in range(0, number_of_computational_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "computational", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - - for image_index in range(0, number_of_interactive_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "dynamic", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - results = await asyncio.gather(*images_to_build) - list_of_pushed_images_tags.extend(results) - except DockerError: - _logger.exception("Unexpected docker API error") - raise - - return list_of_pushed_images_tags - - yield build_push_images - _logger.info("clean registry") - _clean_registry(registry_url, list_of_pushed_images_tags) - _clean_registry(registry_url, dependent_images) - - -async def _build_push_image( - docker_dir, - registry_url, - service_type, - name, - tag, - dependent_image=None, - *, - bad_json_format=False, -): # pylint: disable=R0913 - - # crate image - service_description = _create_service_description(service_type, name, tag) - docker_labels = _create_docker_labels(service_description, bad_json_format) - additional_docker_labels = [ - {"name": "constraints", "type": "string", "value": ["node.role==manager"]} - ] - - internal_port = None - entry_point = "" - if service_type == "dynamic": - internal_port = random.randint(1, 65535) - additional_docker_labels.append( - {"name": "ports", "type": "int", "value": internal_port} - ) - entry_point = "/test/entry_point" - docker_labels["simcore.service.bootsettings"] = json.dumps( - [{"name": "entry_point", "type": "string", "value": entry_point}] - ) - docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) - if bad_json_format: - 
docker_labels["simcore.service.settings"] = ( - "'fjks" + docker_labels["simcore.service.settings"] - ) - - if dependent_image is not None: - dependent_description = dependent_image["service_description"] - dependency_docker_labels = [ - { - "key": dependent_description["key"], - "tag": dependent_description["version"], - } - ] - docker_labels["simcore.service.dependencies"] = json.dumps( - dependency_docker_labels - ) - if bad_json_format: - docker_labels["simcore.service.dependencies"] = ( - "'fjks" + docker_labels["simcore.service.dependencies"] - ) - - # create the typical org.label-schema labels - service_extras = { - "node_requirements": { - "CPU": DEFAULT_MAX_NANO_CPUS / 1e9, - "RAM": DEFAULT_MAX_MEMORY, - }, - "build_date": "2020-08-19T15:36:27Z", - "vcs_ref": "ca180ef1", - "vcs_url": "git@github.com:ITISFoundation/osparc-simcore.git", - } - docker_labels["org.label-schema.build-date"] = service_extras["build_date"] - docker_labels["org.label-schema.schema-version"] = "1.0" - docker_labels["org.label-schema.vcs-ref"] = service_extras["vcs_ref"] - docker_labels["org.label-schema.vcs-url"] = service_extras["vcs_url"] - - image_tag = registry_url + "/{key}:{version}".format( - key=service_description["key"], version=tag - ) - await _create_base_image(docker_labels, image_tag) - - # push image to registry - docker = Docker() - await docker.images.push(image_tag) - await docker.close() - # remove image from host - # docker.images.remove(image_tag) - return { - "service_description": service_description, - "docker_labels": docker_labels, - "image_path": image_tag, - "internal_port": internal_port, - "entry_point": entry_point, - "service_extras": service_extras, - } - - -def _clean_registry(registry_url, list_of_images): - request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} - for image in list_of_images: - service_description = image["service_description"] - # get the image digest - tag = service_description["version"] - url = 
"http://{host}/v2/{name}/manifests/{tag}".format( - host=registry_url, name=service_description["key"], tag=tag - ) - response = requests.get(url, headers=request_headers) - docker_content_digest = response.headers["Docker-Content-Digest"] - # remove the image from the registry - url = "http://{host}/v2/{name}/manifests/{digest}".format( - host=registry_url, - name=service_description["key"], - digest=docker_content_digest, - ) - response = requests.delete(url, headers=request_headers) - - -async def _create_base_image(labels, tag): - dockerfile = """ -FROM alpine -CMD while true; do sleep 10; done - """ - f = BytesIO(dockerfile.encode("utf-8")) - tar_obj = utils.mktar_from_dockerfile(f) - - # build docker base image - docker = Docker() - base_docker_image = await docker.images.build( - fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag - ) - await docker.close() - return base_docker_image[0] - - -def _create_service_description(service_type, name, tag): - file_name = "dummy_service_description-v1.json" - dummy_description_path = Path(__file__).parent / file_name - with dummy_description_path.open() as file_pt: - service_desc = json.load(file_pt) - - if service_type == "computational": - service_key_type = "comp" - elif service_type == "dynamic": - service_key_type = "dynamic" - service_desc["key"] = "simcore/services/" + service_key_type + "/" + name - service_desc["version"] = tag - service_desc["type"] = service_type - - return service_desc - - -def _create_docker_labels(service_description, bad_json_format): - docker_labels = {} - for key, value in service_description.items(): - docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) - if bad_json_format: - docker_labels[".".join(["io", "simcore", key])] = ( - "d32;'" + docker_labels[".".join(["io", "simcore", key])] - ) - - return docker_labels diff --git a/services/director/tests/helpers/json_schema_validator.py b/services/director/tests/helpers/json_schema_validator.py 
deleted file mode 100644 index 25088a192b8..00000000000 --- a/services/director/tests/helpers/json_schema_validator.py +++ /dev/null @@ -1,29 +0,0 @@ -import json -import logging -from pathlib import Path - -# NOTE: currently uses draft04 version -from jsonschema import SchemaError, ValidationError, validate - -_logger = logging.getLogger(__name__) - - -def validate_instance_object(json_instance: dict, json_schema: dict): - try: - validate(json_instance, json_schema) - except ValidationError: - _logger.exception("Node validation error:") - raise - except SchemaError: - _logger.exception("Schema validation error:") - raise - - -def validate_instance_path(json_instance: Path, json_schema: Path): - with json_instance.open() as file_pointer: - instance = json.load(file_pointer) - - with json_schema.open() as file_pointer: - schema = json.load(file_pointer) - - validate_instance_object(instance, schema) diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py deleted file mode 100644 index f38cb848b22..00000000000 --- a/services/director/tests/test_dummy_services.py +++ /dev/null @@ -1,24 +0,0 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except -# pylint:disable=redefined-outer-name - -import pytest -import json -import logging -from helpers import json_schema_validator - -log = logging.getLogger(__name__) - - -async def test_services_conformity(configure_schemas_location, push_services): - from simcore_service_director import resources - - services = await push_services(1, 1) - with resources.stream(resources.RESOURCE_NODE_SCHEMA) as file_pt: - service_schema = json.load(file_pt) - for service in services: - # validate service - json_schema_validator.validate_instance_object( - service["service_description"], service_schema - ) diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py deleted file mode 100644 index 
4d981ede990..00000000000 --- a/services/director/tests/test_handlers.py +++ /dev/null @@ -1,545 +0,0 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except -# pylint: disable=redefined-outer-name -# pylint: disable=R0915 -# pylint: disable=too-many-arguments - -import json -import uuid -from typing import Optional -from urllib.parse import quote - -import pytest -from aioresponses.core import CallbackResult, aioresponses -from helpers import json_schema_validator -from servicelib.rest_responses import ( # pylint: disable=no-name-in-module - unwrap_envelope, -) -from simcore_service_director import main, resources, rest - - -@pytest.fixture -def client( - loop, - aiohttp_client, - aiohttp_unused_port, - configure_schemas_location, - configure_registry_access, -): - app = main.setup_app() - server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - client = loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - return client - - -async def test_root_get(client, api_version_prefix): - web_response = await client.get(f"/{api_version_prefix}/") - assert web_response.content_type == "application/json" - assert web_response.status == 200 - healthcheck_enveloped = await web_response.json() - assert "data" in healthcheck_enveloped - - assert isinstance(healthcheck_enveloped["data"], dict) - - healthcheck = healthcheck_enveloped["data"] - assert healthcheck["name"] == "simcore-service-director" - assert healthcheck["status"] == "SERVICE_RUNNING" - assert healthcheck["version"] == "0.1.0" - assert healthcheck["api_version"] == "0.1.0" - - -def _check_services(created_services, services, schema_version="v1"): - assert len(created_services) == len(services) - - created_service_descriptions = [ - (x["service_description"]["key"], x["service_description"]["version"]) - for x in created_services - ] - - json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert 
json_schema_path.exists() == True - with json_schema_path.open() as file_pt: - service_schema = json.load(file_pt) - - for service in services: - service.pop("image_digest") - if schema_version == "v1": - assert ( - created_service_descriptions.count((service["key"], service["version"])) - == 1 - ) - json_schema_validator.validate_instance_object(service, service_schema) - - -async def test_services_get(docker_registry, client, push_services, api_version_prefix): - # empty case - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services([], services) - - # some services - created_services = await push_services( - number_of_computational_services=3, number_of_interactive_services=2 - ) - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services(created_services, services) - - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=blahblah" - ) - assert web_response.status == 400 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert not "data" in services_enveloped - assert "error" in services_enveloped - - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=computational" - ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - assert len(services) == 3 - 
- web_response = await client.get( - f"/{api_version_prefix}/services?service_type=interactive" - ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - assert len(services) == 2 - - -async def test_services_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( - f"/{api_version_prefix}/services/whatever/someversion" - ) - assert web_response.status == 400 - web_response = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" - ) - assert web_response.status == 404 - web_response = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" - ) - assert web_response.status == 404 - - created_services = await push_services(3, 2) - assert len(created_services) == 5 - - retrieved_services = [] - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ - quote(service_description[key], safe="") for key in ("key", "version") - ] - url = f"/{api_version_prefix}/services/{key}/{version}" - web_response = await client.get(url) - - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. 
- assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - assert len(services) == 1 - retrieved_services.append(services[0]) - _check_services(created_services, retrieved_services) - - -async def test_get_service_labels( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - created_services = await push_services(3, 2) - - for service in created_services: - service_description = service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ - quote(service_description[key], safe="") for key in ("key", "version") - ] - url = f"/{api_version_prefix}/services/{key}/{version}/labels" - web_response = await client.get(url) - assert web_response.status == 200, await web_response.text() - - services_enveloped = await web_response.json() - labels = services_enveloped["data"] - - assert service["docker_labels"] == labels - - -async def test_services_extras_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( - f"/{api_version_prefix}/service_extras/whatever/someversion" - ) - assert web_response.status == 400 - web_response = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" - ) - assert web_response.status == 404 - web_response = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" - ) - assert web_response.status == 404 - - created_services = await push_services(3, 2) - assert len(created_services) == 5 - - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = [ - quote(service_description[key], safe="") for key in ("key", "version") - ] - url = f"/{api_version_prefix}/service_extras/{key}/{version}" - web_response = await client.get(url) - - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. - assert web_response.content_type == "application/json" - service_extras_enveloped = await web_response.json() - - assert isinstance(service_extras_enveloped["data"], dict) - service_extras = service_extras_enveloped["data"] - assert created_service["service_extras"] == service_extras - - -async def _start_get_stop_services( - client, - push_services, - user_id, - project_id, - api_version_prefix: str, - save_state: Optional[bool], - expected_save_state_call: bool, - mocker, -): - params = {} - web_response = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert web_response.status == 400 - - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "None", - "service_tag": "None", # optional - "service_basepath": "None", # optional - } - web_response = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = await web_response.json() - assert web_response.status == 400, data - - params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" - params["service_tag"] = "1.2.3" - web_response = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = await web_response.json() - assert web_response.status == 404, data - - created_services = await push_services(0, 2) - assert len(created_services) == 2 - for created_service in created_services: - service_description = created_service["service_description"] - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - 
service_port = created_service["internal_port"] - service_entry_point = created_service["entry_point"] - params["service_basepath"] = "/i/am/a/basepath" - params["service_uuid"] = str(uuid.uuid4()) - # start the service - web_response = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert web_response.status == 201 - assert web_response.content_type == "application/json" - running_service_enveloped = await web_response.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - "service_version", - "published_port", - "entry_point", - "service_host", - "service_port", - "service_basepath", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert running_service_enveloped["data"]["service_port"] == service_port - service_published_port = running_service_enveloped["data"]["published_port"] - assert not service_published_port - assert service_entry_point == running_service_enveloped["data"]["entry_point"] - service_host = running_service_enveloped["data"]["service_host"] - assert service_host == f"test_{params['service_uuid']}" - service_basepath = running_service_enveloped["data"]["service_basepath"] - assert service_basepath == params["service_basepath"] - - # get the service - web_response = await client.request( - "GET", - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - ) - assert web_response.status == 200 - text = await web_response.text() - assert web_response.content_type == "application/json", text - running_service_enveloped = await web_response.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in 
running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - "service_version", - "published_port", - "entry_point", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert ( - running_service_enveloped["data"]["published_port"] - == service_published_port - ) - assert running_service_enveloped["data"]["entry_point"] == service_entry_point - assert running_service_enveloped["data"]["service_host"] == service_host - assert running_service_enveloped["data"]["service_port"] == service_port - assert running_service_enveloped["data"]["service_basepath"] == service_basepath - - # stop the service - query_params = {} - if save_state: - query_params.update({"save_state": "true" if save_state else "false"}) - - mocked_save_state_cb = mocker.MagicMock( - return_value=CallbackResult(status=200, payload={}) - ) - PASSTHROUGH_REQUESTS_PREFIXES = [ - "http://127.0.0.1", - "http://localhost", - "unix://", # docker engine - "ws://", # websockets - ] - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - - # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- - mock.post( - f"http://{service_host}:{service_port}{service_basepath}/state", - status=200, - callback=mocked_save_state_cb, - ) - web_response = await client.delete( - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - params=query_params, - ) - if expected_save_state_call: - mocked_save_state_cb.assert_called_once() - - text = await web_response.text() - assert web_response.status == 204, text - assert web_response.content_type == "application/json" - data = await web_response.json() - assert data is None - - -@pytest.mark.skip( - reason="docker_swarm fixture is 
a session fixture making it bad running together with other tests that require a swarm" -) -async def test_running_services_post_and_delete_no_swarm( - configure_swarm_stack_name, - client, - push_services, - user_id, - project_id, - api_version_prefix, -): - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "simcore/services/comp/some-key", - } - web_response = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = await web_response.json() - assert web_response.status == 500, data - - -@pytest.mark.parametrize( - "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] -) -async def test_running_services_post_and_delete( - configure_swarm_stack_name, - client, - push_services, - docker_swarm, - user_id, - project_id, - api_version_prefix, - save_state: Optional[bool], - expected_save_state_call: bool, - mocker, -): - await _start_get_stop_services( - client, - push_services, - user_id, - project_id, - api_version_prefix, - save_state, - expected_save_state_call, - mocker, - ) - - -async def test_running_interactive_services_list_get( - client, push_services, docker_swarm -): - """Test case for running_interactive_services_list_get - - Returns a list of interactive services - """ - user_ids = ["first_user_id", "second_user_id"] - project_ids = ["first_project_id", "second_project_id", "third_project_id"] - # prepare services - NUM_SERVICES = 1 - created_services = await push_services(0, NUM_SERVICES) - assert len(created_services) == NUM_SERVICES - # start the services - for user_id in user_ids: - for project_id in project_ids: - for created_service in created_services: - service_description = created_service["service_description"] - params = {} - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - 
params["service_uuid"] = str(uuid.uuid4()) - # start the service - web_response = await client.post( - "/v0/running_interactive_services", params=params - ) - assert web_response.status == 201 - # get the list of services - for user_id in user_ids: - for project_id in project_ids: - params = {} - # list by user_id - params["user_id"] = user_id - response = await client.get( - path="/v0/running_interactive_services", params=params - ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(project_ids) * NUM_SERVICES - # list by user_id and project_id - params["project_id"] = project_id - response = await client.get( - path="/v0/running_interactive_services", params=params - ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == NUM_SERVICES - # list by project_id - params = {} - params["project_id"] = project_id - response = await client.get( - path="/v0/running_interactive_services", params=params - ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(user_ids) * NUM_SERVICES - - -@pytest.mark.skip(reason="test needs credentials to real registry") -async def test_performance_get_services( - loop, configure_custom_registry, configure_schemas_location -): - import time - - fake_request = "fake request" - start_time = time.perf_counter() - number_of_calls = 1 - number_of_services = 0 - for i in range(number_of_calls): - print("calling iteration", i) - start_time_i = time.perf_counter() - 
web_response = await rest.handlers.services_get(fake_request) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = json.loads(web_response.text) - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - number_of_services = len(services) - print("iteration completed in", (time.perf_counter() - start_time_i), "s") - stop_time = time.perf_counter() - print( - "Time to run {} times: {}s, #services {}, time per call {}s/service".format( - number_of_calls, - stop_time - start_time, - number_of_services, - (stop_time - start_time) / number_of_calls / number_of_services, - ) - ) diff --git a/services/director/tests/test_json_schemas.py b/services/director/tests/test_json_schemas.py deleted file mode 100644 index 6a45b1d0740..00000000000 --- a/services/director/tests/test_json_schemas.py +++ /dev/null @@ -1,30 +0,0 @@ -import json -from pathlib import Path - -import pytest -from jsonschema import SchemaError, ValidationError, validate - -from simcore_service_director import resources - -API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - - -def validate_individual_schemas(list_of_paths): - for spec_file_path in list_of_paths: - assert spec_file_path.exists() - with spec_file_path.open() as file_ptr: - schema_specs = json.load(file_ptr) - try: - dummy_instance = {} - with pytest.raises(ValidationError): - validate(dummy_instance, schema_specs) - except SchemaError as err: - pytest.fail(err.message) - - -@pytest.mark.parametrize("version", API_VERSIONS) -def test_valid_individual_json_schemas_specs(version): - name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" - schemas_folder_path = resources.get_path(name) - - validate_individual_schemas(Path(schemas_folder_path).rglob("*.json")) diff --git a/services/director/tests/test_oas.py b/services/director/tests/test_oas.py deleted file mode 100644 index 86898604fa4..00000000000 --- 
a/services/director/tests/test_oas.py +++ /dev/null @@ -1,30 +0,0 @@ -# pylint: disable=redefined-outer-name - -import pytest -import yaml -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError - -from simcore_service_director import resources - - -def test_openapi_specs(): - openapi_path = resources.get_path(resources.RESOURCE_OPEN_API) - with resources.stream(resources.RESOURCE_OPEN_API) as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=openapi_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) - - -def test_server_specs(): - with resources.stream(resources.RESOURCE_OPEN_API) as fh: - specs = yaml.safe_load(fh) - - # client-sdk current limitation - # - hooks to first server listed in oas - default_server = specs["servers"][0] - assert ( - default_server["url"] == "http://{host}:{port}/{version}" - ), "Invalid convention" diff --git a/services/director/tests/test_openapi.py b/services/director/tests/test_openapi.py deleted file mode 100644 index 36b25d16073..00000000000 --- a/services/director/tests/test_openapi.py +++ /dev/null @@ -1,25 +0,0 @@ -from pathlib import Path - -import pkg_resources -import pytest -import simcore_service_director -import yaml -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError -from simcore_service_director.resources import RESOURCE_OPEN_API - - -def test_specifications(): - # pylint: disable=no-value-for-parameter - spec_path = Path( - pkg_resources.resource_filename( - simcore_service_director.__name__, RESOURCE_OPEN_API - ) - ) - - with spec_path.open() as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=spec_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) diff --git a/services/director/tests/test_openapi_schemas.py b/services/director/tests/test_openapi_schemas.py deleted file mode 100644 
index 7849534fbcf..00000000000 --- a/services/director/tests/test_openapi_schemas.py +++ /dev/null @@ -1,70 +0,0 @@ -from pathlib import Path - -import pytest -import yaml - -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError - -from simcore_service_director import resources - -API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - - -def correct_schema_local_references(schema_specs): - for key, value in schema_specs.items(): - if isinstance(value, dict): - correct_schema_local_references(value) - elif "$ref" in key: - if str(value).startswith("#/"): - # correct the reference - new_value = str(value).replace("#/", "#/components/schemas/") - schema_specs[key] = new_value - - -def add_namespace_for_converted_schemas(schema_specs): - # schemas converted from jsonschema do not have an overarching namespace. - # the openapi validator does not like this - # we use the jsonschema title to create a fake namespace - fake_schema_specs = {"FakeName": schema_specs} - return fake_schema_specs - - -def validate_individual_schemas(list_of_paths): - fake_openapi_headers = { - "openapi": "3.0.0", - "info": { - "title": "An include file to define sortable attributes", - "version": "1.0.0", - }, - "paths": {}, - "components": {"parameters": {}, "schemas": {}}, - } - - for spec_file_path in list_of_paths: - assert spec_file_path.exists() - # only consider schemas - if not "openapi.yaml" in str(spec_file_path.name) and "schemas" in str( - spec_file_path - ): - with spec_file_path.open() as file_ptr: - schema_specs = yaml.safe_load(file_ptr) - # correct local references - correct_schema_local_references(schema_specs) - if str(spec_file_path).endswith("-converted.yaml"): - schema_specs = add_namespace_for_converted_schemas(schema_specs) - fake_openapi_headers["components"]["schemas"] = schema_specs - try: - validate_spec(fake_openapi_headers, spec_url=spec_file_path.as_uri()) - except OpenAPIValidationError 
as err: - pytest.fail(err.message) - - -@pytest.mark.parametrize("version", API_VERSIONS) -def test_valid_individual_openapi_schemas_specs(version): - name = "{root}/{version}/schemas".format( - root=resources.RESOURCE_OPENAPI_ROOT, version=version - ) - schemas_folder_path = resources.get_path(name) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yaml")) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yml")) diff --git a/services/director/tests/test_registry_cache_task.py b/services/director/tests/test_registry_cache_task.py deleted file mode 100644 index 056462b9199..00000000000 --- a/services/director/tests/test_registry_cache_task.py +++ /dev/null @@ -1,67 +0,0 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -from asyncio import sleep - -import pytest -from simcore_service_director import config, main, registry_cache_task, registry_proxy - - -@pytest.fixture -def client( - loop, - aiohttp_client, - aiohttp_unused_port, - configure_schemas_location, - configure_registry_access, -): - config.DIRECTOR_REGISTRY_CACHING = True - config.DIRECTOR_REGISTRY_CACHING_TTL = 5 - # config.DIRECTOR_REGISTRY_CACHING_TTL = 5 - app = main.setup_app() - server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - - registry_cache_task.setup(app) - - yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - - -async def test_registry_caching_task(client, push_services): - app = client.app - assert app - - # check the task is started - assert registry_cache_task.TASK_NAME in app - # check the registry cache is empty (no calls yet) - assert registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY in app - - # check we do not get any repository - list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert not list_of_services - assert app[registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY] != {} - # create services in the registry - pushed_services = await 
push_services( - number_of_computational_services=1, number_of_interactive_services=1 - ) - # the services shall be updated - await sleep( - config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 - ) # NOTE: this can take some time. Sleep increased by 10%. - list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert len(list_of_services) == 2 - # add more - pushed_services = await push_services( - number_of_computational_services=2, - number_of_interactive_services=2, - version="2.0.", - ) - await sleep( - config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 - ) # NOTE: this sometimes takes a bit more. Sleep increased a 10%. - list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert len(list_of_services) == len(pushed_services) diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py deleted file mode 100644 index 3141d2f2baa..00000000000 --- a/services/director/tests/test_utils.py +++ /dev/null @@ -1,38 +0,0 @@ -from datetime import datetime - -import pytest -from simcore_service_director.utils import parse_as_datetime - - -@pytest.mark.parametrize( - "timestr", - ( - # Samples taken from https://docs.docker.com/engine/reference/commandline/service_inspect/ - "2020-10-09T18:44:02.558012087Z", - "2020-10-09T12:28:14.771034099Z", - "2020-10-09T12:28:14.7710", - # found cases with spaces - "2020-10-09T12:28:14.77 Z", - " 2020-10-09T12:28:14.77 ", - ), -) -def test_parse_valid_time_strings(timestr): - - dt = parse_as_datetime(timestr) - assert isinstance(dt, datetime) - assert dt.year == 2020 - assert dt.month == 10 - assert dt.day == 9 - - -def test_parse_invalid_timestr(): - now = datetime.utcnow() - invalid_timestr = "2020-10-09T12:28" - - # w/ default, it should NOT raise - dt = parse_as_datetime(invalid_timestr, default=now) - assert dt == now - - # w/o default - with pytest.raises(ValueError): - parse_as_datetime(invalid_timestr) diff --git 
a/services/director/tests/unit/api/conftest.py b/services/director/tests/unit/api/conftest.py new file mode 100644 index 00000000000..e295a9dacd1 --- /dev/null +++ b/services/director/tests/unit/api/conftest.py @@ -0,0 +1,40 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from collections.abc import AsyncIterator + +import httpx +import pytest +from faker import Faker +from fastapi import FastAPI +from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict +from httpx._transports.asgi import ASGITransport + + +@pytest.fixture +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + transport=ASGITransport(app=app), + base_url="http://director.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance(getattr(client, "_transport", None), ASGITransport) + yield client + + +@pytest.fixture +async def created_services( + push_services: PushServicesCallable, +) -> list[ServiceInRegistryInfoDict]: + return await push_services( + number_of_computational_services=3, number_of_interactive_services=2 + ) + + +@pytest.fixture +def x_simcore_user_agent_header(faker: Faker) -> dict[str, str]: + return {"x-simcore-user-agent": faker.pystr()} diff --git a/services/director/tests/unit/api/test_rest_health.py b/services/director/tests/unit/api/test_rest_health.py new file mode 100644 index 00000000000..7a429c668ff --- /dev/null +++ b/services/director/tests/unit/api/test_rest_health.py @@ -0,0 +1,19 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import httpx +from fastapi import status + + +async def test_healthcheck( + 
configure_registry_access, + client: httpx.AsyncClient, + api_version_prefix: str, +): + resp = await client.get(f"/{api_version_prefix}/") + + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + assert "simcore_service_director" in resp.text diff --git a/services/director/tests/unit/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py new file mode 100644 index 00000000000..97accd23279 --- /dev/null +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -0,0 +1,309 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import uuid + +import httpx +import pytest +from aioresponses import CallbackResult, aioresponses +from faker import Faker +from fastapi import status +from models_library.projects import ProjectID +from models_library.users import UserID +from pytest_simcore.helpers.typing_env import EnvVarsDict + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +@pytest.mark.parametrize( + "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] +) +async def test_running_services_post_and_delete( + configure_swarm_stack_name: EnvVarsDict, + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + client: httpx.AsyncClient, + push_services, + user_id: UserID, + project_id: ProjectID, + api_version_prefix: str, + save_state: bool | None, + expected_save_state_call: bool, + mocker, + faker: Faker, + x_simcore_user_agent_header: dict[str, str], + ensure_run_in_sequence_context_is_empty: None, +): + params = {} + resp = await client.post( + 
f"/{api_version_prefix}/running_interactive_services", params=params + ) + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + params = { + "user_id": f"{faker.pyint(min_value=1)}", + "project_id": f"{faker.uuid4()}", + "service_uuid": f"{faker.uuid4()}", + "service_key": "None", + "service_tag": "None", # optional + "service_basepath": "None", # optional + } + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, data + + params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" + params["service_tag"] = "1.2.3" + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", + params=params, + headers=x_simcore_user_agent_header, + ) + data = resp.json() + assert resp.status_code == status.HTTP_404_NOT_FOUND, data + + created_services = await push_services( + number_of_computational_services=0, number_of_interactive_services=2 + ) + assert len(created_services) == 2 + for created_service in created_services: + service_description = created_service["service_description"] + params["user_id"] = f"{user_id}" + params["project_id"] = f"{project_id}" + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + service_port = created_service["internal_port"] + service_entry_point = created_service["entry_point"] + params["service_basepath"] = "/i/am/a/basepath" + params["service_uuid"] = f"{faker.uuid4()}" + # start the service + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", + params=params, + headers=x_simcore_user_agent_header, + ) + assert resp.status_code == status.HTTP_201_CREATED, resp.text + assert resp.encoding == "utf-8" + assert resp.headers["content-type"] == "application/json" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k 
in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + "service_host", + "service_port", + "service_basepath", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert running_service_enveloped["data"]["service_port"] == service_port + service_published_port = running_service_enveloped["data"]["published_port"] + assert not service_published_port + assert service_entry_point == running_service_enveloped["data"]["entry_point"] + service_host = running_service_enveloped["data"]["service_host"] + assert service_host == f"test_{params['service_uuid']}" + service_basepath = running_service_enveloped["data"]["service_basepath"] + assert service_basepath == params["service_basepath"] + + # get the service + resp = await client.request( + "GET", + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + ) + assert resp.status_code == status.HTTP_200_OK + text = resp.text + assert resp.headers["content-type"] == "application/json" + assert resp.encoding == "utf-8", f"Got {text=}" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert ( + running_service_enveloped["data"]["published_port"] + == service_published_port + ) + assert 
running_service_enveloped["data"]["entry_point"] == service_entry_point + assert running_service_enveloped["data"]["service_host"] == service_host + assert running_service_enveloped["data"]["service_port"] == service_port + assert running_service_enveloped["data"]["service_basepath"] == service_basepath + + # stop the service + query_params = {} + if save_state: + query_params.update({"save_state": "true" if save_state else "false"}) + + mocked_save_state_cb = mocker.MagicMock( + return_value=CallbackResult(status=200, payload={}) + ) + PASSTHROUGH_REQUESTS_PREFIXES = [ + "http://127.0.0.1", + "http://localhost", + "unix://", # docker engine + "ws://", # websockets + ] + with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + + # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- + mock.post( + f"http://{service_host}:{service_port}{service_basepath}/state", + status=200, + callback=mocked_save_state_cb, + ) + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + params=query_params, + ) + if expected_save_state_call: + mocked_save_state_cb.assert_called_once() + + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.headers["content-type"] == "application/json" + assert resp.encoding == "utf-8" + + +async def test_running_interactive_services_list_get( + configure_swarm_stack_name: EnvVarsDict, + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + client: httpx.AsyncClient, + push_services, + x_simcore_user_agent_header: dict[str, str], + api_version_prefix: str, + ensure_run_in_sequence_context_is_empty: None, + faker: Faker, +): + """Test case for running_interactive_services_list_get + + Returns a list of interactive services + """ + user_ids = [faker.pyint(min_value=1), faker.pyint(min_value=1)] + project_ids = [faker.uuid4(), faker.uuid4(), faker.uuid4()] 
+ # prepare services + NUM_SERVICES = 1 + available_services = await push_services( + number_of_computational_services=0, number_of_interactive_services=NUM_SERVICES + ) + assert len(available_services) == NUM_SERVICES + # start the services + created_services = [] + for user_id in user_ids: + for project_id in project_ids: + for created_service in available_services: + service_description = created_service["service_description"] + params = {} + params["user_id"] = user_id + params["project_id"] = project_id + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + params["service_uuid"] = str(uuid.uuid4()) + # start the service + resp = await client.post( + "/v0/running_interactive_services", + params=params, + headers=x_simcore_user_agent_header, + ) + assert resp.status_code == 201, resp.text + created_services.append(resp.json()["data"]) + # get the list of services + for user_id in user_ids: + for project_id in project_ids: + params = {} + # list by user_id + params["user_id"] = user_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response) + assert data + assert not error + services_list = data + assert len(services_list) == len(project_ids) * NUM_SERVICES + # list by user_id and project_id + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response) + assert data + assert not error + services_list = data + assert len(services_list) == NUM_SERVICES + # list by project_id + params = {} + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", 
params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response) + assert data + assert not error + services_list = data + assert len(services_list) == len(user_ids) * NUM_SERVICES + # get all the running services + response = await client.get("/v0/running_interactive_services") + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response) + assert data + assert not error + services_list = data + assert len(services_list) == len(user_ids) * len(project_ids) * NUM_SERVICES + + # cleanup + for service in created_services: + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{service['service_uuid']}", + params={"save_state": False}, + ) + assert resp.status_code == status.HTTP_204_NO_CONTENT, resp.text diff --git a/services/director/tests/unit/api/test_rest_service_extras.py b/services/director/tests/unit/api/test_rest_service_extras.py new file mode 100644 index 00000000000..8b8bba037c3 --- /dev/null +++ b/services/director/tests/unit/api/test_rest_service_extras.py @@ -0,0 +1,64 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict +from pytest_simcore.helpers.typing_env import EnvVarsDict + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +async def test_get_services_extras_by_key_and_version_with_empty_registry( + 
configure_registry_access: EnvVarsDict, + client: httpx.AsyncClient, + api_version_prefix: str, +): + resp = await client.get( + f"/{api_version_prefix}/service_extras/whatever/someversion" + ) + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" + + +async def test_get_services_extras_by_key_and_version( + configure_registry_access: EnvVarsDict, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! 
+ key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/service_extras/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + + service_extras, error = _assert_response_and_unwrap_envelope(resp) + assert not error + assert created_service["service_extras"] == service_extras diff --git a/services/director/tests/unit/api/test_rest_services.py b/services/director/tests/unit/api/test_rest_services.py new file mode 100644 index 00000000000..a34ec6a76d4 --- /dev/null +++ b/services/director/tests/unit/api/test_rest_services.py @@ -0,0 +1,199 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict +from models_library.api_schemas_director.services import ServiceDataGet +from pytest_simcore.helpers.typing_env import EnvVarsDict + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +def _assert_services( + *, + expected: list[ServiceInRegistryInfoDict], + got: list[dict], + schema_version="v1", +): + assert len(expected) == len(got) + + expected_key_version_tuples = [ + (s["service_description"]["key"], s["service_description"]["version"]) + for s in expected + ] + + for data in got: + service = ServiceDataGet.parse_obj(data) + assert ( + expected_key_version_tuples.count((f"{service.key}", f"{service.version}")) + == 1 + ) + + +async def test_list_services_with_empty_registry( + docker_registry: str, + configure_registry_access: EnvVarsDict, + client: 
httpx.AsyncClient,
+    api_version_prefix: str,
+):
+    assert docker_registry, "docker-registry is not ready?"
+
+    # empty case
+    resp = await client.get(f"/{api_version_prefix}/services")
+    assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text}"
+
+    services, error = _assert_response_and_unwrap_envelope(resp)
+    assert not error
+    assert isinstance(services, list)
+
+    _assert_services(expected=[], got=services)
+
+
+async def test_list_services(
+    docker_registry: str,
+    configure_registry_access: EnvVarsDict,
+    client: httpx.AsyncClient,
+    created_services: list[ServiceInRegistryInfoDict],
+    api_version_prefix: str,
+):
+    assert docker_registry, "docker-registry is not ready?"
+
+    resp = await client.get(f"/{api_version_prefix}/services")
+    assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text}"
+
+    services, error = _assert_response_and_unwrap_envelope(resp)
+    assert not error
+    assert isinstance(services, list)
+
+    _assert_services(expected=created_services, got=services)
+
+
+async def test_get_service_bad_request(
+    docker_registry: str,
+    configure_registry_access: EnvVarsDict,
+    client: httpx.AsyncClient,
+    created_services: list[ServiceInRegistryInfoDict],
+    api_version_prefix: str,
+):
+    assert docker_registry, "docker-registry is not ready?"
+    assert len(created_services) > 0
+
+    resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah")
+    assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got {resp.text}"
+
+    # NOTE: only successful errors are enveloped
+
+
+async def test_list_services_by_service_type(
+    docker_registry: str,
+    configure_registry_access: EnvVarsDict,
+    client: httpx.AsyncClient,
+    created_services: list[ServiceInRegistryInfoDict],
+    api_version_prefix: str,
+):
+    assert docker_registry, "docker-registry is not ready?"
+    assert len(created_services) == 5
+
+    resp = await client.get(
+        f"/{api_version_prefix}/services?service_type=computational"
+    )
+    assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text}"
+
+    services, error = _assert_response_and_unwrap_envelope(resp)
+    assert not error
+    assert services
+    assert len(services) == 3
+
+    resp = await client.get(f"/{api_version_prefix}/services?service_type=dynamic")
+    assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text}"
+
+    services, error = _assert_response_and_unwrap_envelope(resp)
+    assert not error
+    assert services
+    assert len(services) == 2
+
+
+async def test_get_services_by_key_and_version_with_empty_registry(
+    configure_registry_access: EnvVarsDict,
+    client: httpx.AsyncClient,
+    api_version_prefix: str,
+):
+    resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion")
+    assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got {resp.text}"
+
+    resp = await client.get(
+        f"/{api_version_prefix}/simcore/services/dynamic/something/someversion"
+    )
+    assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got {resp.text}"
+
+    resp = await client.get(
+        f"/{api_version_prefix}/simcore/services/dynamic/something/1.5.2"
+    )
+    assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got {resp.text}"
+
+
+async def test_get_services_by_key_and_version(
+    configure_registry_access: EnvVarsDict,
+    client: httpx.AsyncClient,
+    created_services: list[ServiceInRegistryInfoDict],
+    api_version_prefix: str,
+):
+    assert len(created_services) == 5
+
+    retrieved_services: list[dict] = []
+    for created_service in created_services:
+        service_description = created_service["service_description"]
+        # note that it is very important to remove the safe="/" from quote!!!!
+ key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + services, error = _assert_response_and_unwrap_envelope(resp) + assert not error + assert isinstance(services, list) + assert len(services) == 1 + + retrieved_services.append(services[0]) + + _assert_services(expected=created_services, got=retrieved_services) + + +async def test_get_service_labels( + configure_registry_access: EnvVarsDict, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for service in created_services: + service_description = service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}/labels" + resp = await client.get(url) + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + labels, error = _assert_response_and_unwrap_envelope(resp) + assert not error + + assert service["docker_labels"] == labels diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py new file mode 100644 index 00000000000..75ba8e7fd5c --- /dev/null +++ b/services/director/tests/unit/conftest.py @@ -0,0 +1,177 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from collections.abc import AsyncIterator, Awaitable, Callable +from pathlib import Path +from typing import Any + +import pytest +import simcore_service_director +from asgi_lifespan import LifespanManager +from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from 
pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings + +pytest_plugins = [ + "fixtures.fake_services", + "pytest_simcore.cli_runner", + "pytest_simcore.docker", + "pytest_simcore.docker_compose", + "pytest_simcore.docker_registry", + "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", + "pytest_simcore.faker_projects_data", + "pytest_simcore.faker_users_data", + "pytest_simcore.repository_paths", + "pytest_simcore.simcore_service_library_fixtures", +] + + +def pytest_addoption(parser): + parser.addoption("--registry_url", action="store", default="default url") + parser.addoption("--registry_user", action="store", default="default user") + parser.addoption("--registry_pw", action="store", default="default pw") + + +@pytest.fixture(scope="session") +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "director" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_director")) + return service_folder + + +@pytest.fixture(scope="session") +def installed_package_dir() -> Path: + dirpath = Path(simcore_service_director.__file__).resolve().parent + assert dirpath.exists() + return dirpath + + +@pytest.fixture(scope="session") +def common_schemas_specs_dir(osparc_simcore_root_dir: Path) -> Path: + specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" + assert specs_dir.exists() + return specs_dir + + +@pytest.fixture +def configure_swarm_stack_name( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "SWARM_STACK_NAME": "test_stack", + }, + ) + + +@pytest.fixture +def configure_registry_access( + app_environment: EnvVarsDict, monkeypatch: 
pytest.MonkeyPatch, docker_registry: str +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "REGISTRY_URL": docker_registry, + "REGISTRY_PATH": docker_registry, + "REGISTRY_SSL": False, + "DIRECTOR_REGISTRY_CACHING": False, + }, + ) + + +@pytest.fixture(scope="session") +def configure_custom_registry( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + pytestconfig: pytest.Config, +) -> EnvVarsDict: + # to set these values call + # pytest --registry_url myregistry --registry_user username --registry_pw password + registry_url = pytestconfig.getoption("registry_url") + assert registry_url + assert isinstance(registry_url, str) + registry_user = pytestconfig.getoption("registry_user") + assert registry_user + assert isinstance(registry_user, str) + registry_pw = pytestconfig.getoption("registry_pw") + assert registry_pw + assert isinstance(registry_pw, str) + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "REGISTRY_URL": registry_url, + "REGISTRY_AUTH": True, + "REGISTRY_USER": registry_user, + "REGISTRY_PW": registry_pw, + "REGISTRY_SSL": False, + "DIRECTOR_REGISTRY_CACHING": False, + }, + ) + + +@pytest.fixture +def api_version_prefix() -> str: + return "v0" + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_environment_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **docker_compose_service_environment_dict, + "DIRECTOR_TRACING": "null", + }, + ) + + +MAX_TIME_FOR_APP_TO_STARTUP = 10 +MAX_TIME_FOR_APP_TO_SHUTDOWN = 10 + + +@pytest.fixture +def app_settings(app_environment: EnvVarsDict) -> ApplicationSettings: + return ApplicationSettings.create_from_envs() + + +@pytest.fixture +async def app( + app_settings: ApplicationSettings, is_pdb_enabled: bool +) -> AsyncIterator[FastAPI]: + the_test_app = create_app(settings=app_settings) + async with LifespanManager( + the_test_app, + startup_timeout=None 
if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, + shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, + ): + yield the_test_app + + +@pytest.fixture +async def with_docker_network( + docker_network: Callable[..., Awaitable[dict[str, Any]]], +) -> dict[str, Any]: + return await docker_network() + + +@pytest.fixture +def configured_docker_network( + with_docker_network: dict[str, Any], + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": with_docker_network["Name"]}, + ) diff --git a/services/director/tests/fixtures/dummy_service_description-v1.json b/services/director/tests/unit/fixtures/dummy_service_description-v1.json similarity index 96% rename from services/director/tests/fixtures/dummy_service_description-v1.json rename to services/director/tests/unit/fixtures/dummy_service_description-v1.json index e7e0f4907ca..f68f21a15d6 100644 --- a/services/director/tests/fixtures/dummy_service_description-v1.json +++ b/services/director/tests/unit/fixtures/dummy_service_description-v1.json @@ -55,4 +55,4 @@ "type": "data:application/json" } } -} \ No newline at end of file +} diff --git a/services/director/tests/unit/fixtures/fake_services.py b/services/director/tests/unit/fixtures/fake_services.py new file mode 100644 index 00000000000..1edb799ee9c --- /dev/null +++ b/services/director/tests/unit/fixtures/fake_services.py @@ -0,0 +1,321 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import asyncio +import json +import logging +import random +import sys +from collections.abc import Awaitable, Iterator +from io import BytesIO +from pathlib import Path +from typing import Any, Literal, Protocol, TypedDict + +import pytest +import requests +from aiodocker import utils +from aiodocker.docker import Docker 
+from aiodocker.exceptions import DockerError +from simcore_service_director.core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +class NodeRequirementsDict(TypedDict): + CPU: float + RAM: float + + +class ServiceExtrasDict(TypedDict): + node_requirements: NodeRequirementsDict + build_date: str + vcs_ref: str + vcs_url: str + + +class ServiceDescriptionDict(TypedDict): + key: str + version: str + type: Literal["computational", "dynamic"] + + +class ServiceInRegistryInfoDict(TypedDict): + service_description: ServiceDescriptionDict + docker_labels: dict[str, Any] + image_path: str + internal_port: int | None + entry_point: str + service_extras: ServiceExtrasDict + + +def _create_service_description( + service_type: Literal["computational", "dynamic"], name: str, tag: str +) -> ServiceDescriptionDict: + service_desc = json.loads( + (CURRENT_DIR / "dummy_service_description-v1.json").read_text() + ) + + if service_type == "computational": + service_key_type = "comp" + elif service_type == "dynamic": + service_key_type = "dynamic" + else: + msg = f"Invalid {service_type=}" + raise ValueError(msg) + + service_desc["key"] = f"simcore/services/{service_key_type}/{name}" + service_desc["version"] = tag + service_desc["type"] = service_type + + return service_desc + + +def _create_docker_labels( + service_description: ServiceDescriptionDict, *, bad_json_format: bool +) -> dict[str, str]: + docker_labels = {} + for key, value in service_description.items(): + docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) + if bad_json_format: + docker_labels[".".join(["io", "simcore", key])] = ( + "d32;'" + docker_labels[".".join(["io", "simcore", key])] + ) + + return docker_labels + + +async def _create_base_image(labels, tag) -> dict[str, Any]: + dockerfile = """ +FROM alpine +CMD while true; do sleep 10; done + """ + f = 
BytesIO(dockerfile.encode("utf-8")) + tar_obj = utils.mktar_from_dockerfile(f) + + # build docker base image + docker = Docker() + base_docker_image = await docker.images.build( + fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag + ) + await docker.close() + return base_docker_image + + +async def _build_and_push_image( + registry_url: str, + service_type: Literal["computational", "dynamic"], + name: str, + tag: str, + dependent_image=None, + *, + bad_json_format: bool = False, + app_settings: ApplicationSettings, +) -> ServiceInRegistryInfoDict: + + # crate image + service_description = _create_service_description(service_type, name, tag) + docker_labels = _create_docker_labels( + service_description, bad_json_format=bad_json_format + ) + additional_docker_labels = [ + { + "name": "constraints", + "type": "string", + "value": ["node.role==manager"], + } + ] + + internal_port = None + entry_point = "" + if service_type == "dynamic": + internal_port = random.randint(1, 65535) # noqa: S311 + additional_docker_labels.append( + { + "name": "ports", + "type": "int", + "value": internal_port, + } + ) + entry_point = "/test/entry_point" + docker_labels["simcore.service.bootsettings"] = json.dumps( + [ + { + "name": "entry_point", + "type": "string", + "value": entry_point, + } + ] + ) + docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) + if bad_json_format: + docker_labels["simcore.service.settings"] = ( + "'fjks" + docker_labels["simcore.service.settings"] + ) + + if dependent_image is not None: + dependent_description = dependent_image["service_description"] + dependency_docker_labels = [ + { + "key": dependent_description["key"], + "tag": dependent_description["version"], + } + ] + docker_labels["simcore.service.dependencies"] = json.dumps( + dependency_docker_labels + ) + if bad_json_format: + docker_labels["simcore.service.dependencies"] = ( + "'fjks" + docker_labels["simcore.service.dependencies"] + ) + + # create the 
typical org.label-schema labels + service_extras = ServiceExtrasDict( + node_requirements=NodeRequirementsDict( + CPU=app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS / 1e9, + RAM=app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, + ), + build_date="2020-08-19T15:36:27Z", + vcs_ref="ca180ef1", + vcs_url="git@github.com:ITISFoundation/osparc-simcore.git", + ) + docker_labels["org.label-schema.build-date"] = service_extras["build_date"] + docker_labels["org.label-schema.schema-version"] = "1.0" + docker_labels["org.label-schema.vcs-ref"] = service_extras["vcs_ref"] + docker_labels["org.label-schema.vcs-url"] = service_extras["vcs_url"] + + image_tag = registry_url + "/{key}:{version}".format( + key=service_description["key"], version=tag + ) + await _create_base_image(docker_labels, image_tag) + + # push image to registry + try: + docker = Docker() + await docker.images.push(image_tag) + finally: + await docker.close() + + # remove image from host + # docker.images.remove(image_tag) + + return ServiceInRegistryInfoDict( + service_description=service_description, + docker_labels=docker_labels, + image_path=image_tag, + internal_port=internal_port, + entry_point=entry_point, + service_extras=service_extras, + ) + + +def _clean_registry(registry_url: str, list_of_images: list[ServiceInRegistryInfoDict]): + request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} + for image in list_of_images: + service_description = image["service_description"] + # get the image digest + tag = service_description["version"] + url = "http://{host}/v2/{name}/manifests/{tag}".format( + host=registry_url, name=service_description["key"], tag=tag + ) + response = requests.get(url, headers=request_headers, timeout=10) + docker_content_digest = response.headers["Docker-Content-Digest"] + # remove the image from the registry + url = "http://{host}/v2/{name}/manifests/{digest}".format( + host=registry_url, + name=service_description["key"], + digest=docker_content_digest, + ) + 
response = requests.delete(url, headers=request_headers, timeout=5) + + +class PushServicesCallable(Protocol): + async def __call__( + self, + *, + number_of_computational_services: int, + number_of_interactive_services: int, + inter_dependent_services: bool = False, + bad_json_format: bool = False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + ... + + +@pytest.fixture +def push_services( + docker_registry: str, app_settings: ApplicationSettings +) -> Iterator[PushServicesCallable]: + registry_url = docker_registry + list_of_pushed_images_tags: list[ServiceInRegistryInfoDict] = [] + dependent_images = [] + + async def _build_push_images_to_docker_registry( + *, + number_of_computational_services, + number_of_interactive_services, + inter_dependent_services=False, + bad_json_format=False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + try: + dependent_image = None + if inter_dependent_services: + dependent_image = await _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="dependency", + tag="10.52.999999", + dependent_image=None, + bad_json_format=bad_json_format, + app_settings=app_settings, + ) + dependent_images.append(dependent_image) + + images_to_build: list[Awaitable] = [ + _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, + app_settings=app_settings, + ) + for image_index in range(number_of_computational_services) + ] + + images_to_build.extend( + [ + _build_and_push_image( + registry_url=registry_url, + service_type="dynamic", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, + app_settings=app_settings, + ) + for image_index in range(number_of_interactive_services) + ] + ) + + results = await asyncio.gather(*images_to_build) + list_of_pushed_images_tags.extend(results) + + 
except DockerError: + _logger.exception("Docker API error while building and pushing images") + raise + + return list_of_pushed_images_tags + + yield _build_push_images_to_docker_registry + + _logger.info("clean registry") + _clean_registry(registry_url, list_of_pushed_images_tags) + _clean_registry(registry_url, dependent_images) diff --git a/services/director/tests/unit/test__model_examples.py b/services/director/tests/unit/test__model_examples.py new file mode 100644 index 00000000000..d9604d738d6 --- /dev/null +++ b/services/director/tests/unit/test__model_examples.py @@ -0,0 +1,28 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import json +from typing import Any + +import pytest +import simcore_service_director.models +from pydantic import BaseModel, ValidationError +from pytest_simcore.pydantic_models import walk_model_examples_in_package + + +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + walk_model_examples_in_package(simcore_service_director.models), +) +def test_director_service_model_examples( + model_cls: type[BaseModel], example_name: int, example_data: Any +): + try: + assert model_cls.parse_obj(example_data) is not None + except ValidationError as err: + pytest.fail( + f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" + ) diff --git a/services/director/tests/unit/test_cli.py b/services/director/tests/unit/test_cli.py new file mode 100644 index 00000000000..3b42989bcff --- /dev/null +++ b/services/director/tests/unit/test_cli.py @@ -0,0 +1,34 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +import os + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director._meta import API_VERSION +from simcore_service_director.cli import main 
+from simcore_service_director.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION + + +def test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK + + settings = ApplicationSettings.parse_raw(result.output) + assert settings.dict() == ApplicationSettings.create_from_envs().dict() + + +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout diff --git a/services/director/tests/unit/test_core_settings.py b/services/director/tests/unit/test_core_settings.py new file mode 100644 index 00000000000..5ac622ba668 --- /dev/null +++ b/services/director/tests/unit/test_core_settings.py @@ -0,0 +1,153 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import pytest +from pytest_simcore.helpers.monkeypatch_envs import ( + setenvs_from_dict, + setenvs_from_envfile, +) +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director.core.settings import ApplicationSettings + + +def test_valid_web_application_settings(app_environment: EnvVarsDict): + """ + We validate actual envfiles (e.g. 
repo.config files) by passing them via the CLI + + $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets + $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + + """ + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() + + assert ( + str( + app_environment.get( + "DIRECTOR_DEFAULT_MAX_MEMORY", + ApplicationSettings.__fields__["DIRECTOR_DEFAULT_MAX_MEMORY"].default, + ) + ) + == f"{settings.DIRECTOR_DEFAULT_MAX_MEMORY}" + ) + + +def test_docker_container_env_sample(monkeypatch: pytest.MonkeyPatch): + monkeypatch.delenv("DIRECTOR_DEFAULT_MAX_MEMORY", raising=False) + + setenvs_from_envfile( + monkeypatch, + """ + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS={} + DIRECTOR_REGISTRY_CACHING=True + DIRECTOR_REGISTRY_CACHING_TTL=900 + DIRECTOR_SELF_SIGNED_SSL_FILENAME= + DIRECTOR_SELF_SIGNED_SSL_SECRET_ID= + DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME= + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=node.labels.io.simcore.autoscaled-node!=true + EXTRA_HOSTS_SUFFIX=undefined + GPG_KEY=0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + HOME=/root + HOSTNAME=osparc-master-01-2 + LANG=C.UTF-8 + LC_ALL=C.UTF-8 + LOGLEVEL=WARNING + MONITORING_ENABLED=True + PATH=/home/scu/.venv/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + POSTGRES_DB=simcoredb + POSTGRES_ENDPOINT=master_postgres:5432 + POSTGRES_HOST=master_postgres + POSTGRES_PASSWORD=z43 + POSTGRES_PORT=5432 + POSTGRES_USER=scu + PUBLISHED_HOST_NAME=osparc-master.speag.com + PWD=/home/scu + PYTHONDONTWRITEBYTECODE=1 + PYTHONOPTIMIZE=TRUE + PYTHON_GET_PIP_SHA256=adsfasdf + PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/eff16c878c7fd6b688b9b4c4267695cf1a0bf01b/get-pip.py + PYTHON_PIP_VERSION=20.1.1 + PYTHON_VERSION=3.6.10 + REGISTRY_AUTH=True + REGISTRY_PATH= + REGISTRY_PW=adsfasdf + REGISTRY_SSL=True + REGISTRY_URL=registry.osparc-master.speag.com + 
REGISTRY_USER=admin + REGISTRY_VERSION=v2 + S3_ACCESS_KEY=adsfasdf + S3_BUCKET_NAME=master-simcore + S3_ENDPOINT=https://ceph-prod-rgw.speag.com + S3_REGION=us-east-1 + S3_SECRET_KEY=asdf + SC_BOOT_MODE=production + SC_BUILD_TARGET=production + SC_USER_ID=8004 + SC_USER_NAME=scu + SHLVL=0 + SIMCORE_SERVICES_NETWORK_NAME=master-simcore_interactive_services_subnet + STORAGE_ENDPOINT=master_storage:8080 + SWARM_STACK_NAME=master-simcore + TERM=xterm + TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT=http://jaeger:4318 + TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE=50 + TRAEFIK_SIMCORE_ZONE=master_internal_simcore_stack + VIRTUAL_ENV=/home/scu/.venv + LOG_FORMAT_LOCAL_DEV_ENABLED=1 + """, + ) + + settings = ApplicationSettings.create_from_envs() + + assert settings.DIRECTOR_DEFAULT_MAX_MEMORY == 0, "default!" + + +def test_docker_compose_environment_sample( + monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict +): + + setenvs_from_dict( + monkeypatch, + { + **app_environment, + "DEFAULT_MAX_MEMORY": "0", + "DEFAULT_MAX_NANO_CPUS": "0", + "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS": '{"VRAM": "node.labels.gpu==true"}', + "DIRECTOR_REGISTRY_CACHING": "True", + "DIRECTOR_REGISTRY_CACHING_TTL": "900", + "DIRECTOR_SELF_SIGNED_SSL_FILENAME": "", + "DIRECTOR_SELF_SIGNED_SSL_SECRET_ID": "", + "DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME": "", + "DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS": "", + "DIRECTOR_TRACING": "{}", + "EXTRA_HOSTS_SUFFIX": "undefined", + "LOGLEVEL": "DEBUG", + "MONITORING_ENABLED": "True", + "POSTGRES_DB": "simcoredb", + "POSTGRES_ENDPOINT": "osparc-dev.foo.com:5432", + "POSTGRES_HOST": "osparc-dev.foo.com", + "POSTGRES_PASSWORD": "adsfasdf", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "postgres", + "PUBLISHED_HOST_NAME": "osparc-master-zmt.click", + "REGISTRY_AUTH": "True", + "REGISTRY_PATH": "", + "REGISTRY_PW": "asdf", + "REGISTRY_SSL": "True", + "REGISTRY_URL": "registry.osparc-master-zmt.click", + "REGISTRY_USER": "admin", 
+ "SIMCORE_SERVICES_NETWORK_NAME": "master-simcore_interactive_services_subnet", + "STORAGE_ENDPOINT": "master_storage:8080", + "SWARM_STACK_NAME": "master-simcore", + "TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT": "http://jaeger:4318", + "TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE": "50", + "TRAEFIK_SIMCORE_ZONE": "master_internal_simcore_stack", + }, + ) + + settings = ApplicationSettings.create_from_envs() diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/unit/test_docker_utils.py similarity index 50% rename from services/director/tests/test_docker_utils.py rename to services/director/tests/unit/test_docker_utils.py index f6cce146e4b..81ad8299f31 100644 --- a/services/director/tests/test_docker_utils.py +++ b/services/director/tests/unit/test_docker_utils.py @@ -5,8 +5,6 @@ # pylint: disable=not-async-context-manager from asyncio import sleep -import pytest -from aiodocker.exceptions import DockerError from simcore_service_director import docker_utils @@ -29,39 +27,14 @@ async def test_docker_client(): await container.delete(force=True) -@pytest.mark.parametrize( - "fct", - [ - (docker_utils.swarm_get_number_nodes), - (docker_utils.swarm_has_manager_nodes), - (docker_utils.swarm_has_worker_nodes), - ], -) -async def test_swarm_method_with_no_swarm(fct): - # if this fails on your development machine run - # `docker swarm leave --force` to leave the swarm - with pytest.raises(DockerError): - await fct() - - -async def test_swarm_get_number_nodes(docker_swarm): +async def test_swarm_get_number_nodes(docker_swarm: None): num_nodes = await docker_utils.swarm_get_number_nodes() assert num_nodes == 1 -async def test_swarm_has_manager_nodes(docker_swarm): - assert (await docker_utils.swarm_has_manager_nodes()) == True - - -async def test_swarm_has_worker_nodes(docker_swarm): - assert (await docker_utils.swarm_has_worker_nodes()) == False +async def test_swarm_has_manager_nodes(docker_swarm: None): + assert (await 
docker_utils.swarm_has_manager_nodes()) is True -async def test_push_services( - push_services, - configure_registry_access, - configure_schemas_location, -): - images = await push_services( - number_of_computational_services=3, number_of_interactive_services=3 - ) +async def test_swarm_has_worker_nodes(docker_swarm: None): + assert (await docker_utils.swarm_has_worker_nodes()) is False diff --git a/services/director/tests/test_producer.py b/services/director/tests/unit/test_producer.py similarity index 69% rename from services/director/tests/test_producer.py rename to services/director/tests/unit/test_producer.py index e8fcc4a6fdb..4b729c424bb 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -6,40 +6,68 @@ import json import uuid +from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass -from typing import Callable +from typing import Any import docker import pytest -from simcore_service_director import config, exceptions, producer +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.users import UserID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from settings_library.docker_registry import RegistrySettings +from simcore_service_director import producer +from simcore_service_director.constants import ( + CPU_RESOURCE_LIMIT_KEY, + MEM_RESOURCE_LIMIT_KEY, +) +from simcore_service_director.core.errors import ( + DirectorRuntimeError, + ServiceNotAvailableError, + ServiceUUIDNotFoundError, +) +from simcore_service_director.core.settings import ApplicationSettings from tenacity import Retrying from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed @pytest.fixture -def ensure_service_runs_in_ci(monkeypatch): - monkeypatch.setattr(config, "DEFAULT_MAX_MEMORY", int(25 * pow(1024, 2))) - monkeypatch.setattr(config, 
"DEFAULT_MAX_NANO_CPUS", int(0.01 * pow(10, 9))) +def ensure_service_runs_in_ci( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "DIRECTOR_DEFAULT_MAX_MEMORY": f"{int(25 * pow(1024, 2))}", + "DIRECTOR_DEFAULT_MAX_NANO_CPUS": f"{int(0.01 * pow(10, 9))}", + }, + ) @pytest.fixture async def run_services( - ensure_service_runs_in_ci, - aiohttp_mock_app, - configure_registry_access, - configure_schemas_location, + ensure_service_runs_in_ci: EnvVarsDict, + configure_registry_access: EnvVarsDict, + app: FastAPI, + app_settings: ApplicationSettings, push_services, - docker_swarm, - user_id, - project_id, + docker_swarm: None, + user_id: UserID, + project_id: ProjectID, docker_client: docker.client.DockerClient, -) -> Callable: +) -> AsyncIterator[Callable[[int, int], Awaitable[list[dict[str, Any]]]]]: started_services = [] - async def push_start_services(number_comp: int, number_dyn: int, dependant=False): + async def push_start_services( + number_comp: int, number_dyn: int, dependant=False + ) -> list[dict[str, Any]]: pushed_services = await push_services( - number_comp, number_dyn, inter_dependent_services=dependant + number_of_computational_services=number_comp, + number_of_interactive_services=number_dyn, + inter_dependent_services=dependant, ) assert len(pushed_services) == (number_comp + number_dyn) for pushed_service in pushed_services: @@ -50,13 +78,13 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False service_entry_point = pushed_service["entry_point"] service_uuid = str(uuid.uuid1()) service_basepath = "/my/base/path" - with pytest.raises(exceptions.ServiceUUIDNotFoundError): - await producer.get_service_details(aiohttp_mock_app, service_uuid) + with pytest.raises(ServiceUUIDNotFoundError): + await producer.get_service_details(app, service_uuid) # start the service started_service = await producer.start_service( - 
aiohttp_mock_app, - user_id, - project_id, + app, + f"{user_id}", + f"{project_id}", service_key, service_version, service_uuid, @@ -84,9 +112,7 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False assert "service_message" in started_service # wait for service to be running - node_details = await producer.get_service_details( - aiohttp_mock_app, service_uuid - ) + node_details = await producer.get_service_details(app, service_uuid) max_time = 60 for attempt in Retrying( wait=wait_fixed(1), stop=stop_after_delay(max_time), reraise=True @@ -95,11 +121,9 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False print( f"--> waiting for {started_service['service_key']}:{started_service['service_version']} to run..." ) - node_details = await producer.get_service_details( - aiohttp_mock_app, service_uuid - ) + node_details = await producer.get_service_details(app, service_uuid) print( - f"<-- {started_service['service_key']}:{started_service['service_version']} state is {node_details['service_state']} using {config.DEFAULT_MAX_MEMORY}Bytes, {config.DEFAULT_MAX_NANO_CPUS}nanocpus" + f"<-- {started_service['service_key']}:{started_service['service_version']} state is {node_details['service_state']} using {app_settings.DIRECTOR_DEFAULT_MAX_MEMORY}Bytes, {app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS}nanocpus" ) for service in docker_client.services.list(): tasks = service.tasks() @@ -123,9 +147,9 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False # NOTE: Fake services are not even web-services therefore we cannot # even emulate a legacy dy-service that does not implement a save-state feature # so here we must make save_state=False - await producer.stop_service(aiohttp_mock_app, service_uuid, save_state=False) - with pytest.raises(exceptions.ServiceUUIDNotFoundError): - await producer.get_service_details(aiohttp_mock_app, service_uuid) + await producer.stop_service(app, 
node_uuid=service_uuid, save_state=False) + with pytest.raises(ServiceUUIDNotFoundError): + await producer.get_service_details(app, service_uuid) async def test_find_service_tag(): @@ -142,32 +166,44 @@ async def test_find_service_tag(): "1.2.3", ] } - with pytest.raises(exceptions.ServiceNotAvailableError): - await producer._find_service_tag(list_of_images, "some_wrong_key", None) - with pytest.raises(exceptions.ServiceNotAvailableError): - await producer._find_service_tag( + with pytest.raises(ServiceNotAvailableError): + await producer._find_service_tag( # noqa: SLF001 + list_of_images, "some_wrong_key", None + ) + with pytest.raises(ServiceNotAvailableError): + await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, "some wrong key" ) # get the latest (e.g. 2.11.0) - latest_version = await producer._find_service_tag( + latest_version = await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, None ) assert latest_version == "2.11.0" - latest_version = await producer._find_service_tag( + latest_version = await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, "latest" ) assert latest_version == "2.11.0" # get a specific version - version = await producer._find_service_tag(list_of_images, my_service_key, "1.2.3") + await producer._find_service_tag( # noqa: SLF001 + list_of_images, my_service_key, "1.2.3" + ) -async def test_start_stop_service(docker_network, run_services): +async def test_start_stop_service( + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + run_services: Callable[..., Awaitable[list[dict[str, Any]]]], +): # standard test await run_services(number_comp=1, number_dyn=1) async def test_service_assigned_env_variables( - docker_network, run_services, user_id, project_id + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + run_services: Callable[..., Awaitable[list[dict[str, Any]]]], + user_id: UserID, + 
project_id: ProjectID, ): started_services = await run_services(number_comp=1, number_dyn=1) client = docker.from_env() @@ -192,21 +228,25 @@ async def test_service_assigned_env_variables( assert "STORAGE_ENDPOINT" in envs_dict assert "SIMCORE_USER_ID" in envs_dict - assert envs_dict["SIMCORE_USER_ID"] == user_id + assert envs_dict["SIMCORE_USER_ID"] == f"{user_id}" assert "SIMCORE_NODE_UUID" in envs_dict assert envs_dict["SIMCORE_NODE_UUID"] == service_uuid assert "SIMCORE_PROJECT_ID" in envs_dict - assert envs_dict["SIMCORE_PROJECT_ID"] == project_id + assert envs_dict["SIMCORE_PROJECT_ID"] == f"{project_id}" assert "SIMCORE_NODE_BASEPATH" in envs_dict assert envs_dict["SIMCORE_NODE_BASEPATH"] == service["service_basepath"] assert "SIMCORE_HOST_NAME" in envs_dict assert envs_dict["SIMCORE_HOST_NAME"] == docker_service.name - assert config.MEM_RESOURCE_LIMIT_KEY in envs_dict - assert config.CPU_RESOURCE_LIMIT_KEY in envs_dict + assert MEM_RESOURCE_LIMIT_KEY in envs_dict + assert CPU_RESOURCE_LIMIT_KEY in envs_dict -async def test_interactive_service_published_port(docker_network, run_services): +async def test_interactive_service_published_port( + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + run_services, +): running_dynamic_services = await run_services(number_comp=0, number_dyn=1) assert len(running_dynamic_services) == 1 @@ -231,33 +271,11 @@ async def test_interactive_service_published_port(docker_network, run_services): assert docker_service.attrs["Spec"]["EndpointSpec"]["Mode"] == "dnsrr" -@pytest.fixture -def docker_network( - docker_client: docker.client.DockerClient, docker_swarm: None -) -> docker.models.networks.Network: - network = docker_client.networks.create( - "test_network_default", driver="overlay", scope="swarm" - ) - print(f"--> docker network '{network.name}' created") - config.SIMCORE_SERVICES_NETWORK_NAME = network.name - yield network - - # cleanup - print(f"<-- removing docker network 
'{network.name}'...") - network.remove() - - for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(1)): - with attempt: - list_networks = docker_client.networks.list( - config.SIMCORE_SERVICES_NETWORK_NAME - ) - assert not list_networks - config.SIMCORE_SERVICES_NETWORK_NAME = None - print(f"<-- removed docker network '{network.name}'") - - async def test_interactive_service_in_correct_network( - docker_network: docker.models.networks.Network, run_services + configure_registry_access: EnvVarsDict, + with_docker_network: dict[str, Any], + configured_docker_network: EnvVarsDict, + run_services, ): running_dynamic_services = await run_services( number_comp=0, number_dyn=2, dependant=False @@ -273,11 +291,16 @@ async def test_interactive_service_in_correct_network( assert len(list_of_services) == 1 docker_service = list_of_services[0] assert ( - docker_service.attrs["Spec"]["Networks"][0]["Target"] == docker_network.id + docker_service.attrs["Spec"]["Networks"][0]["Target"] + == with_docker_network["Id"] ) -async def test_dependent_services_have_common_network(docker_network, run_services): +async def test_dependent_services_have_common_network( + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + run_services, +): running_dynamic_services = await run_services( number_comp=0, number_dyn=2, dependant=True ) @@ -305,6 +328,11 @@ class FakeDockerService: expected_tag: str +@pytest.fixture +def registry_settings(app_settings: ApplicationSettings) -> RegistrySettings: + return app_settings.DIRECTOR_REGISTRY + + @pytest.mark.parametrize( "fake_service", [ @@ -321,13 +349,15 @@ class FakeDockerService: ], ) async def test_get_service_key_version_from_docker_service( + configure_registry_access: EnvVarsDict, + registry_settings: RegistrySettings, fake_service: FakeDockerService, ): docker_service_partial_inspect = { "Spec": { "TaskTemplate": { "ContainerSpec": { - "Image": f"{config.REGISTRY_PATH}{fake_service.service_str}" + 
"Image": f"{registry_settings.resolved_registry_url}{fake_service.service_str}" } } } @@ -335,8 +365,8 @@ async def test_get_service_key_version_from_docker_service( ( service_key, service_tag, - ) = await producer._get_service_key_version_from_docker_service( - docker_service_partial_inspect + ) = await producer._get_service_key_version_from_docker_service( # noqa: SLF001 + docker_service_partial_inspect, registry_settings ) assert service_key == fake_service.expected_key assert service_tag == fake_service.expected_tag @@ -352,18 +382,20 @@ async def test_get_service_key_version_from_docker_service( ], ) async def test_get_service_key_version_from_docker_service_except_invalid_keys( + configure_registry_access: EnvVarsDict, + registry_settings: RegistrySettings, fake_service_str: str, ): docker_service_partial_inspect = { "Spec": { "TaskTemplate": { "ContainerSpec": { - "Image": f"{config.REGISTRY_PATH if fake_service_str.startswith('/') else ''}{fake_service_str}" + "Image": f"{registry_settings.resolved_registry_url if fake_service_str.startswith('/') else ''}{fake_service_str}" } } } } - with pytest.raises(exceptions.DirectorException): - await producer._get_service_key_version_from_docker_service( - docker_service_partial_inspect + with pytest.raises(DirectorRuntimeError): + await producer._get_service_key_version_from_docker_service( # noqa: SLF001 + docker_service_partial_inspect, registry_settings ) diff --git a/services/director/tests/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py similarity index 65% rename from services/director/tests/test_registry_proxy.py rename to services/director/tests/unit/test_registry_proxy.py index dec9ee43708..2e5738c2670 100644 --- a/services/director/tests/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -5,98 +5,65 @@ import time import pytest - -from simcore_service_director import config, registry_proxy +from fastapi import FastAPI +from 
pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director import registry_proxy +from simcore_service_director.core.settings import ApplicationSettings async def test_list_no_services_available( - aiohttp_mock_app, - docker_registry, - configure_registry_access, - configure_schemas_location, + configure_registry_access: EnvVarsDict, + app: FastAPI, ): computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL - ) - assert not computational_services # it's empty - interactive_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC - ) - assert not interactive_services - all_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) - assert not all_services - - -async def test_list_services_with_bad_json_formatting( - aiohttp_mock_app, - docker_registry, - configure_registry_access, - configure_schemas_location, - push_services, -): - # some services - created_services = await push_services( - number_of_computational_services=3, - number_of_interactive_services=2, - bad_json_format=True, - ) - assert len(created_services) == 5 - computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL + app, registry_proxy.ServiceType.COMPUTATIONAL ) assert not computational_services # it's empty interactive_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC + app, registry_proxy.ServiceType.DYNAMIC ) assert not interactive_services all_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL + app, registry_proxy.ServiceType.ALL ) assert not all_services async def test_list_computational_services( - aiohttp_mock_app, - docker_registry, + configure_registry_access: EnvVarsDict, + app: 
FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): await push_services( number_of_computational_services=6, number_of_interactive_services=3 ) computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL + app, registry_proxy.ServiceType.COMPUTATIONAL ) assert len(computational_services) == 6 async def test_list_interactive_services( - aiohttp_mock_app, - docker_registry, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): await push_services( number_of_computational_services=5, number_of_interactive_services=4 ) interactive_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC + app, registry_proxy.ServiceType.DYNAMIC ) assert len(interactive_services) == 4 async def test_list_of_image_tags( - aiohttp_mock_app, - docker_registry, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=5, number_of_interactive_services=3 @@ -110,16 +77,14 @@ async def test_list_of_image_tags( image_number[key] = image_number[key] + 1 for key, number in image_number.items(): - list_of_image_tags = await registry_proxy.list_image_tags(aiohttp_mock_app, key) + list_of_image_tags = await registry_proxy.list_image_tags(app, key) assert len(list_of_image_tags) == number async def test_list_interactive_service_dependencies( - aiohttp_mock_app, - docker_registry, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=2, @@ -133,10 +98,12 @@ async def test_list_interactive_service_dependencies( docker_dependencies = json.loads( docker_labels["simcore.service.dependencies"] ) - 
image_dependencies = await registry_proxy.list_interactive_service_dependencies( - aiohttp_mock_app, - service_description["key"], - service_description["version"], + image_dependencies = ( + await registry_proxy.list_interactive_service_dependencies( + app, + service_description["key"], + service_description["version"], + ) ) assert isinstance(image_dependencies, list) assert len(image_dependencies) == len(docker_dependencies) @@ -145,11 +112,9 @@ async def test_list_interactive_service_dependencies( async def test_get_image_labels( - aiohttp_mock_app, - docker_registry, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -158,7 +123,7 @@ async def test_get_image_labels( for image in images: service_description = image["service_description"] labels, image_manifest_digest = await registry_proxy.get_image_labels( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert "io.simcore.key" in labels assert "io.simcore.version" in labels @@ -174,12 +139,13 @@ async def test_get_image_labels( assert "simcore.service.settings" in labels assert image_manifest_digest == await registry_proxy.get_image_digest( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert image_manifest_digest is not None assert image_manifest_digest not in images_digests images_digests.add(image_manifest_digest) + def test_get_service_first_name(): repo = "simcore/services/dynamic/myservice/modeler/my-sub-modeler" assert registry_proxy.get_service_first_name(repo) == "myservice" @@ -219,10 +185,9 @@ def test_get_service_last_namess(): async def test_get_image_details( - aiohttp_mock_app, + configure_registry_access: 
EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -230,7 +195,7 @@ async def test_get_image_details( for image in images: service_description = image["service_description"] details = await registry_proxy.get_image_details( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert details.pop("image_digest").startswith("sha") @@ -238,54 +203,57 @@ async def test_get_image_details( assert details == service_description +@pytest.fixture +def configure_registry_caching( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, {"DIRECTOR_REGISTRY_CACHING": True} + ) + + async def test_registry_caching( - aiohttp_mock_app, + configure_registry_access: EnvVarsDict, + configure_registry_caching: EnvVarsDict, + app_settings: ApplicationSettings, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( - number_of_computational_services=1, number_of_interactive_services=1 + number_of_computational_services=21, number_of_interactive_services=21 ) - config.DIRECTOR_REGISTRY_CACHING = True + assert app_settings.DIRECTOR_REGISTRY_CACHING is True + start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) time_to_retrieve_without_cache = time.perf_counter() - start_time assert len(services) == len(images) start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, 
registry_proxy.ServiceType.ALL) time_to_retrieve_with_cache = time.perf_counter() - start_time assert len(services) == len(images) assert time_to_retrieve_with_cache < time_to_retrieve_without_cache + print("time to retrieve services without cache: ", time_to_retrieve_without_cache) + print("time to retrieve services with cache: ", time_to_retrieve_with_cache) @pytest.mark.skip(reason="test needs credentials to real registry") async def test_get_services_performance( - aiohttp_mock_app, loop, configure_custom_registry + configure_registry_access: EnvVarsDict, + app: FastAPI, ): start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) stop_time = time.perf_counter() print( - "\nTime to run getting services: {}s, #services {}, time per call {}s/service".format( - stop_time - start_time, - len(services), - (stop_time - start_time) / len(services), - ) + f"\nTime to run getting services: {stop_time - start_time}s, #services {len(services)}, time per call {(stop_time - start_time) / len(services)}s/service" ) async def test_generate_service_extras( - aiohttp_mock_app, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -296,7 +264,7 @@ async def test_generate_service_extras( service_extras = image["service_extras"] extras = await registry_proxy.get_service_extras( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert extras == service_extras diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index f0254834105..37bbb3e9b05 100644 --- a/services/docker-compose.local.yml +++ 
b/services/docker-compose.local.yml @@ -50,8 +50,9 @@ services: director: environment: <<: *common_environment + DIRECTOR_REMOTE_DEBUGGING_PORT : 3000 ports: - - "8080" + - "8000" - "3004:3000" director-v2: diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 2f039977889..e31261ca20c 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -249,43 +249,39 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: - DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY:-0} - DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS:-0} + DIRECTOR_DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY} + DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DIRECTOR_LOGLEVEL: ${DIRECTOR_LOGLEVEL} + DIRECTOR_MONITORING_ENABLED: ${DIRECTOR_MONITORING_ENABLED} + DIRECTOR_PUBLISHED_HOST_NAME: ${DIRECTOR_PUBLISHED_HOST_NAME} DIRECTOR_REGISTRY_CACHING_TTL: ${DIRECTOR_REGISTRY_CACHING_TTL} DIRECTOR_REGISTRY_CACHING: ${DIRECTOR_REGISTRY_CACHING} - DIRECTOR_SELF_SIGNED_SSL_FILENAME: ${DIRECTOR_SELF_SIGNED_SSL_FILENAME} - DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_ID} - DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME} DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} - DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} - EXTRA_HOSTS_SUFFIX: undefined - LOGLEVEL: ${LOG_LEVEL:-WARNING} - MONITORING_ENABLED: ${MONITORING_ENABLED:-True} - PUBLISHED_HOST_NAME: ${MACHINE_FQDN} + DIRECTOR_TRACING: ${DIRECTOR_TRACING} + POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} POSTGRES_HOST: ${POSTGRES_HOST} POSTGRES_PASSWORD: 
${POSTGRES_PASSWORD} POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} + REGISTRY_AUTH: ${REGISTRY_AUTH} REGISTRY_PATH: ${REGISTRY_PATH} REGISTRY_PW: ${REGISTRY_PW} REGISTRY_SSL: ${REGISTRY_SSL} REGISTRY_URL: ${REGISTRY_URL} REGISTRY_USER: ${REGISTRY_USER} - S3_ACCESS_KEY: ${S3_ACCESS_KEY} - S3_BUCKET_NAME: ${S3_BUCKET_NAME} - S3_ENDPOINT: ${S3_ENDPOINT} - S3_REGION: ${S3_REGION} - S3_SECRET_KEY: ${S3_SECRET_KEY} + SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} - SWARM_STACK_NAME: ${SWARM_STACK_NAME:-simcore} - DIRECTOR_TRACING: ${DIRECTOR_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} - TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT} - TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} + SWARM_STACK_NAME: ${SWARM_STACK_NAME} + + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + + TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE} volumes: - "/var/run/docker.sock:/var/run/docker.sock" deploy: diff --git a/services/dynamic-scheduler/setup.py b/services/dynamic-scheduler/setup.py index 5ca9677bd20..6cde52f1491 100755 --- a/services/dynamic-scheduler/setup.py +++ b/services/dynamic-scheduler/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/dynamic-sidecar/setup.py b/services/dynamic-sidecar/setup.py index 83a6b2abc06..a980edda233 100644 --- a/services/dynamic-sidecar/setup.py +++ b/services/dynamic-sidecar/setup.py @@ -49,7 +49,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "": "src", }, 
"include_package_data": True, - "python_requires": "~=3.10", + "python_requires": "~=3.11", "PROD_REQUIREMENTS": PROD_REQUIREMENTS, "TEST_REQUIREMENTS": TEST_REQUIREMENTS, "setup_requires": ["setuptools_scm"], diff --git a/services/efs-guardian/setup.py b/services/efs-guardian/setup.py index ed3f29fc23b..1a0f0921ee3 100755 --- a/services/efs-guardian/setup.py +++ b/services/efs-guardian/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/invitations/setup.py b/services/invitations/setup.py index 4f053c504ac..9e9cf583c0c 100755 --- a/services/invitations/setup.py +++ b/services/invitations/setup.py @@ -44,7 +44,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/osparc-gateway-server/setup.py b/services/osparc-gateway-server/setup.py index 531804628f8..c3a7becc072 100755 --- a/services/osparc-gateway-server/setup.py +++ b/services/osparc-gateway-server/setup.py @@ -37,7 +37,7 @@ def read_reqs(reqs_path: Path) -> set[str]: ], "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/osparc-gateway-server/tests/conftest.py b/services/osparc-gateway-server/tests/conftest.py index cb948ccb538..b7d545e4f0b 100644 --- a/services/osparc-gateway-server/tests/conftest.py +++ b/services/osparc-gateway-server/tests/conftest.py @@ -1,18 +1,16 @@ # pylint: disable=unused-argument # pylint: disable=redefined-outer-name -import 
asyncio +from collections.abc import AsyncIterator from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable import aiodocker import pytest -from faker import Faker -from tenacity.asyncio import AsyncRetrying -from tenacity.stop import stop_after_delay -from tenacity.wait import wait_fixed -pytest_plugins = ["pytest_simcore.repository_paths", "pytest_simcore.docker_swarm"] +pytest_plugins = [ + "pytest_simcore.repository_paths", + "pytest_simcore.docker_swarm", +] @pytest.fixture(scope="session") @@ -26,39 +24,3 @@ def package_dir(osparc_simcore_services_dir: Path): async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: async with aiodocker.Docker() as docker_client: yield docker_client - - -@pytest.fixture -async def docker_network( - async_docker_client: aiodocker.Docker, faker: Faker -) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: - networks = [] - - async def _network_creator(**network_config_kwargs) -> dict[str, Any]: - network = await async_docker_client.networks.create( - config={"Name": faker.uuid4(), "Driver": "overlay"} | network_config_kwargs - ) - assert network - print(f"--> created network {network=}") - networks.append(network) - return await network.show() - - yield _network_creator - - # wait until all networks are really gone - async def _wait_for_network_deletion(network: aiodocker.docker.DockerNetwork): - network_name = (await network.show())["Name"] - await network.delete() - async for attempt in AsyncRetrying( - reraise=True, wait=wait_fixed(1), stop=stop_after_delay(60) - ): - with attempt: - print(f"<-- waiting for network '{network_name}' deletion...") - list_of_network_names = [ - n["Name"] for n in await async_docker_client.networks.list() - ] - assert network_name not in list_of_network_names - print(f"<-- network '{network_name}' deleted") - - print(f"<-- removing all networks {networks=}") - await asyncio.gather(*[_wait_for_network_deletion(network) for network in networks]) diff 
--git a/services/payments/setup.py b/services/payments/setup.py index 234334fa2ab..c1f3fa64313 100755 --- a/services/payments/setup.py +++ b/services/payments/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/resource-usage-tracker/setup.py b/services/resource-usage-tracker/setup.py index ce2abba82a1..26afa3e2183 100755 --- a/services/resource-usage-tracker/setup.py +++ b/services/resource-usage-tracker/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/storage/setup.py b/services/storage/setup.py index 792ff4bebcd..2a0ca0d9c41 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -38,7 +38,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "description": "Service to manage data storage in simcore", "author": "Manuel Guidon (mguidon)", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": {"": "src"}, "include_package_data": True, diff --git a/services/web/server/setup.py b/services/web/server/setup.py index aba3c322d87..57ada0bc03b 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -60,7 +60,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "simcore-service=simcore_service_webserver.__main__:main", ] }, - "python_requires": "~=3.10", + "python_requires": "~=3.11", "install_requires": INSTALL_REQUIREMENTS, "tests_require": TEST_REQUIREMENTS, "setup_requires": ["pytest-runner"], diff --git 
a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index 88a15a845d1..fc4c0463de2 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -18,8 +18,8 @@ define _up_simcore # set some parameters to allow for e2e to run echo LOGIN_REGISTRATION_INVITATION_REQUIRED=0 >> $(SIMCORE_DOT_ENV) echo LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0 >> $(SIMCORE_DOT_ENV) -echo DEFAULT_MAX_NANO_CPUS=1000000000 >> $(SIMCORE_DOT_ENV) -echo DEFAULT_MAX_MEMORY=134217728 >> $(SIMCORE_DOT_ENV) +echo DIRECTOR_DEFAULT_MAX_NANO_CPUS=1000000000 >> $(SIMCORE_DOT_ENV) +echo DIRECTOR_DEFAULT_MAX_MEMORY=134217728 >> $(SIMCORE_DOT_ENV) echo SIDECAR_FORCE_CPU_NODE=1 >> $(SIMCORE_DOT_ENV) $(MAKE_C) $(REPO_BASE_DIR) up-prod ops_ci=1 endef diff --git a/tests/public-api/conftest.py b/tests/public-api/conftest.py index 935d63a18a8..3b4a0b27b9c 100644 --- a/tests/public-api/conftest.py +++ b/tests/public-api/conftest.py @@ -46,12 +46,14 @@ @pytest.fixture(scope="session") -def testing_environ_vars(testing_environ_vars: EnvVarsDict) -> EnvVarsDict: - # OVERRIDES packages/pytest-simcore/src/pytest_simcore/docker_compose.py::testing_environ_vars fixture +def env_vars_for_docker_compose( + env_vars_for_docker_compose: EnvVarsDict, +) -> EnvVarsDict: + # OVERRIDES packages/pytest-simcore/src/pytest_simcore/docker_compose.py::env_vars_for_docker_compose fixture # help faster update of service_metadata table by catalog - testing_environ_vars["CATALOG_BACKGROUND_TASK_REST_TIME"] = "1" - return testing_environ_vars.copy() + env_vars_for_docker_compose["CATALOG_BACKGROUND_TASK_REST_TIME"] = "1" + return env_vars_for_docker_compose.copy() @pytest.fixture(scope="module") @@ -170,7 +172,7 @@ def registered_user( def services_registry( docker_registry_image_injector: Callable, registered_user: RegisteredUserDict, - testing_environ_vars: dict[str, str], + env_vars_for_docker_compose: dict[str, str], ) -> dict[ServiceNameStr, ServiceInfoDict]: # NOTE: service image MUST be injected 
in registry AFTER user is registered # @@ -249,7 +251,7 @@ def services_registry( } wait_for_catalog_to_detect = float( - testing_environ_vars["CATALOG_BACKGROUND_TASK_REST_TIME"] + env_vars_for_docker_compose["CATALOG_BACKGROUND_TASK_REST_TIME"] ) print( f"Catalog should take {wait_for_catalog_to_detect} secs to detect new services ...", diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index c295e328fc8..b6f221c7c80 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -59,9 +59,9 @@ def core_services_selection(simcore_docker_compose: dict) -> list[ServiceNameStr @pytest.fixture(scope="module") -def core_stack_namespace(testing_environ_vars: EnvVarsDict) -> str: +def core_stack_namespace(env_vars_for_docker_compose: EnvVarsDict) -> str: """returns 'com.docker.stack.namespace' service label core stack""" - stack_name = testing_environ_vars["SWARM_STACK_NAME"] + stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert stack_name is not None return stack_name @@ -144,7 +144,7 @@ def ops_services_selection(ops_docker_compose: ComposeSpec) -> list[ServiceNameS @pytest.fixture(scope="module") -def ops_stack_namespace(testing_environ_vars: EnvVarsDict) -> str: +def ops_stack_namespace(env_vars_for_docker_compose: EnvVarsDict) -> str: """returns 'com.docker.stack.namespace' service label operations stack""" return "pytest-ops" diff --git a/tests/swarm-deploy/requirements/_test.in b/tests/swarm-deploy/requirements/_test.in index 9b1e1ece9fe..e325cd6a11f 100644 --- a/tests/swarm-deploy/requirements/_test.in +++ b/tests/swarm-deploy/requirements/_test.in @@ -11,6 +11,7 @@ aiodocker alembic click docker +faker jsonschema pytest pytest-asyncio diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index dad3c42339d..6fa1a4ebab6 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -117,6 +117,8 @@ docker==7.1.0 # 
-r requirements/_test.in email-validator==2.2.0 # via pydantic +faker==30.8.2 + # via -r requirements/_test.in fast-depends==2.4.12 # via faststream faststream==0.5.28 @@ -365,7 +367,9 @@ pytest-runner==6.0.1 pytest-sugar==1.0.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 - # via arrow + # via + # arrow + # faker python-dotenv==1.0.1 # via -r requirements/_test.in pyyaml==6.0.2 @@ -487,6 +491,7 @@ typing-extensions==4.12.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # aiodebug # alembic + # faker # faststream # flexcache # flexparser