Commit

Merge branch 'master' into fix-invitation-url
giancarloromeo authored Nov 25, 2024
2 parents 25b3ef8 + a703f56 commit 2081202
Showing 10 changed files with 146 additions and 70 deletions.
7 changes: 1 addition & 6 deletions packages/models-library/src/models_library/aiodocker_api.py
@@ -1,4 +1,4 @@
-from pydantic import ConfigDict, Field, field_validator
+from pydantic import Field, field_validator

from .generated_models.docker_rest_api import (
ContainerSpec,
@@ -7,7 +7,6 @@
ServiceSpec,
TaskSpec,
)
-from .utils.change_case import camel_to_snake


class AioDockerContainerSpec(ContainerSpec):
@@ -38,8 +37,6 @@ class AioDockerResources1(Resources1):
None, description="Define resources reservation.", alias="Reservations"
)

-model_config = ConfigDict(populate_by_name=True)


class AioDockerTaskSpec(TaskSpec):
container_spec: AioDockerContainerSpec | None = Field(
@@ -51,5 +48,3 @@ class AioDockerTaskSpec(TaskSpec):

class AioDockerServiceSpec(ServiceSpec):
task_template: AioDockerTaskSpec | None = Field(default=None, alias="TaskTemplate")

-model_config = ConfigDict(populate_by_name=True, alias_generator=camel_to_snake)
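The two dropped `model_config` lines suggest that `populate_by_name` (and the alias setup) is already provided by the generated base models in `docker_rest_api`, which would make the per-class declarations redundant — an inference from this diff, not something it states. A minimal Pydantic v2 sketch of the inheritance behavior being relied on (class names here are illustrative, not from the repo):

```python
from pydantic import BaseModel, ConfigDict, Field


class GeneratedDockerModel(BaseModel):
    # hypothetical stand-in for the generated docker_rest_api base classes,
    # assumed to declare the shared config once
    model_config = ConfigDict(populate_by_name=True)


class AioSubclass(GeneratedDockerModel):
    # model_config is inherited, so redeclaring it on each subclass adds nothing
    task_template: str | None = Field(default=None, alias="TaskTemplate")


# both the Docker-style alias and the python field name populate the model
assert AioSubclass(TaskTemplate="x").task_template == "x"
assert AioSubclass(task_template="x").task_template == "x"
```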
@@ -226,7 +226,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
"outputs_path": "/tmp/outputs", # noqa: S108 nosec
"inputs_path": "/tmp/inputs", # noqa: S108 nosec
"state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108 nosec
"state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108 nosec
"state_exclude": ["/tmp/strip_me/*"], # noqa: S108 nosec
},
{
"outputs_path": "/t_out",
6 changes: 4 additions & 2 deletions packages/models-library/tests/test_service_settings_labels.py
@@ -291,7 +291,9 @@ def test_container_outgoing_permit_list_and_container_allow_internet_without_com
)
},
):
-assert TypeAdapter(DynamicSidecarServiceLabels).validate_json(json.dumps(dict_data))
+assert TypeAdapter(DynamicSidecarServiceLabels).validate_json(
+    json.dumps(dict_data)
+)


def test_container_allow_internet_no_compose_spec_not_ok():
@@ -414,7 +416,7 @@ def service_labels() -> dict[str, str]:
"inputs_path": "/tmp/inputs", # noqa: S108
"outputs_path": "/tmp/outputs", # noqa: S108
"state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108
"state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108
"state_exclude": ["/tmp/strip_me/*"], # noqa: S108
}
),
"simcore.service.compose-spec": json.dumps(
@@ -58,15 +58,34 @@

_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS: Final[tuple[list[str], ...]] = (
["labels"],
["task_template", "Resources", "Limits"],
["task_template", "Resources", "Reservation", "MemoryBytes"],
["task_template", "Resources", "Reservation", "NanoCPUs"],
["task_template", "Placement", "Constraints"],
["task_template", "ContainerSpec", "Env"],
["task_template", "Resources", "Reservation", "GenericResources"],
["task_template", "container_spec", "env"],
["task_template", "placement", "constraints"],
["task_template", "resources", "reservation", "generic_resources"],
["task_template", "resources", "limits"],
["task_template", "resources", "reservation", "memory_bytes"],
["task_template", "resources", "reservation", "nano_cp_us"],
)


+def _merge_service_base_and_user_specs(
+    dynamic_sidecar_service_spec_base: AioDockerServiceSpec,
+    user_specific_service_spec: AioDockerServiceSpec,
+) -> AioDockerServiceSpec:
+    # NOTE: since user_specific_service_spec follows Docker Service Spec and not Aio
+    # we do not use aliases when exporting dynamic_sidecar_service_spec_base
+    return AioDockerServiceSpec.model_validate(
+        nested_update(
+            jsonable_encoder(
+                dynamic_sidecar_service_spec_base, exclude_unset=True, by_alias=False
+            ),
+            jsonable_encoder(
+                user_specific_service_spec, exclude_unset=True, by_alias=False
+            ),
+            include=_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS,
+        )
+    )


async def _create_proxy_service(
app,
*,
@@ -245,14 +264,8 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None:
user_specific_service_spec = AioDockerServiceSpec.model_validate(
user_specific_service_spec
)
-# NOTE: since user_specific_service_spec follows Docker Service Spec and not Aio
-# we do not use aliases when exporting dynamic_sidecar_service_spec_base
-dynamic_sidecar_service_final_spec = AioDockerServiceSpec.model_validate(
-    nested_update(
-        jsonable_encoder(dynamic_sidecar_service_spec_base, exclude_unset=True),
-        jsonable_encoder(user_specific_service_spec, exclude_unset=True),
-        include=_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS,
-    )
+dynamic_sidecar_service_final_spec = _merge_service_base_and_user_specs(
+    dynamic_sidecar_service_spec_base, user_specific_service_spec
)
rabbit_message = ProgressRabbitMessageNode.model_construct(
user_id=scheduler_data.user_id,
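The new `_merge_service_base_and_user_specs` helper centralizes a merge that `action(...)` previously inlined, and the whitelist switches to snake_case keys because both specs are now exported with `by_alias=False`, i.e. with Python field names instead of Docker's CamelCase aliases. Below is a hedged sketch of the merge semantics the code appears to rely on — the real `nested_update` lives in `simcore_service_director_v2.utils.dict_utils`, so this is an illustration under assumed semantics, reproducing the label-merge behavior checked by the regression test added further down:

```python
from copy import deepcopy
from typing import Any


def _deep_merge(base: dict[str, Any], update: dict[str, Any]) -> dict[str, Any]:
    # `update` wins on conflicts; keys present only in `base` survive
    out = dict(base)
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(out.get(key), dict):
            out[key] = _deep_merge(out[key], value)
        else:
            out[key] = value
    return out


def nested_update_sketch(
    base: dict[str, Any],
    update: dict[str, Any],
    *,
    include: tuple[list[str], ...],
) -> dict[str, Any]:
    # merge `update` into `base`, but only along the whitelisted key paths
    merged = deepcopy(base)
    for path in include:
        src: Any = update
        for key in path:
            if not (isinstance(src, dict) and key in src):
                break
            src = src[key]
        else:  # the full path exists in `update`
            dst = merged
            for key in path[:-1]:
                dst = dst.setdefault(key, {})
            last = path[-1]
            if isinstance(src, dict) and isinstance(dst.get(last), dict):
                dst[last] = _deep_merge(dst[last], src)
            else:
                dst[last] = src
    return merged


base = {"labels": {"l1": "false", "l0": "a"}}
update = {"labels": {"l1": "true", "l2": "a"}}
assert nested_update_sketch(base, update, include=(["labels"],)) == {
    "labels": {"l1": "true", "l2": "a", "l0": "a"}
}
```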
@@ -43,6 +43,10 @@
from simcore_service_director_v2.modules.dynamic_sidecar.docker_service_specs import (
get_dynamic_sidecar_spec,
)
+from simcore_service_director_v2.modules.dynamic_sidecar.scheduler._core._event_create_sidecars import (
+    _DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS,
+    _merge_service_base_and_user_specs,
+)
from simcore_service_director_v2.utils.dict_utils import nested_update


@@ -180,7 +184,7 @@ def expected_dynamic_sidecar_spec(
"paths_mapping": {
"inputs_path": "/tmp/inputs", # noqa: S108
"outputs_path": "/tmp/outputs", # noqa: S108
"state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108
"state_exclude": ["/tmp/strip_me/*"], # noqa: S108
"state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108
},
"callbacks_mapping": CallbacksMapping.model_config[
@@ -239,7 +243,7 @@
"DY_SIDECAR_PATH_OUTPUTS": "/tmp/outputs", # noqa: S108
"DY_SIDECAR_PROJECT_ID": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe",
"DY_SIDECAR_STATE_EXCLUDE": json_dumps(
["*.py", "/tmp/strip_me/*"] # noqa: S108
["/tmp/strip_me/*"] # noqa: S108
),
"DY_SIDECAR_STATE_PATHS": json_dumps(
["/tmp/save_1", "/tmp_save_2"] # noqa: S108
@@ -614,14 +618,66 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs(
another_merged_dict = nested_update(
orig_dict,
user_dict,
-include=(
-    ["labels"],
-    ["task_template", "Resources", "Limits"],
-    ["task_template", "Resources", "Reservation", "MemoryBytes"],
-    ["task_template", "Resources", "Reservation", "NanoCPUs"],
-    ["task_template", "Placement", "Constraints"],
-    ["task_template", "ContainerSpec", "Env"],
-    ["task_template", "Resources", "Reservation", "GenericResources"],
-),
+include=_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS,
)
assert another_merged_dict


+def test_regression__merge_service_base_and_user_specs():
+    mock_service_spec = AioDockerServiceSpec.model_validate(
+        {"Labels": {"l1": "false", "l0": "a"}}
+    )
+    mock_catalog_constraints = AioDockerServiceSpec.model_validate(
+        {
+            "Labels": {"l1": "true", "l2": "a"},
+            "TaskTemplate": {
+                "Placement": {
+                    "Constraints": [
+                        "c1==true",
+                        "c2==true",
+                    ],
+                },
+                "Resources": {
+                    "Limits": {"MemoryBytes": 1, "NanoCPUs": 1},
+                    "Reservations": {
+                        "GenericResources": [
+                            {"DiscreteResourceSpec": {"Kind": "VRAM", "Value": 1}}
+                        ],
+                        "MemoryBytes": 2,
+                        "NanoCPUs": 2,
+                    },
+                },
+                "ContainerSpec": {
+                    "Env": [
+                        "key-1=value-1",
+                        "key2-value2=a",
+                    ]
+                },
+            },
+        }
+    )
+    result = _merge_service_base_and_user_specs(
+        mock_service_spec, mock_catalog_constraints
+    )
+    assert result.model_dump(by_alias=True, exclude_unset=True) == {
+        "Labels": {"l1": "true", "l2": "a", "l0": "a"},
+        "TaskTemplate": {
+            "Placement": {
+                "Constraints": [
+                    "c1==true",
+                    "c2==true",
+                ],
+            },
+            "Resources": {
+                "Limits": {"MemoryBytes": 1, "NanoCPUs": 1},
+                "Reservations": {
+                    "GenericResources": [
+                        {"DiscreteResourceSpec": {"Kind": "VRAM", "Value": 1}}
+                    ],
+                    "MemoryBytes": 2,
+                    "NanoCPUs": 2,
+                },
+            },
+            "ContainerSpec": {"Env": {"key-1": "value-1", "key2-value2": "a"}},
+        },
+    }
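One detail worth noting in the expected result above: the `Env` list of `KEY=VALUE` strings comes back as a mapping, presumably normalized during validation by `AioDockerContainerSpec`. A small sketch of that assumed conversion (`_env_list_to_dict` is a hypothetical helper, not the repo's actual validator):

```python
def _env_list_to_dict(env: list[str]) -> dict[str, str]:
    # assumed normalization: docker-style "KEY=VALUE" strings become a mapping
    out: dict[str, str] = {}
    for item in env:
        key, _, value = item.partition("=")
        out[key] = value
    return out


assert _env_list_to_dict(["key-1=value-1", "key2-value2=a"]) == {
    "key-1": "value-1",
    "key2-value2": "a",
}
```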
25 changes: 12 additions & 13 deletions tests/performance/Makefile
@@ -9,6 +9,8 @@ export LOCUST_VERSION
ENV_FILE=$(shell pwd)/.env
export ENV_FILE

+KERNEL_NAME=$(shell uname -s)

NETWORK_NAME=dashboards_timenet

# UTILS
@@ -44,22 +46,17 @@ build: ## builds distributed osparc locust docker image
push:
docker push itisfoundation/locust:$(LOCUST_VERSION)



-.PHONY: down
-down: ## stops and removes osparc locust containers
-	docker compose --file docker-compose.yml down
-
-.PHONY: test
-test: ## runs osparc locust. Locust and test configuration are specified in ENV_FILE
+.PHONY: test-up test-down
+test-up: ## runs osparc locust. Locust and test configuration are specified in ENV_FILE
@if [ ! -f $${ENV_FILE} ]; then echo "You must generate a .env file before running tests!!! See the README..." && exit 1; fi;
@if ! docker network ls | grep -q $(NETWORK_NAME); then \
docker network create $(NETWORK_NAME); \
echo "Created docker network $(NETWORK_NAME)"; \
fi
docker compose --file docker-compose.yml up --scale worker=4 --exit-code-from=master


+test-down: ## stops and removes osparc locust containers
+	@docker compose --file docker-compose.yml down

.PHONY: dashboards-up dashboards-down

@@ -69,12 +66,14 @@ dashboards-up: ## Create Grafana dashboard for inspecting locust results. See da
docker network rm $(NETWORK_NAME); \
echo "Removed docker network $(NETWORK_NAME)"; \
fi
@if [[ "$(KERNEL_NAME)" == "Linux" ]]; then \
( sleep 3 && xdg-open http://localhost:3000 ) & \
fi
@locust-compose up

-dashboards-down:
-	@locust-compose down


+dashboards-down: ## stops and removes Grafana dashboard and Timescale postgres containers
+	@locust-compose down

.PHONY: install-ci install-dev

Expand All @@ -86,6 +85,6 @@ install-ci:


.PHONY: config
-config:
+config: ## Create config for your locust tests
@$(call check_defined, input, please define inputs when calling $@ - e.g. ```make $@ input="--help"```)
@uv run locust_settings.py $(input) | tee "${ENV_FILE}"
13 changes: 5 additions & 8 deletions tests/performance/README.md
@@ -15,21 +15,18 @@ make config input="--LOCUST_HOST=https://api.osparc-master.speag.com
```
This will validate your settings and you should be good to go once you see the settings printed in your terminal.

-2. Add settings related to your locust file. E.g. if your file expects to find an environment variable `MYENVVAR` you add it to `.env`:
+2. Once you have all settings set up, you run your test script using the Make `test-up` recipe:
```bash
echo "MYENVVAR=thisismyenvvar" >> .env
make test-up
```

-3. Once you have all settings setup you uun your test script using the Make `test` recipe:
-```bash
-make test
-```
+3. If you want to clean up after your tests (remove docker containers) you run `make test-down`

## Dashboards for visualization
- You can visualize the results of your tests (in real time) in a collection of beautiful [Grafana dashboards](https://github.com/SvenskaSpel/locust-plugins/tree/master/locust_plugins/dashboards).
-- To do this, run `make dashboards-up` and go to `localhost:3000` to view the dashboards. The way you tell locust to send test results to the database/grafana is by ensuring `LOCUST_TIMESCALE=1` (see how to generate settings in [usage](#usage))
+- To do this, run `make dashboards-up`. If you are on Linux you should see your browser opening `localhost:3000`, where you can view the dashboards. If the browser doesn't open automatically, do it manually and navigate to `localhost:3000`. The way you tell locust to send test results to the database/grafana is by ensuring `LOCUST_TIMESCALE=1` (see how to generate settings in [usage](#usage))
- When you are done you run `make dashboards-down` to clean up.
-- If you are using VPN you will need to forward port 300 to your local machine to view the dashboard.
+- If you are using VPN you will need to forward port 3000 to your local machine to view the dashboard.


## Tricky settings 🚨
3 changes: 2 additions & 1 deletion tests/performance/locust_files/metamodeling/workflow.py
@@ -6,7 +6,7 @@

from locust import HttpUser, task
from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict
from requests.auth import HTTPBasicAuth
from tenacity import (
Retrying,
@@ -27,6 +27,7 @@


class UserSettings(BaseSettings):
+model_config = SettingsConfigDict(extra="ignore")
OSPARC_API_KEY: str = Field(default=...)
OSPARC_API_SECRET: str = Field(default=...)

3 changes: 2 additions & 1 deletion tests/performance/locust_files/platform_ping_test.py
@@ -8,7 +8,7 @@
from locust import task
from locust.contrib.fasthttp import FastHttpUser
from pydantic import Field
-from pydantic_settings import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict

logging.basicConfig(level=logging.INFO)

@@ -20,6 +20,7 @@


class MonitoringBasicAuth(BaseSettings):
+model_config = SettingsConfigDict(extra="ignore")
SC_USER_NAME: str = Field(default=..., examples=["<your username>"])
SC_PASSWORD: str = Field(default=..., examples=["<your password>"])

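Both locust files gain `extra="ignore"`, presumably because the shared `.env` produced by `make config` also carries `LOCUST_*` variables that these settings classes do not declare, and pydantic-settings v2 rejects unknown dotenv keys by default. A minimal sketch of the effect — the `.env.demo` file, its contents, and the `env_file` option are illustrative, not taken from the diff:

```python
from pathlib import Path

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

# hypothetical dotenv shared with locust, holding a key this class does not declare
Path(".env.demo").write_text("OSPARC_API_KEY=dummy\nLOCUST_HOST=https://example.com\n")


class UserSettings(BaseSettings):
    # with pydantic-settings' default (extra="forbid"), the undeclared
    # LOCUST_HOST entry in the dotenv file would raise a ValidationError;
    # extra="ignore" skips it silently
    model_config = SettingsConfigDict(extra="ignore", env_file=".env.demo")
    OSPARC_API_KEY: str = Field(default=...)


assert UserSettings().OSPARC_API_KEY == "dummy"
```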