pydantic2 making ported services green #6701

Merged
Changes from 16 commits
Commits
51 commits
088fcdd
fixed settings
Nov 11, 2024
49c187b
pylint
Nov 11, 2024
8d6ca68
fixed catalog settings
Nov 11, 2024
ab5ed66
fixed settings dask-sidecar
Nov 11, 2024
c4c56c5
pylint
Nov 11, 2024
742c889
migrated osparc-gateway-server to pydantic2
Nov 11, 2024
efa4bd9
fixed settings
Nov 11, 2024
77a4252
fixed import
Nov 11, 2024
209143b
fixed import
Nov 11, 2024
95c8e94
fixed exceptions
Nov 11, 2024
72cf61c
fixed tests
Nov 11, 2024
fb2e2e3
mypy
Nov 11, 2024
54176bf
fixed validation of SIMCORE_VCS_RELEASE_URL
Nov 11, 2024
45c163c
mypy
Nov 11, 2024
7e56899
pylint
Nov 11, 2024
ede8a16
fix broken test
Nov 11, 2024
6a18261
fixed broken tests
Nov 11, 2024
11154e1
fixed broken serializer
Nov 11, 2024
346590c
fixed failing tests
Nov 11, 2024
fc4aab7
fixed broken tests
Nov 11, 2024
07f6094
fixed thumbnail issues
Nov 12, 2024
2f0862c
added note
Nov 12, 2024
21baf8b
revert discriminator to original field name type
Nov 12, 2024
072498c
reverted some changes
Nov 12, 2024
e7a9e9d
revert
Nov 12, 2024
cf8ddbe
fixed broken tests
Nov 12, 2024
4e0f21f
this should fix the issue with the failing tests
Nov 12, 2024
eaf6d73
fixed broken test
Nov 12, 2024
7b2b82e
revert default changes
Nov 12, 2024
b492d5d
fixed tests
Nov 12, 2024
d33653a
refactor new format
Nov 12, 2024
108cad4
fixed failing tests
Nov 12, 2024
80a59e5
bypass tests for now
Nov 12, 2024
3a25863
fixed issue with create_node tests
Nov 12, 2024
748b664
making catalog tests green
Nov 12, 2024
0d61811
fixed dask sidecar settings
Nov 12, 2024
ab28ae1
fixed test
Nov 12, 2024
0c5ac7e
fixed tests
Nov 12, 2024
7697eab
fixed tests
Nov 12, 2024
6675a5c
using global fix
Nov 12, 2024
49b9cd1
revert unused
Nov 12, 2024
042e867
revert changes
Nov 12, 2024
b934595
making settings fail
Nov 12, 2024
5c0b587
fixed xfail
Nov 12, 2024
d720750
fixed broken tests
Nov 12, 2024
2294bfe
pylint
Nov 12, 2024
f070a07
pylint
Nov 12, 2024
bded988
fixed broken tests
Nov 12, 2024
4e34ec7
pylint
Nov 13, 2024
02811cd
using typeadapter
Nov 13, 2024
b90220c
removed unused
Nov 13, 2024
18 changes: 8 additions & 10 deletions packages/common-library/src/common_library/serialization.py
@@ -1,11 +1,9 @@
from datetime import timedelta
from typing import Any

from pydantic import BaseModel, SecretStr
from pydantic import BaseModel, SecretStr, TypeAdapter
from pydantic_core import Url

from .pydantic_fields_extension import get_type


def model_dump_with_secrets(
settings_obj: BaseModel, *, show_secrets: bool, **pydantic_export_options
@@ -31,12 +29,12 @@ def model_dump_with_secrets(
data[field_name] = str(field_data)

elif isinstance(field_data, dict):
field_type = get_type(settings_obj.model_fields[field_name])
if issubclass(field_type, BaseModel):
data[field_name] = model_dump_with_secrets(
field_type.model_validate(field_data),
show_secrets=show_secrets,
**pydantic_export_options,
)
field_type = settings_obj.model_fields[field_name].annotation

data[field_name] = model_dump_with_secrets(
TypeAdapter(field_type).validate_python(field_data),
show_secrets=show_secrets,
**pydantic_export_options,
)

return data
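For context on the change above: instead of resolving the nested field's class through the old get_type() helper and calling model_validate() only for BaseModel subclasses, the new code re-validates the raw dict through a TypeAdapter built from the field's declared annotation. A minimal, self-contained sketch of the mechanism (model names are illustrative, not from this repo):

```python
from pydantic import BaseModel, SecretStr, TypeAdapter


class DatabaseSettings(BaseModel):  # hypothetical nested settings model
    host: str
    password: SecretStr


class AppSettings(BaseModel):  # hypothetical outer settings model
    db: DatabaseSettings


raw = {"host": "localhost", "password": "not-a-real-secret"}

# Pydantic v2 style: take the declared annotation of the field and let a
# TypeAdapter validate the plain dict against it.
field_type = AppSettings.model_fields["db"].annotation
nested = TypeAdapter(field_type).validate_python(raw)

print(type(nested).__name__)  # DatabaseSettings
print(nested.password)        # prints '**********' because SecretStr masks the value
```

Unlike the old issubclass() check, TypeAdapter also copes with annotations that are not plain BaseModel subclasses (unions, Annotated types, and so on), which appears to be the point of the change.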
@@ -1,7 +1,7 @@
import datetime
from enum import auto

from pydantic import AnyUrl, BaseModel
from pydantic import AnyUrl, BaseModel, Field

from ..clusters import ClusterAuthentication
from ..users import UserID
@@ -17,7 +17,7 @@ class ClusterState(StrAutoEnum):

class OnDemandCluster(BaseModel):
endpoint: AnyUrl
authentication: ClusterAuthentication
authentication: ClusterAuthentication = Field(discriminator="discriminator_type")
state: ClusterState
user_id: UserID
wallet_id: WalletID | None
@@ -114,7 +114,9 @@ class ClusterDetailsGet(ClusterDetails):

class ClusterCreate(BaseCluster):
owner: GroupID | None = None # type: ignore[assignment]
authentication: ExternalClusterAuthentication
authentication: ExternalClusterAuthentication = Field(
discriminator="discriminator_type"
)
access_rights: dict[GroupID, ClusterAccessRights] = Field(
alias="accessRights", default_factory=dict
)
@@ -174,7 +176,7 @@ class ClusterPatch(BaseCluster):
owner: GroupID | None = None # type: ignore[assignment]
thumbnail: HttpUrl | None = None
endpoint: AnyUrl | None = None # type: ignore[assignment]
authentication: ExternalClusterAuthentication | None = None # type: ignore[assignment]
authentication: ExternalClusterAuthentication | None = Field(None, discriminator="discriminator_type") # type: ignore[assignment]
access_rights: dict[GroupID, ClusterAccessRights] | None = Field( # type: ignore[assignment]
default=None, alias="accessRights"
)
@@ -203,5 +205,7 @@ class ClusterPatch(BaseCluster):
class ClusterPing(BaseModel):
endpoint: AnyHttpUrl
authentication: ClusterAuthentication = Field(
..., description="Dask gateway authentication"
...,
description="Dask gateway authentication",
discriminator="discriminator_type",
)
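The recurring edit in these schemas is passing an explicit discriminator to Field() so that the ClusterAuthentication / ExternalClusterAuthentication unions are resolved by tag in pydantic v2. A reduced sketch of the mechanism (class names are illustrative placeholders for the real union members defined in models_library.clusters):

```python
from typing import Literal

from pydantic import BaseModel, Field


class SimpleAuth(BaseModel):
    discriminator_type: Literal["simple"] = "simple"
    username: str
    password: str


class NoAuth(BaseModel):
    discriminator_type: Literal["none"] = "none"


class PingSketch(BaseModel):
    # The discriminator makes pydantic dispatch on the tag field instead of
    # trying every union member, giving faster validation and clearer errors.
    authentication: SimpleAuth | NoAuth = Field(discriminator="discriminator_type")


ping = PingSketch.model_validate({"authentication": {"discriminator_type": "none"}})
print(type(ping.authentication).__name__)  # NoAuth
```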
39 changes: 23 additions & 16 deletions packages/models-library/src/models_library/clusters.py
@@ -42,21 +42,21 @@ class ClusterAccessRights(BaseModel):


class BaseAuthentication(BaseModel):
type: str
discriminator_type: str

model_config = ConfigDict(frozen=True, extra="forbid")


class SimpleAuthentication(BaseAuthentication):
type: Literal["simple"] = "simple"
discriminator_type: Literal["simple"] = "simple"
username: str
password: SecretStr

model_config = ConfigDict(
json_schema_extra={
"examples": [
{
"type": "simple",
"discriminator_type": "simple",
"username": "someuser",
"password": "somepassword",
},
@@ -66,40 +66,45 @@ class SimpleAuthentication(BaseAuthentication):


class KerberosAuthentication(BaseAuthentication):
type: Literal["kerberos"] = "kerberos"
discriminator_type: Literal["kerberos"] = "kerberos"

model_config = ConfigDict(
json_schema_extra={
"examples": [
{
"type": "kerberos",
"discriminator_type": "kerberos",
},
]
}
)


class JupyterHubTokenAuthentication(BaseAuthentication):
type: Literal["jupyterhub"] = "jupyterhub"
discriminator_type: Literal["jupyterhub"] = "jupyterhub"
api_token: str

model_config = ConfigDict(
json_schema_extra={
"examples": [
{"type": "jupyterhub", "api_token": "some_jupyterhub_token"},
{
"discriminator_type": "jupyterhub",
"api_token": "some_jupyterhub_token",
},
]
}
)


class NoAuthentication(BaseAuthentication):
type: Literal["none"] = "none"
discriminator_type: Literal["none"] = "none"

model_config = ConfigDict(json_schema_extra={"examples": [{"type": "none"}]})
model_config = ConfigDict(
json_schema_extra={"examples": [{"discriminator_type": "none"}]}
)


class TLSAuthentication(BaseAuthentication):
type: Literal["tls"] = "tls"
discriminator_type: Literal["tls"] = "tls"
tls_ca_file: Path
tls_client_cert: Path
tls_client_key: Path
@@ -108,7 +113,7 @@ class TLSAuthentication(BaseAuthentication):
json_schema_extra={
"examples": [
{
"type": "tls",
"discriminator_type": "tls",
"tls_ca_file": "/path/to/ca_file",
"tls_client_cert": "/path/to/cert_file",
"tls_client_key": "/path/to/key_file",
@@ -140,7 +145,9 @@ class BaseCluster(BaseModel):
)
endpoint: AnyUrl
authentication: ClusterAuthentication = Field(
..., description="Dask gateway authentication"
...,
description="Dask gateway authentication",
discriminator="discriminator_type",
)
access_rights: dict[GroupID, ClusterAccessRights] = Field(default_factory=dict)

@@ -169,7 +176,7 @@ class Cluster(BaseCluster):
"owner": 1456,
"endpoint": "tcp://default-dask-scheduler:8786",
"authentication": {
"type": "simple",
"discriminator_type": "simple",
"username": "someuser",
"password": "somepassword",
},
@@ -181,7 +188,7 @@
"owner": 12,
"endpoint": "https://registry.osparc-development.fake.dev",
"authentication": {
"type": "simple",
"discriminator_type": "simple",
"username": "someuser",
"password": "somepassword",
},
@@ -193,7 +200,7 @@
"type": ClusterTypeInModel.AWS,
"owner": 154,
"endpoint": "https://registry.osparc-development.fake.dev",
"authentication": {"type": "kerberos"},
"authentication": {"discriminator_type": "kerberos"},
"access_rights": {
154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item]
12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item]
@@ -208,7 +215,7 @@
"owner": 2321,
"endpoint": "https://registry.osparc-development.fake2.dev",
"authentication": {
"type": "jupyterhub",
"discriminator_type": "jupyterhub",
"api_token": "some_fake_token",
},
"access_rights": {
4 changes: 2 additions & 2 deletions packages/models-library/src/models_library/projects.py
@@ -4,7 +4,7 @@

from datetime import datetime
from enum import Enum
from typing import Any, Final, TypeAlias
from typing import Annotated, Any, Final, TypeAlias
from uuid import UUID

from models_library.basic_types import ConstrainedStr
@@ -77,7 +77,7 @@ class BaseProjectModel(BaseModel):
last_change_date: datetime = Field(...)

# Pipeline of nodes (SEE projects_nodes.py)
workbench: NodesDict = Field(..., description="Project's pipeline")
workbench: Annotated[NodesDict, Field(..., description="Project's pipeline")]

# validators
_empty_thumbnail_is_none = field_validator("thumbnail", mode="before")(
3 changes: 2 additions & 1 deletion packages/models-library/src/models_library/projects_state.py
@@ -3,6 +3,7 @@
"""

from enum import Enum, unique
from typing import Annotated

from pydantic import (
BaseModel,
@@ -126,7 +127,7 @@ class ProjectRunningState(BaseModel):


class ProjectState(BaseModel):
locked: ProjectLocked = Field(..., description="The project lock state")
locked: Annotated[ProjectLocked, Field(..., description="The project lock state")]
state: ProjectRunningState = Field(..., description="The project running state")

model_config = ConfigDict(extra="forbid")
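Both of the field changes above move Field() metadata into Annotated[...], the declaration style pydantic v2 favors. A minimal sketch of the pattern, with the field types simplified to built-ins (the real models use NodesDict and ProjectLocked):

```python
from typing import Annotated

from pydantic import BaseModel, Field


class ProjectStateSketch(BaseModel):
    # Annotated keeps the type and its Field metadata in one place; behaviour
    # is the same as `locked: bool = Field(..., description=...)`.
    locked: Annotated[bool, Field(description="The project lock state")]
    state: Annotated[str, Field(description="The project running state")]


print(ProjectStateSketch(locked=True, state="RUNNING"))
```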
4 changes: 1 addition & 3 deletions packages/service-library/src/servicelib/aiohttp/tracing.py
@@ -31,9 +31,7 @@
except ImportError:
HAS_BOTOCORE = False
try:
from opentelemetry.instrumentation.aiopg import ( # type: ignore[import-not-found]
AiopgInstrumentor,
)
from opentelemetry.instrumentation.aiopg import AiopgInstrumentor

HAS_AIOPG = True
except ImportError:
4 changes: 1 addition & 3 deletions packages/service-library/src/servicelib/fastapi/tracing.py
@@ -28,9 +28,7 @@
HAS_ASYNCPG = False

try:
from opentelemetry.instrumentation.aiopg import ( # type: ignore[import-not-found]
AiopgInstrumentor,
)
from opentelemetry.instrumentation.aiopg import AiopgInstrumentor

HAS_AIOPG = True
except ImportError:
9 changes: 6 additions & 3 deletions services/catalog/src/simcore_service_catalog/core/settings.py
@@ -8,6 +8,7 @@
from models_library.basic_types import LogLevel
from models_library.services_resources import ResourcesDict, ResourceValue
from pydantic import AliasChoices, ByteSize, Field, PositiveInt, TypeAdapter
from pydantic_settings import SettingsConfigDict
from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.application import BaseApplicationSettings
from settings_library.base import BaseCustomSettings
@@ -47,20 +48,20 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
LOG_LEVEL: LogLevel = Field(
LogLevel.INFO.value,
validation_alias=AliasChoices(
"CATALOG_LOG_LEVEL", "CATALOG_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
"CATALOG_LOGLEVEL", "LOG_LEVEL", "CATALOG_LOG_LEVEL", "LOGLEVEL"
),
)
CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
default=False,
validation_alias=AliasChoices(
"CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"
"LOG_FORMAT_LOCAL_DEV_ENABLED", "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED"
),
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
)
CATALOG_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
default_factory=dict,
validation_alias=AliasChoices(
"CATALOG_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
"LOG_FILTER_MAPPING", "CATALOG_LOG_FILTER_MAPPING"
Member:
If the order matters... if both are defined, which one has precedence here? It should resolve to CATALOG_LOG_FILTER_MAPPING, provided that both are valid!
Are there tests for these? If not, it would be good to have tests for the fields that have aliases.

Contributor Author:
You can specify the order inside the validator, but we are not allowing any extras here, so it should be fine.

),
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
)
@@ -101,3 +102,5 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
json_schema_extra={"auto_default_from_env": True},
description="settings for opentelemetry tracing",
)

model_config = SettingsConfigDict(extra="allow")
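Regarding the alias-precedence question in the review thread above: with AliasChoices, pydantic-settings tries the aliases in the order they are listed and uses the first environment variable it finds. A small sketch that could be turned into the test the reviewer asks for (the reduced class and the values are made up; the first-match assumption reflects pydantic-settings' documented alias resolution):

```python
import os

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class CatalogSettingsSketch(BaseSettings):  # hypothetical reduced settings class
    CATALOG_LOG_FILTER_MAPPING: dict[str, list[str]] = Field(
        default_factory=dict,
        validation_alias=AliasChoices("LOG_FILTER_MAPPING", "CATALOG_LOG_FILTER_MAPPING"),
    )


# Both aliases set: the first alias listed in AliasChoices is expected to win.
os.environ["LOG_FILTER_MAPPING"] = '{"uvicorn.access": ["/healthcheck"]}'
os.environ["CATALOG_LOG_FILTER_MAPPING"] = '{"gunicorn.access": ["/metrics"]}'

print(CatalogSettingsSketch().CATALOG_LOG_FILTER_MAPPING)
# expected: {'uvicorn.access': ['/healthcheck']}
```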
23 changes: 14 additions & 9 deletions services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py
@@ -1,8 +1,9 @@
from pathlib import Path
from typing import Any
from typing import Annotated, Any

from models_library.basic_types import LogLevel
from pydantic import AliasChoices, Field, field_validator
from pydantic_settings import SettingsConfigDict
from servicelib.logging_utils_filtering import LoggerName, MessageSubstring
from settings_library.base import BaseCustomSettings
from settings_library.utils_logging import MixinLoggingSettings
@@ -13,12 +14,15 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):

SC_BUILD_TARGET: str | None = None
SC_BOOT_MODE: str | None = None
LOG_LEVEL: LogLevel = Field(
LogLevel.INFO.value,
validation_alias=AliasChoices(
"DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
LOG_LEVEL: Annotated[
LogLevel,
Field(
LogLevel.INFO.value,
validation_alias=AliasChoices(
"SIDECAR_LOGLEVEL", "LOG_LEVEL", "DASK_SIDECAR_LOGLEVEL", "LOGLEVEL"
),
),
)
]

# sidecar config ---

@@ -41,14 +45,13 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):
DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
default=False,
validation_alias=AliasChoices(
"DASK_LOG_FORMAT_LOCAL_DEV_ENABLED",
"LOG_FORMAT_LOCAL_DEV_ENABLED",
"LOG_FORMAT_LOCAL_DEV_ENABLED", "DASK_LOG_FORMAT_LOCAL_DEV_ENABLED"
),
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
)
DASK_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
default_factory=dict,
validation_alias=AliasChoices("DASK_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"),
validation_alias=AliasChoices("LOG_FILTER_MAPPING", "DASK_LOG_FILTER_MAPPING"),
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
)

@@ -65,3 +68,5 @@ def as_worker(self) -> bool:
@classmethod
def _validate_loglevel(cls, value: Any) -> str:
return cls.validate_log_level(f"{value}")

model_config = SettingsConfigDict(extra="allow")
2 changes: 1 addition & 1 deletion services/dask-sidecar/tests/unit/test_cli.py
@@ -30,4 +30,4 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict):

print(result.output)
settings = Settings.model_validate_json(result.output)
assert settings == Settings.create_from_envs()
assert settings.model_dump() == Settings.create_from_envs().model_dump()
4 changes: 2 additions & 2 deletions services/director-v2/tests/unit/test_modules_dask_client.py
@@ -54,7 +54,7 @@
from models_library.projects_nodes_io import NodeID
from models_library.resource_tracker import HardwareInfo
from models_library.users import UserID
from pydantic import AnyUrl, ByteSize, SecretStr
from pydantic import AnyUrl, ByteSize, SecretStr, TypeAdapter
from pydantic.tools import parse_obj_as
from pytest_mock.plugin import MockerFixture
from pytest_simcore.helpers.typing_env import EnvVarsDict
@@ -375,7 +375,7 @@ def _mocked_node_ports(mocker: MockerFixture) -> None:
)
mocker.patch(
"simcore_service_director_v2.modules.dask_client.dask_utils.compute_service_log_file_upload_link",
return_value=parse_obj_as(AnyUrl, "file://undefined"),
return_value=TypeAdapter(AnyUrl).validate_python("file://undefined"),
)


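The test change above swaps the deprecated v1-era parse_obj_as helper for the TypeAdapter-based v2 API. A one-liner sketch of the equivalence (the URL value is illustrative):

```python
from pydantic import AnyUrl, TypeAdapter

# pydantic v2 replacement for the v1-style `parse_obj_as(AnyUrl, ...)` helper
url = TypeAdapter(AnyUrl).validate_python("https://example.com/logs")
print(url)
```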
1 change: 1 addition & 0 deletions services/osparc-gateway-server/requirements/_base.in
@@ -7,4 +7,5 @@
aiodocker
async-timeout
dask-gateway-server[local]
pydantic-settings
pydantic[email,dotenv]
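Context for the new requirement: in pydantic v2, the BaseSettings machinery lives in the separate pydantic-settings package, so it now has to be declared explicitly. A trivial sketch of what a settings import looks like after the split (class and field are illustrative, not the service's real settings):

```python
from pydantic_settings import BaseSettings  # no longer importable from pydantic itself in v2


class GatewaySettingsSketch(BaseSettings):
    SC_BOOT_MODE: str | None = None


print(GatewaySettingsSketch())
```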