diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 38e7c0f96364..5f89a2cee7c1 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -73,7 +73,7 @@ async def create( session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=settings.S3_ENDPOINT, + endpoint_url=f"{settings.S3_ENDPOINT}", aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET_KEY, region_name=settings.S3_REGION, diff --git a/packages/common-library/src/common_library/pydantic_validators.py b/packages/common-library/src/common_library/pydantic_validators.py index 471ba1a4bf38..a0122fccbe89 100644 --- a/packages/common-library/src/common_library/pydantic_validators.py +++ b/packages/common-library/src/common_library/pydantic_validators.py @@ -1,12 +1,11 @@ -import datetime +import datetime as dt import re import warnings -from datetime import timedelta from pydantic import TypeAdapter, field_validator -def _validate_legacy_timedelta_str(time_str: str | timedelta) -> str | timedelta: +def _validate_legacy_timedelta_str(time_str: str | dt.timedelta) -> str | dt.timedelta: if not isinstance(time_str, str): return time_str @@ -34,14 +33,14 @@ def validate_numeric_string_as_timedelta(field: str): """Transforms a float/int number into a valid datetime as it used to work in the past""" def _numeric_string_as_timedelta( - v: datetime.timedelta | str | float, - ) -> datetime.timedelta | str | float: + v: dt.timedelta | str | float, + ) -> dt.timedelta | str | float: if isinstance(v, str): try: converted_value = float(v) - iso8601_format = TypeAdapter(timedelta).dump_python( - timedelta(seconds=converted_value), mode="json" + iso8601_format = TypeAdapter(dt.timedelta).dump_python( + dt.timedelta(seconds=converted_value), mode="json" ) warnings.warn( f"{field}='{v}' -should be set to-> {field}='{iso8601_format}' (ISO8601 datetime format). 
" diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index 4394fa8cc459..70dd53e13c4b 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -21,10 +21,9 @@ def model_dump_with_secrets( data[field_name] = field_data.total_seconds() elif isinstance(field_data, SecretStr): - if show_secrets: - data[field_name] = field_data.get_secret_value() - else: - data[field_name] = str(field_data) + data[field_name] = ( + field_data.get_secret_value() if show_secrets else str(field_data) + ) elif isinstance(field_data, Url): data[field_name] = str(field_data) diff --git a/packages/common-library/tests/test_serialization.py b/packages/common-library/tests/test_serialization.py index d897ff5ec5d6..d5dea70ec22f 100644 --- a/packages/common-library/tests/test_serialization.py +++ b/packages/common-library/tests/test_serialization.py @@ -4,22 +4,31 @@ class Credentials(BaseModel): - USERNAME: str | None = None - PASSWORD: SecretStr | None = None + username: str + password: SecretStr + + +class Access(BaseModel): + credentials: Credentials @pytest.mark.parametrize( "expected,show_secrets", [ ( - {"USERNAME": "DeepThought", "PASSWORD": "42"}, + {"credentials": {"username": "DeepThought", "password": "42"}}, True, ), ( - {"USERNAME": "DeepThought", "PASSWORD": "**********"}, + {"credentials": {"username": "DeepThought", "password": "**********"}}, False, # hide secrets ), ], ) def test_model_dump_with_secrets(expected: dict, show_secrets: bool): - assert expected == model_dump_with_secrets(Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")), show_secrets=show_secrets) + assert expected == model_dump_with_secrets( + Access( + credentials=Credentials(username="DeepThought", password=SecretStr("42")) + ), + show_secrets=show_secrets, + ) diff --git a/packages/models-library/src/models_library/aiodocker_api.py b/packages/models-library/src/models_library/aiodocker_api.py index 865570fbe36d..4f1c86b8f4d7 100644 --- a/packages/models-library/src/models_library/aiodocker_api.py +++ b/packages/models-library/src/models_library/aiodocker_api.py @@ -11,12 +11,13 @@ class AioDockerContainerSpec(ContainerSpec): - Env: dict[str, str | None] | None = Field( + env: dict[str, str | None] | None = Field( # type: ignore[assignment] default=None, - description="aiodocker expects here a dictionary and re-convert it back internally`.\n", + alias="Env", + description="aiodocker expects here a dictionary and re-convert it back internally", ) - @field_validator("Env", mode="before") + @field_validator("env", mode="before") @classmethod def convert_list_to_dict(cls, v): if v is not None and isinstance(v, list): @@ -33,7 +34,7 @@ def convert_list_to_dict(cls, v): class AioDockerResources1(Resources1): # NOTE: The Docker REST API documentation is wrong!!! # Do not set that back to singular Reservation. 
- Reservation: ResourceObject | None = Field( + reservation: ResourceObject | None = Field( None, description="Define resources reservation.", alias="Reservations" ) @@ -41,17 +42,14 @@ class AioDockerResources1(Resources1): class AioDockerTaskSpec(TaskSpec): - ContainerSpec: AioDockerContainerSpec | None = Field( - None, + container_spec: AioDockerContainerSpec | None = Field( + default=None, alias="ContainerSpec" ) - Resources: AioDockerResources1 | None = Field( - None, - description="Resource requirements which apply to each individual container created\nas part of the service.\n", - ) + resources: AioDockerResources1 | None = Field(default=None, alias="Resources") class AioDockerServiceSpec(ServiceSpec): - TaskTemplate: AioDockerTaskSpec | None = None + task_template: AioDockerTaskSpec | None = Field(default=None, alias="TaskTemplate") model_config = ConfigDict(populate_by_name=True, alias_generator=camel_to_snake) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index a16ba29289b3..0539ec5a3eb3 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -1,4 +1,4 @@ -from typing import TypeAlias +from typing import Any, TypeAlias from pydantic import ( AnyHttpUrl, @@ -48,13 +48,12 @@ class WorkerMetrics(BaseModel): class UsedResources(DictModel[str, NonNegativeFloat]): @model_validator(mode="before") @classmethod - def ensure_negative_value_is_zero(cls, values): + def ensure_negative_value_is_zero(cls, values: dict[str, Any]): # dasks adds/remove resource values and sometimes # they end up being negative instead of 0 - if v := values.get("__root__", {}): - for res_key, res_value in v.items(): - if res_value < 0: - v[res_key] = 0 + for res_key, res_value in values.items(): + if res_value < 0: + values[res_key] = 0 return values diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py index 2204186c3ed1..0663cc37f784 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py @@ -1,7 +1,15 @@ from typing import Any, TypeAlias from models_library.basic_types import IDStr -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, ConfigDict, Field, field_validator +from pydantic import ( + AnyHttpUrl, + AnyUrl, + BaseModel, + ConfigDict, + Field, + ValidationInfo, + field_validator, +) from ..clusters import ClusterID from ..projects import ProjectID @@ -63,16 +71,18 @@ class ComputationCreate(BaseModel): @field_validator("product_name") @classmethod - def ensure_product_name_defined_if_computation_starts(cls, v, values): - if "start_pipeline" in values and values["start_pipeline"] and v is None: + def _ensure_product_name_defined_if_computation_starts( + cls, v, info: ValidationInfo + ): + if info.data.get("start_pipeline") and v is None: msg = "product_name must be set if computation shall start!" 
raise ValueError(msg) return v @field_validator("use_on_demand_clusters") @classmethod - def ensure_expected_options(cls, v, values): - if v is True and ("cluster_id" in values and values["cluster_id"] is not None): + def _ensure_expected_options(cls, v, info: ValidationInfo): + if v and info.data.get("cluster_id") is not None: msg = "cluster_id cannot be set if use_on_demand_clusters is set" raise ValueError(msg) return v diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 243144600e9b..8b38913c2466 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -2,6 +2,7 @@ from pathlib import Path from typing import Final, Literal, TypeAlias +from models_library.utils._original_fastapi_encoders import jsonable_encoder from pydantic import ( AnyUrl, BaseModel, @@ -224,6 +225,8 @@ class Cluster(BaseCluster): @model_validator(mode="before") @classmethod def check_owner_has_access_rights(cls, values): + values = jsonable_encoder(values) + is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID) owner_gid = values["owner"] @@ -231,11 +234,15 @@ def check_owner_has_access_rights(cls, values): access_rights = values.get("access_rights", values.get("accessRights", {})) if owner_gid not in access_rights: access_rights[owner_gid] = ( - CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS + CLUSTER_USER_RIGHTS.model_dump() + if is_default_cluster + else CLUSTER_ADMIN_RIGHTS.model_dump() ) # check owner has the expected access if access_rights[owner_gid] != ( - CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS + CLUSTER_USER_RIGHTS.model_dump() + if is_default_cluster + else CLUSTER_ADMIN_RIGHTS.model_dump() ): msg = f"the cluster owner access rights are incorrectly set: {access_rights[owner_gid]}" raise ValueError(msg) diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index b8134b3ec731..6e87f06b62e0 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -37,13 +37,7 @@ def from_key(cls, key: str) -> "DockerLabelKey": str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) ] - -class DockerPlacementConstraint(ConstrainedStr): - strip_whitespace = True - regex = re.compile( - r"^(?!-)(?![.])(?!.*--)(?!.*[.][.])[a-zA-Z0-9.-]*(? 
typing.AsyncIterator[S3Client]: exit_stack = contextlib.AsyncExitStack() session_client = session.client( "s3", - endpoint_url=s3_settings.S3_ENDPOINT, + endpoint_url=f"{s3_settings.S3_ENDPOINT}", aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION, config=Config(signature_version="s3v4"), ) assert isinstance(session_client, ClientCreatorContext) - client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] + client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] yield client diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 38b9d2bdf8d6..f8adf8cda9fd 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -4,6 +4,7 @@ import pytest from faker import Faker +from pydantic import AnyHttpUrl, TypeAdapter from pytest_simcore.helpers.docker import get_service_published_port from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -20,7 +21,9 @@ def minio_s3_settings( return S3Settings( S3_ACCESS_KEY=testing_environ_vars["S3_ACCESS_KEY"], S3_SECRET_KEY=testing_environ_vars["S3_SECRET_KEY"], - S3_ENDPOINT=f"http://{get_localhost_ip()}:{get_service_published_port('minio')}", + S3_ENDPOINT=TypeAdapter(AnyHttpUrl).validate_python( + f"http://{get_localhost_ip()}:{get_service_published_port('minio')}" + ), S3_BUCKET_NAME=testing_environ_vars["S3_BUCKET_NAME"], S3_REGION="us-east-1", ) @@ -31,5 +34,7 @@ def minio_s3_settings_envs( minio_s3_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True) + changed_envs: EnvVarsDict = minio_s3_settings.model_dump( + mode="json", exclude_unset=True + ) return setenvs_from_dict(monkeypatch, changed_envs) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index 2c0015251736..35b734ac0553 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -118,7 +118,7 @@ async def _wait_task_completion() -> None: logger.debug("%s, %s", f"{task_id=}", f"{result=}") yield result - except asyncio.TimeoutError as e: + except TimeoutError as e: await client.cancel_and_delete_task(task_id) raise TaskClientTimeoutError( task_id=task_id, diff --git a/packages/settings-library/src/settings_library/basic_types.py b/packages/settings-library/src/settings_library/basic_types.py index f4c745b22842..9d1d440d5ae6 100644 --- a/packages/settings-library/src/settings_library/basic_types.py +++ b/packages/settings-library/src/settings_library/basic_types.py @@ -3,7 +3,7 @@ # This is a minor evil to avoid the maintenance burden that creates # an extra dependency to a larger models_library (intra-repo library) -from enum import Enum +from enum import StrEnum from typing import Annotated, TypeAlias from pydantic import Field, StringConstraints @@ -16,14 +16,14 @@ VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")] -class LogLevel(str, Enum): +class LogLevel(StrEnum): DEBUG = "DEBUG" 
INFO = "INFO" WARNING = "WARNING" ERROR = "ERROR" -class BootMode(str, Enum): +class BootMode(StrEnum): """ Values taken by SC_BOOT_MODE environment variable set in Dockerfile and used during docker/boot.sh @@ -36,7 +36,7 @@ class BootMode(str, Enum): DEVELOPMENT = "development" -class BuildTargetEnum(str, Enum): +class BuildTargetEnum(StrEnum): """ Values taken by SC_BUILD_TARGET environment variable set in Dockerfile that defines the stage targeted in the diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 883d14c3bb4c..e65f02e6edcb 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -56,31 +56,27 @@ def _check_size(cls, v, info: ValidationInfo): @cached_property def dsn(self) -> str: - dsn: str = str( - PostgresDsn.build( # pylint: disable=no-member - scheme="postgresql", - username=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=self.POSTGRES_PORT, - path=f"{self.POSTGRES_DB}", - ) + url = PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", ) - return dsn + return f"{url}" @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn: str = str( - PostgresDsn.build( # pylint: disable=no-member - scheme="postgresql+asyncpg", - username=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=self.POSTGRES_PORT, - path=f"{self.POSTGRES_DB}", - ) + url = PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql+asyncpg", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", ) - return dsn + return f"{url}" @cached_property def dsn_with_query(self) -> str: diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 95268b419205..18f23860658b 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,20 +1,16 @@ -from typing import Annotated - -from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic import AnyHttpUrl, Field from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr -ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) - class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: Annotated[ - str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) - ] | None = Field(default=None, description="do not define if using standard AWS") + S3_ENDPOINT: AnyHttpUrl | None = Field( + default=None, description="do not define if using standard AWS" + ) S3_REGION: IDStr S3_SECRET_KEY: IDStr diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py index 0c0c03b03632..717a428a1ed6 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py @@ -50,7 +50,9 @@ async def cleanup_bucket_after_test( yield 
- async with session.client("s3", endpoint_url=aws_s3_cli_settings.AWS_S3_CLI_S3.S3_ENDPOINT) as s3_client: # type: ignore + async with session.client( + "s3", endpoint_url=f"{aws_s3_cli_settings.AWS_S3_CLI_S3.S3_ENDPOINT}" + ) as s3_client: # List all object versions paginator = s3_client.get_paginator("list_object_versions") async for page in paginator.paginate( diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py index 5d728aad51d3..c94fc524bec9 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py @@ -51,7 +51,9 @@ async def cleanup_bucket_after_test( yield - async with session.client("s3", endpoint_url=r_clone_settings.R_CLONE_S3.S3_ENDPOINT) as s3_client: # type: ignore + async with session.client( + "s3", endpoint_url=f"{r_clone_settings.R_CLONE_S3.S3_ENDPOINT}" + ) as s3_client: # List all object versions paginator = s3_client.get_paginator("list_object_versions") async for page in paginator.paginate( diff --git a/scripts/openapi-pydantic-models-generator.bash b/scripts/openapi-pydantic-models-generator.bash index 788cb90e7921..88e071a5273b 100755 --- a/scripts/openapi-pydantic-models-generator.bash +++ b/scripts/openapi-pydantic-models-generator.bash @@ -1,5 +1,4 @@ #!/bin/bash -#!/bin/bash # http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -o errexit set -o nounset @@ -27,6 +26,7 @@ ENTRYPOINT ["datamodel-codegen", \ "--use-standard-collections", \ "--use-union-operator", \ "--use-schema-description", \ + "--allow-population-by-field-name", \ "--use-subclass-enum", \ "--use-double-quotes", \ "--field-constraints", \ diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py index e5b78bd286a5..1016cfd5c5cd 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py @@ -75,7 +75,7 @@ def _s3fs_settings_from_s3_settings(s3_settings: S3Settings) -> S3FsSettingsDict # setting it for the us-east-1 creates issue when creating buckets (which we do in tests) s3fs_settings["client_kwargs"]["region_name"] = s3_settings.S3_REGION if s3_settings.S3_ENDPOINT is not None: - s3fs_settings["client_kwargs"]["endpoint_url"] = s3_settings.S3_ENDPOINT + s3fs_settings["client_kwargs"]["endpoint_url"] = f"{s3_settings.S3_ENDPOINT}" return s3fs_settings diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py index da51e6f9e26b..5edfb25aa200 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py @@ -6,7 +6,9 @@ from starlette.responses import JSONResponse -async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: +async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: + assert isinstance(exc, HTTPException) + return JSONResponse( content=jsonable_encoder({"errors": [exc.detail]}), status_code=exc.status_code ) @@ -14,7 +16,7 @@ async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: def make_http_error_handler_for_exception( status_code: int, exception_cls: type[BaseException] -) -> 
Callable[[Request, type[BaseException]], Awaitable[JSONResponse]]: +) -> Callable[[Request, Exception], Awaitable[JSONResponse]]: """ Produces a handler for BaseException-type exceptions which converts them into an error JSON response with a given status code @@ -22,7 +24,7 @@ def make_http_error_handler_for_exception( SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions """ - async def _http_error_handler(_: Request, exc: type[BaseException]) -> JSONResponse: + async def _http_error_handler(_: Request, exc: Exception) -> JSONResponse: assert isinstance(exc, exception_cls) # nosec return JSONResponse( content=jsonable_encoder({"errors": [str(exc)]}), status_code=status_code diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py index fb70f6791ac9..b3509cbbec9e 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py @@ -12,8 +12,10 @@ async def http422_error_handler( _: Request, - exc: Union[RequestValidationError, ValidationError], + exc: Exception, ) -> JSONResponse: + assert isinstance(exc, RequestValidationError | ValidationError) + return JSONResponse( content=jsonable_encoder({"errors": exc.errors()}), status_code=HTTP_422_UNPROCESSABLE_ENTITY, diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index b7f47b186e71..e624ed0785cb 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -35,7 +35,7 @@ from models_library.services import ServiceKeyVersion from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from servicelib.async_utils import run_sequentially_in_context from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient @@ -399,13 +399,11 @@ async def create_computation( # noqa: PLR0913 # pylint: disable=too-many-positi pipeline_details=await compute_pipeline_details( complete_dag, minimal_computational_dag, comp_tasks ), - url=parse_obj_as( - AnyHttpUrl, + url=TypeAdapter(AnyHttpUrl).validate_python( f"{request.url}/{computation.project_id}?user_id={computation.user_id}", ), stop_url=( - parse_obj_as( - AnyHttpUrl, + TypeAdapter(AnyHttpUrl).validate_python( f"{request.url}/{computation.project_id}:stop?user_id={computation.user_id}", ) if computation.start_pipeline @@ -510,9 +508,9 @@ async def get_computation( id=project_id, state=pipeline_state, pipeline_details=pipeline_details, - url=parse_obj_as(AnyHttpUrl, f"{request.url}"), + url=TypeAdapter(AnyHttpUrl).validate_python(f"{request.url}"), stop_url=( - parse_obj_as(AnyHttpUrl, f"{self_url}:stop?user_id={user_id}") + TypeAdapter(AnyHttpUrl).validate_python(f"{self_url}:stop?user_id={user_id}") if pipeline_state.is_running() else None ), @@ -588,7 +586,7 @@ async def stop_computation( pipeline_details=await compute_pipeline_details( complete_dag, pipeline_dag, tasks ), - url=parse_obj_as(AnyHttpUrl, f"{request.url}"), + url=TypeAdapter(AnyHttpUrl).validate_python(f"{request.url}"), stop_url=None, iteration=last_run.iteration 
if last_run else None, cluster_id=last_run.cluster_id if last_run else None, diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index 743d7ae2a66f..24db21cbd232 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -273,7 +273,7 @@ async def service_retrieve_data_on_ports( dynamic_services_settings.DYNAMIC_SCHEDULER ) timeout = httpx.Timeout( - dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), connect=dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, ) @@ -281,12 +281,12 @@ async def service_retrieve_data_on_ports( response = await services_client.request( "POST", f"{service_base_url}/retrieve", - content=retrieve_settings.json(by_alias=True), + content=retrieve_settings.model_dump_json(by_alias=True), timeout=timeout, ) # validate and return - return RetrieveDataOutEnveloped.parse_obj(response.json()) + return RetrieveDataOutEnveloped.model_validate(response.json()) @router.post( diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py index be75694f55c6..fb8f70bf62f4 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py @@ -5,7 +5,7 @@ import rich from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from rich.progress import ( BarColumn, Progress, @@ -106,7 +106,7 @@ async def async_close_and_save_service( client = Client( app=app, async_client=thin_dv2_localhost_client.client, - base_url=parse_obj_as(AnyHttpUrl, thin_dv2_localhost_client.BASE_ADDRESS), + base_url=f"{TypeAdapter(AnyHttpUrl).validate_python(thin_dv2_localhost_client.BASE_ADDRESS)}", ) if not skip_container_removal: diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index 893aed2504ed..028a882f268a 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -12,7 +12,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceType from models_library.services_enums import ServiceBootType, ServiceState -from pydantic import AnyHttpUrl, BaseModel, PositiveInt, parse_obj_as +from pydantic import AnyHttpUrl, BaseModel, PositiveInt, TypeAdapter from rich.live import Live from rich.table import Table from servicelib.services_utils import get_service_from_key @@ -58,7 +58,9 @@ def _get_dynamic_sidecar_endpoint( dynamic_sidecar_names = DynamicSidecarNamesHelper.make(NodeID(node_id)) hostname = dynamic_sidecar_names.service_name_dynamic_sidecar port = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT - url: AnyHttpUrl = parse_obj_as(AnyHttpUrl, f"http://{hostname}:{port}") # NOSONAR + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( + 
f"http://{hostname}:{port}" + ) return url diff --git a/services/director-v2/src/simcore_service_director_v2/constants.py b/services/director-v2/src/simcore_service_director_v2/constants.py index fc700254ed0b..b84865745dff 100644 --- a/services/director-v2/src/simcore_service_director_v2/constants.py +++ b/services/director-v2/src/simcore_service_director_v2/constants.py @@ -1,5 +1,4 @@ from typing import Final - # dynamic services DYNAMIC_SIDECAR_SERVICE_PREFIX: Final[str] = "dy-sidecar" @@ -14,7 +13,7 @@ # - itisfoundation # - 10.0.0.0:8473 (IP & Port) DYNAMIC_SIDECAR_DOCKER_IMAGE_RE = ( - r"(^([_a-zA-Z0-9:.-]+)/)?(dynamic-sidecar):([_a-zA-Z0-9.-]+$)" + r"^(([_a-zA-Z0-9:.-]+)/)?(dynamic-sidecar):([_a-zA-Z0-9.-]+)$" ) REGEX_DY_SERVICE_SIDECAR = rf"^{DYNAMIC_SIDECAR_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 416e5c853677..d611351f7828 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -9,6 +9,7 @@ from servicelib.fastapi.profiler_middleware import ProfilerMiddleware from servicelib.fastapi.tracing import setup_tracing from servicelib.logging_utils import config_all_loggers +from settings_library.basic_types import BootMode from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY from ..api.entrypoints import api_router @@ -127,12 +128,12 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: logging.getLogger(name).setLevel(quiet_level) app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), + debug=settings.SC_BOOT_MODE is BootMode.DEVELOPMENT, title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options(settings.SC_BOOT_MODE == BootMode.DEVELOPMENT), ) override_fastapi_openapi_method(app) app.state.settings = settings diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py index b4abd4f5b6e1..c3ed002edd6d 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py @@ -13,22 +13,22 @@ class DynamicServicesSettings(BaseCustomSettings): default=True, description="Enables/Disables the dynamic_sidecar submodule" ) - DYNAMIC_SIDECAR: DynamicSidecarSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR: DynamicSidecarSettings = Field(json_schema_extra={"auto_default_from_env": True}) DYNAMIC_SCHEDULER: DynamicServicesSchedulerSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PROXY_SETTINGS: DynamicSidecarProxySettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_EGRESS_PROXY_SETTINGS: EgressProxySettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - WEBSERVER_SETTINGS: WebServerSettings = Field(auto_default_from_env=True) + 
WEBSERVER_SETTINGS: WebServerSettings = Field(json_schema_extra={"auto_default_from_env": True}) diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py index 74810cdd101b..5072a365af6f 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py @@ -103,8 +103,8 @@ class DynamicServicesSchedulerSettings(BaseCustomSettings): ), ) - DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: PositiveFloat = Field( - 60.0 * _MINUTE, + DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: timedelta = Field( + timedelta(hours=1), description=( "When saving and restoring the state of a dynamic service, depending on the payload " "some services take longer or shorter to save and restore. Across the " diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py index 98ce21fc6a49..434c3e0941f6 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py @@ -9,7 +9,7 @@ ensure_unique_dict_values_validator, ensure_unique_list_values_validator, ) -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, Field, PositiveInt, ValidationInfo, field_validator from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings @@ -45,10 +45,10 @@ class RCloneSettings(SettingsLibraryRCloneSettings): description="VFS operation mode, defines how and when the disk cache is synced", ) - @validator("R_CLONE_POLL_INTERVAL_SECONDS") + @field_validator("R_CLONE_POLL_INTERVAL_SECONDS") @classmethod - def enforce_r_clone_requirement(cls, v: int, values) -> PositiveInt: - dir_cache_time = values["R_CLONE_DIR_CACHE_TIME_SECONDS"] + def enforce_r_clone_requirement(cls, v: int, info: ValidationInfo) -> PositiveInt: + dir_cache_time = info.data["R_CLONE_DIR_CACHE_TIME_SECONDS"] if v >= dir_cache_time: msg = f"R_CLONE_POLL_INTERVAL_SECONDS={v} must be lower than R_CLONE_DIR_CACHE_TIME_SECONDS={dir_cache_time}" raise ValueError(msg) @@ -60,7 +60,7 @@ class PlacementSettings(BaseCustomSettings): # https://docs.docker.com/engine/swarm/services/#control-service-placement. 
DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS: list[DockerPlacementConstraint] = Field( default_factory=list, - example='["node.labels.region==east", "one!=yes"]', + examples=['["node.labels.region==east", "one!=yes"]'], ) DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ @@ -72,20 +72,18 @@ class PlacementSettings(BaseCustomSettings): "see https://github.com/ITISFoundation/osparc-simcore/issues/5250 " "When `None` (default), uses generic resources" ), - example='{"AIRAM": "node.labels.custom==true"}', + examples=['{"AIRAM": "node.labels.custom==true"}'], ) - _unique_custom_constraints = validator( + _unique_custom_constraints = field_validator( "DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS", - allow_reuse=True, )(ensure_unique_list_values_validator) - _unique_resource_placement_constraints_substitutions = validator( + _unique_resource_placement_constraints_substitutions = field_validator( "DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", - allow_reuse=True, )(ensure_unique_dict_values_validator) - @validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @field_validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") @classmethod def warn_if_any_values_provided(cls, value: dict) -> dict: if len(value) > 0: @@ -101,40 +99,46 @@ def warn_if_any_values_provided(cls, value: dict) -> dict: class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED: bool = Field( # doc: https://docs.docker.com/engine/swarm/networking/#configure-service-discovery default=False, - env=["DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED" + ), description="dynamic-sidecar's service 'endpoint_spec' with {'Mode': 'dnsrr'}", ) DYNAMIC_SIDECAR_SC_BOOT_MODE: BootModeEnum = Field( ..., description="Boot mode used for the dynamic-sidecar services" "By defaults, it uses the same boot mode set for the director-v2", - env=["DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"], + validation_alias=AliasChoices("DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"), ) DYNAMIC_SIDECAR_LOG_LEVEL: str = Field( "WARNING", description="log level of the dynamic sidecar" "If defined, it captures global env vars LOG_LEVEL and LOGLEVEL from the director-v2 service", - env=["DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) DYNAMIC_SIDECAR_IMAGE: str = Field( ..., - regex=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, + pattern=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, description="used by the director to start a specific version of the dynamic-sidecar", ) - DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS: AwsS3CliSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_EFS_SETTINGS: AwsEfsSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) # @@ -144,7 +148,7 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_MOUNT_PATH_DEV: Path | None = Field( None, 
description="Host path to the dynamic-sidecar project. Used as source path to mount to the dynamic-sidecar [DEVELOPMENT ONLY]", - example="osparc-simcore/services/dynamic-sidecar", + examples=["osparc-simcore/services/dynamic-sidecar"], ) DYNAMIC_SIDECAR_PORT: PortInt = Field( @@ -157,12 +161,16 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): description="Publishes the service on localhost for debuging and testing [DEVELOPMENT ONLY]" "Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc " "where 30023 is the host published port", + validate_default=True, ) - @validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", pre=True) + @field_validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", mode="before") @classmethod - def auto_disable_if_production(cls, v, values): - if v and values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION: + def auto_disable_if_production(cls, v, info: ValidationInfo): + if ( + v + and info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION + ): _logger.warning( "In production DYNAMIC_SIDECAR_MOUNT_PATH_DEV cannot be set to %s, enforcing None", v, @@ -170,22 +178,22 @@ def auto_disable_if_production(cls, v, values): return None return v - @validator("DYNAMIC_SIDECAR_EXPOSE_PORT", pre=True, always=True) + @field_validator("DYNAMIC_SIDECAR_EXPOSE_PORT", mode="before") @classmethod - def auto_enable_if_development(cls, v, values): + def auto_enable_if_development(cls, v, info: ValidationInfo): if ( - boot_mode := values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") + boot_mode := info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") ) and boot_mode.is_devel_mode(): # Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc return True return v - @validator("DYNAMIC_SIDECAR_IMAGE", pre=True) + @field_validator("DYNAMIC_SIDECAR_IMAGE", mode="before") @classmethod def strip_leading_slashes(cls, v: str) -> str: return v.lstrip("/") - @validator("DYNAMIC_SIDECAR_LOG_LEVEL") + @field_validator("DYNAMIC_SIDECAR_LOG_LEVEL") @classmethod def _validate_log_level(cls, value) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index e8c47a934d3c..213cd4744bf9 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -19,18 +19,15 @@ } """ +from common_library.errors_classes import OsparcErrorMixin from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from pydantic.errors import PydanticErrorMixin class DirectorError(Exception): """Basic exception""" - def message(self) -> str: - return f"{self.args[0]}" - class ConfigurationError(DirectorError): """An error in the director-v2 configuration""" @@ -114,15 +111,15 @@ def __init__(self, pipeline_id: str): super().__init__(f"pipeline {pipeline_id} not found") -class ComputationalRunNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalRunNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational run not found" -class ComputationalTaskNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalTaskNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational task {node_id} not found" -class WalletNotEnoughCreditsError(PydanticErrorMixin, DirectorError): +class 
WalletNotEnoughCreditsError(OsparcErrorMixin, DirectorError): msg_template = "Wallet '{wallet_name}' has {wallet_credit_amount} credits." @@ -132,8 +129,6 @@ class WalletNotEnoughCreditsError(PydanticErrorMixin, DirectorError): class SchedulerError(DirectorError): - code = "scheduler_error" - def __init__(self, msg: str | None = None): super().__init__(msg or "Unexpected error in the scheduler") @@ -148,6 +143,8 @@ def __init__(self, pipeline_id: str, msg: str | None = None): class TaskSchedulingError(SchedulerError): """A task cannot be scheduled""" + code: str = "task scheduler error" + def __init__(self, project_id: ProjectID, node_id: NodeID, msg: str | None = None): super().__init__(msg=msg) self.project_id = project_id @@ -161,7 +158,7 @@ def get_errors(self) -> list[ErrorDict]: f"{self.project_id}", f"{self.node_id}", ), - "msg": self.message(), + "msg": f"{self.args[0]}", "type": self.code, }, ] @@ -227,39 +224,33 @@ def get_errors(self) -> list[ErrorDict]: return value_errors -class ComputationalSchedulerChangedError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.scheduler_changed" +class ComputationalSchedulerChangedError(OsparcErrorMixin, SchedulerError): msg_template = "The dask scheduler ID changed from '{original_scheduler_id}' to '{current_scheduler_id}'" -class ComputationalBackendNotConnectedError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.not_connected" +class ComputationalBackendNotConnectedError(OsparcErrorMixin, SchedulerError): msg_template = "The dask computational backend is not connected" -class ComputationalBackendNoS3AccessError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendNoS3AccessError(OsparcErrorMixin, SchedulerError): msg_template = "The S3 backend is not ready, please try again later" -class ComputationalBackendTaskNotFoundError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.task_not_found" +class ComputationalBackendTaskNotFoundError(OsparcErrorMixin, SchedulerError): msg_template = ( "The dask computational backend does not know about the task '{job_id}'" ) -class ComputationalBackendTaskResultsNotReadyError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.task_result_not_ready" +class ComputationalBackendTaskResultsNotReadyError(OsparcErrorMixin, SchedulerError): msg_template = "The task result is not ready yet for job '{job_id}'" -class ClustersKeeperNotAvailableError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.clusters_keeper_not_available" +class ClustersKeeperNotAvailableError(OsparcErrorMixin, SchedulerError): msg_template = "clusters-keeper service is not available!" -class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerError): - code = "computational_backend.on_demand_cluster.not_ready" +class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError): msg_template = ( "The on demand computational cluster is not ready 'est. 
remaining time: {eta}'" ) @@ -268,16 +259,15 @@ class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerErr # # SCHEDULER/CLUSTER ERRORS # -class ClusterNotFoundError(PydanticErrorMixin, SchedulerError): - code = "cluster.not_found" +class ClusterNotFoundError(OsparcErrorMixin, SchedulerError): msg_template = "The cluster '{cluster_id}' not found" -class ClusterAccessForbiddenError(PydanticErrorMixin, SchedulerError): +class ClusterAccessForbiddenError(OsparcErrorMixin, SchedulerError): msg_template = "Insufficient rights to access cluster '{cluster_id}'" -class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): +class ClusterInvalidOperationError(OsparcErrorMixin, SchedulerError): msg_template = "Invalid operation on cluster '{cluster_id}'" @@ -286,25 +276,21 @@ class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): # -class DaskClientRequestError(PydanticErrorMixin, SchedulerError): - code = "dask_client.request.error" +class DaskClientRequestError(OsparcErrorMixin, SchedulerError): msg_template = ( "The dask client to cluster on '{endpoint}' did an invalid request '{error}'" ) -class DaskClusterError(PydanticErrorMixin, SchedulerError): - code = "cluster.error" +class DaskClusterError(OsparcErrorMixin, SchedulerError): msg_template = "The dask cluster on '{endpoint}' encountered an error: '{error}'" -class DaskGatewayServerError(PydanticErrorMixin, SchedulerError): - code = "gateway.error" +class DaskGatewayServerError(OsparcErrorMixin, SchedulerError): msg_template = "The dask gateway on '{endpoint}' encountered an error: '{error}'" -class DaskClientAcquisisitonError(PydanticErrorMixin, SchedulerError): - code = "dask_client.acquisition.error" +class DaskClientAcquisisitonError(OsparcErrorMixin, SchedulerError): msg_template = ( "The dask client to cluster '{cluster}' encountered an error '{error}'" ) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 717694f7f25b..062e1ec4d5f2 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -6,13 +6,7 @@ from functools import cached_property from common_library.pydantic_validators import validate_numeric_string_as_timedelta -from models_library.basic_types import ( - BootModeEnum, - BuildTargetEnum, - LogLevel, - PortInt, - VersionTag, -) +from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.clusters import ( DEFAULT_CLUSTER_ID, Cluster, @@ -20,8 +14,16 @@ ClusterTypeInModel, NoAuthentication, ) -from pydantic import AliasChoices, AnyHttpUrl, AnyUrl, Field, NonNegativeInt, validator +from pydantic import ( + AliasChoices, + AnyHttpUrl, + AnyUrl, + Field, + NonNegativeInt, + field_validator, +) from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings from settings_library.docker_registry import RegistrySettings @@ -56,13 +58,13 @@ class DirectorV0Settings(BaseCustomSettings): @cached_property def endpoint(self) -> str: - url: str = AnyHttpUrl.build( + url = AnyHttpUrl.build( # pylint: disable=no-member scheme="http", host=self.DIRECTOR_HOST, - port=f"{self.DIRECTOR_PORT}", - path=f"/{self.DIRECTOR_V0_VTAG}", + port=self.DIRECTOR_PORT, + 
path=f"{self.DIRECTOR_V0_VTAG}", ) - return url + return f"{url}" class ComputationalBackendSettings(BaseCustomSettings): @@ -108,7 +110,7 @@ def default_cluster(self) -> Cluster: type=ClusterTypeInModel.ON_PREMISE, ) - @validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", pre=True) + @field_validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", mode="before") @classmethod def _empty_auth_is_none(cls, v): if not v: @@ -116,11 +118,7 @@ def _empty_auth_is_none(cls, v): return v -class AppSettings(BaseCustomSettings, MixinLoggingSettings): - # docker environs - SC_BOOT_MODE: BootModeEnum - SC_BOOT_TARGET: BuildTargetEnum | None - +class AppSettings(BaseApplicationSettings, MixinLoggingSettings): LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, validation_alias=AliasChoices("DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), @@ -166,10 +164,10 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DIRECTOR_V2_PROFILING: bool = False - DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None + DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None) # extras - SWARM_STACK_NAME: str = Field("undefined-please-check", env="SWARM_STACK_NAME") + SWARM_STACK_NAME: str = Field(default="undefined-please-check") SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" @@ -191,42 +189,56 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): ) # debug settings - CLIENT_REQUEST: ClientRequestSettings = Field(auto_default_from_env=True) + CLIENT_REQUEST: ClientRequestSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) # App modules settings --------------------- - DIRECTOR_V2_STORAGE: StorageSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_STORAGE: StorageSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - DIRECTOR_V2_CATALOG: CatalogSettings | None = Field(auto_default_from_env=True) + DIRECTOR_V2_CATALOG: CatalogSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DIRECTOR_V0: DirectorV0Settings = Field(auto_default_from_env=True) + DIRECTOR_V0: DirectorV0Settings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DYNAMIC_SERVICES: DynamicServicesSettings = Field(auto_default_from_env=True) + DYNAMIC_SERVICES: DynamicServicesSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - REDIS: RedisSettings = Field(auto_default_from_env=True) + REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True}) - DIRECTOR_V2_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack") DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, 
description="settings for the private registry deployed with the platform", ) DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( - description="public DockerHub registry settings" + default=None, description="public DockerHub registry settings" ) DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="resource usage tracker service client's plugin", ) @@ -235,10 +247,11 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): description="Base URL used to access the public api e.g. http://127.0.0.1:6000 for development or https://api.osparc.io", ) DIRECTOR_V2_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", ) - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: str) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py index 6e156607ae68..5de823d826b3 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py @@ -1,10 +1,10 @@ from contextlib import suppress -from typing import Any, ClassVar, cast +from typing import cast import networkx as nx from models_library.projects import ProjectID from models_library.projects_state import RunningState -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, field_validator from simcore_postgres_database.models.comp_pipeline import StateType from ..utils.db import DB_TO_RUNNING_STATE @@ -15,7 +15,7 @@ class CompPipelineAtDB(BaseModel): dag_adjacency_list: dict[str, list[str]] # json serialization issue if using NodeID state: RunningState - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod def convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -27,7 +27,7 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("dag_adjacency_list", pre=True) + @field_validator("dag_adjacency_list", mode="before") @classmethod def auto_convert_dag(cls, v): # this enforcement is here because the serialization using json is not happy with non str Dict keys, also comparison gets funny if the lists are having sometimes UUIDs or str. 
@@ -42,10 +42,9 @@ def get_graph(self) -> nx.DiGraph: ), ) - class Config: - orm_mode = True - - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -60,4 +59,5 @@ class Config: "state": "NOT_STARTED", } ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 1d7800b97884..9b466bbc49fc 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -1,14 +1,14 @@ import datetime from contextlib import suppress -from typing import Any, ClassVar, TypedDict from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.users import UserID -from pydantic import BaseModel, PositiveInt, validator +from pydantic import BaseModel, ConfigDict, PositiveInt, field_validator from simcore_postgres_database.models.comp_pipeline import StateType +from typing_extensions import TypedDict from ..utils.db import DB_TO_RUNNING_STATE @@ -44,12 +44,12 @@ class CompRunsAtDB(BaseModel): result: RunningState created: datetime.datetime modified: datetime.datetime - started: datetime.datetime | None - ended: datetime.datetime | None + started: datetime.datetime | None = None + ended: datetime.datetime | None = None metadata: RunMetadataDict = RunMetadataDict() use_on_demand_clusters: bool - @validator("result", pre=True) + @field_validator("result", mode="before") @classmethod def convert_result_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -61,30 +61,30 @@ def convert_result_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("cluster_id", pre=True) + @field_validator("cluster_id", mode="before") @classmethod def convert_null_to_default_cluster_id(cls, v): if v is None: v = DEFAULT_CLUSTER_ID return v - @validator("created", "modified", "started", "ended") + @field_validator("created", "modified", "started", "ended") @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.timezone.utc) + v = v.replace(tzinfo=datetime.UTC) return v - @validator("metadata", pre=True) + @field_validator("metadata", mode="before") @classmethod def convert_null_to_empty_metadata(cls, v): if v is None: v = RunMetadataDict() return v - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -107,7 +107,7 @@ class Config: "result": "SUCCESS", "created": "2021-03-01 13:07:34.19161", "modified": "2021-03-01 13:07:34.19161", - "started": "2021-03-01 8:07:34.19161", + "started": "2021-03-01 08:07:34.19161", "ended": "2021-03-01 13:07:34.10", "metadata": { "node_id_names_map": {}, @@ -119,4 +119,5 @@ class Config: "use_on_demand_clusters": False, }, ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 5895411b2d06..e45e7aea8967 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py 
+++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,6 +1,6 @@ -import datetime +import datetime as dt from contextlib import suppress -from typing import Any, ClassVar +from typing import Any from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements @@ -17,11 +17,12 @@ from pydantic import ( BaseModel, ByteSize, - Extra, + ConfigDict, Field, PositiveInt, - parse_obj_as, - validator, + TypeAdapter, + ValidationInfo, + field_validator, ) from simcore_postgres_database.models.comp_pipeline import StateType from simcore_postgres_database.models.comp_tasks import NodeClass @@ -30,8 +31,8 @@ class Image(BaseModel): - name: str = Field(..., regex=SERVICE_KEY_RE.pattern) - tag: str = Field(..., regex=SIMPLE_VERSION_RE) + name: str = Field(..., pattern=SERVICE_KEY_RE.pattern) + tag: str = Field(..., pattern=SIMPLE_VERSION_RE) requires_gpu: bool | None = Field( default=None, deprecated=True, description="Use instead node_requirements" @@ -40,7 +41,9 @@ class Image(BaseModel): default=None, deprecated=True, description="Use instead node_requirements" ) node_requirements: NodeRequirements | None = Field( - default=None, description="the requirements for the service to run on a node" + default=None, + description="the requirements for the service to run on a node", + validate_default=True, ) boot_mode: BootMode = BootMode.CPU command: list[str] = Field( @@ -53,9 +56,9 @@ class Image(BaseModel): default_factory=dict, description="The environment to use to run the service" ) - @validator("node_requirements", pre=True, always=True) + @field_validator("node_requirements", mode="before") @classmethod - def migrate_from_requirements(cls, v, values): + def _migrate_from_requirements(cls, v, info: ValidationInfo): if v is None: # NOTE: 'node_requirements' field's default=None although is NOT declared as nullable. # Then this validator with `pre=True, always=True` is used to create a default @@ -63,21 +66,23 @@ def migrate_from_requirements(cls, v, values): # This strategy guarantees backwards compatibility v = NodeRequirements( CPU=1.0, - GPU=1 if values.get("requires_gpu") else 0, - RAM=parse_obj_as(ByteSize, "128 MiB"), + GPU=1 if info.data.get("requires_gpu") else 0, + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), ) return v - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ + "examples": [ # type: ignore { "name": "simcore/services/dynamic/jupyter-octave-python-math", "tag": "1.3.1", "node_requirements": node_req_example, } - for node_req_example in NodeRequirements.Config.schema_extra["examples"] + for node_req_example in NodeRequirements.model_config[ # type: ignore + "json_schema_extra" + ]["examples"] ] + # old version @@ -89,14 +94,14 @@ class Config: "requires_mpi": False, } ] - } + }, + ) -# NOTE: for a long time defaultValue field was added to ServiceOutput wrongly in the DB. -# this flags allows parsing of the outputs without error. This MUST not leave the director-v2! class _ServiceOutputOverride(ServiceOutput): - class Config(ServiceOutput.Config): - extra = Extra.ignore + # NOTE: for a long time defaultValue field was added to ServiceOutput wrongly in the DB. + # this flags allows parsing of the outputs without error. This MUST not leave the director-v2! 
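+    # Illustrative sketch only (hypothetical payload): assuming `stored_output` is an
+    # otherwise valid ServiceOutput payload read from the DB, with extra="ignore"
+    #   _ServiceOutputOverride.model_validate({**stored_output, "defaultValue": 42})
+    # parses without raising and the legacy "defaultValue" key is silently dropped,
+    # which is the tolerance described in the NOTE above.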
+ model_config = ConfigDict(extra="ignore") _ServiceOutputsOverride = dict[ServicePortKey, _ServiceOutputOverride] @@ -105,10 +110,7 @@ class Config(ServiceOutput.Config): class NodeSchema(BaseModel): inputs: ServiceInputsDict = Field(..., description="the inputs scheam") outputs: _ServiceOutputsOverride = Field(..., description="the outputs schema") - - class Config: - extra = Extra.forbid - orm_mode = True + model_config = ConfigDict(extra="forbid", from_attributes=True) class CompTaskAtDB(BaseModel): @@ -125,32 +127,32 @@ class CompTaskAtDB(BaseModel): description="the hex digest of the resolved inputs +outputs hash at the time when the last outputs were generated", ) image: Image - submit: datetime.datetime - start: datetime.datetime | None = Field(default=None) - end: datetime.datetime | None = Field(default=None) + submit: dt.datetime + start: dt.datetime | None = None + end: dt.datetime | None = None state: RunningState - task_id: PositiveInt | None = Field(default=None) + task_id: PositiveInt | None = None internal_id: PositiveInt node_class: NodeClass - errors: list[ErrorDict] | None = Field(default=None) + errors: list[ErrorDict] | None = None progress: float | None = Field( default=None, ge=0.0, le=1.0, description="current progress of the task if available", ) - last_heartbeat: datetime.datetime | None = Field( + last_heartbeat: dt.datetime | None = Field( ..., description="Last time the running task was checked by the backend" ) - created: datetime.datetime - modified: datetime.datetime + created: dt.datetime + modified: dt.datetime # Additional information about price and hardware (ex. AWS EC2 instance type) pricing_info: dict | None hardware_info: HardwareInfo - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod - def convert_state_from_state_type_enum_if_needed(cls, v): + def _convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): # try to convert to a StateType, if it fails the validations will continue # and pydantic will try to convert it to a RunninState later on @@ -160,30 +162,32 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("start", "end", "submit") + @field_validator("start", "end", "submit") @classmethod - def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: + def _ensure_utc(cls, v: dt.datetime | None) -> dt.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.timezone.utc) + v = v.replace(tzinfo=dt.UTC) return v - @validator("hardware_info", pre=True) + @field_validator("hardware_info", mode="before") @classmethod - def backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: + def _backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: if v is None: return HardwareInfo(aws_ec2_instances=[]) return v def to_db_model(self, **exclusion_rules) -> dict[str, Any]: - comp_task_dict = self.dict(by_alias=True, exclude_unset=True, **exclusion_rules) + comp_task_dict = self.model_dump( + mode="json", by_alias=True, exclude_unset=True, **exclusion_rules + ) if "state" in comp_task_dict: comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value return comp_task_dict - class Config: - extra = Extra.forbid - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -228,15 +232,16 @@ 
class Config: "state": "NOT_STARTED", "progress": 0.44, "last_heartbeat": None, - "created": "2022-05-20 13:28:31.139+00", - "modified": "2023-06-23 15:58:32.833081+00", + "created": "2022-05-20 13:28:31.139", + "modified": "2023-06-23 15:58:32.833081", "pricing_info": { "pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], + "hardware_info": next(iter(HardwareInfo.model_config["json_schema_extra"]["examples"])), # type: ignore } - for image_example in Image.Config.schema_extra["examples"] + for image_example in Image.model_config["json_schema_extra"]["examples"] # type: ignore ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 17408a202ec2..0a2322c11c1f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -1,12 +1,11 @@ import json import logging -import re from collections.abc import Mapping from datetime import datetime from enum import Enum from functools import cached_property from pathlib import Path -from typing import Any, TypeAlias +from typing import Annotated, Any, TypeAlias from uuid import UUID import arrow @@ -31,11 +30,11 @@ from pydantic import ( AnyHttpUrl, BaseModel, - ConstrainedStr, - Extra, + ConfigDict, Field, - parse_obj_as, - validator, + StringConstraints, + TypeAdapter, + field_validator, ) from servicelib.exception_utils import DelayedExceptionHandler @@ -55,18 +54,17 @@ DockerStatus: TypeAlias = Status2 -class DockerId(ConstrainedStr): - max_length = 25 - regex = re.compile(r"[A-Za-z0-9]{25}") - +DockerId: TypeAlias = Annotated[ + str, StringConstraints(max_length=25, pattern=r"[A-Za-z0-9]{25}") +] ServiceId: TypeAlias = DockerId NetworkId: TypeAlias = DockerId -class ServiceName(ConstrainedStr): - strip_whitespace = True - min_length = 2 +ServiceName: TypeAlias = Annotated[ + str, StringConstraints(min_length=2, strip_whitespace=True) +] logger = logging.getLogger() @@ -129,8 +127,8 @@ class DockerContainerInspect(BaseModel): @cached_property def status(self) -> DockerStatus: - assert self.container_state.Status # nosec - result: DockerStatus = self.container_state.Status + assert self.container_state.status # nosec + result: DockerStatus = self.container_state.status return result @classmethod @@ -141,9 +139,7 @@ def from_container(cls, container: dict[str, Any]) -> "DockerContainerInspect": id=container["Id"], ) - class Config: - keep_untouched = (cached_property,) - allow_mutation = False + model_config = ConfigDict(ignored_types=(cached_property,), frozen=True) class ServiceRemovalState(BaseModel): @@ -202,7 +198,7 @@ class DynamicSidecar(BaseModel): is_ready: bool = Field( default=False, - scription=( + description=( "is True while the health check on the dynamic-sidecar is responding. 
" "Meaning that the dynamic-sidecar is reachable and can accept requests" ), @@ -224,7 +220,7 @@ def compose_spec_submitted(self) -> bool: containers_inspect: list[DockerContainerInspect] = Field( [], - scription="docker inspect results from all the container ran at regular intervals", + description="docker inspect results from all the container ran at regular intervals", ) was_dynamic_sidecar_started: bool = False @@ -279,7 +275,7 @@ def compose_spec_submitted(self) -> bool: ) instrumentation: ServicesInstrumentation = Field( - default_factory=lambda: ServicesInstrumentation.parse_obj({}), + default_factory=lambda: ServicesInstrumentation.model_validate({}), description="keeps track times for various operations", ) @@ -317,9 +313,7 @@ def compose_spec_submitted(self) -> bool: "this value will be set to None." ), ) - - class Config: - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) class DynamicSidecarNamesHelper(BaseModel): @@ -337,25 +331,25 @@ class DynamicSidecarNamesHelper(BaseModel): service_name_dynamic_sidecar: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="unique name of the dynamic-sidecar service", ) proxy_service_name: str = Field( ..., - regex=REGEX_DY_SERVICE_PROXY, + pattern=REGEX_DY_SERVICE_PROXY, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="name of the proxy for the dynamic-sidecar", ) simcore_traefik_zone: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="unique name for the traefik constraints", ) dynamic_sidecar_network_name: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="based on the node_id and project_id", ) @@ -392,15 +386,13 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): hostname: str = Field( ..., description="dy-sidecar's service hostname (provided by docker-swarm)" ) - port: PortInt = Field( - default=parse_obj_as(PortInt, 8000), description="dynamic-sidecar port" - ) + port: PortInt = Field(default=8000, description="dynamic-sidecar port") @property def endpoint(self) -> AnyHttpUrl: """endpoint where all the services are exposed""" - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, f"http://{self.hostname}:{self.port}" # NOSONAR + url = AnyHttpUrl.build( # pylint: disable=no-member + scheme="http", host=self.hostname, port=self.port ) return url @@ -425,7 +417,7 @@ def endpoint(self) -> AnyHttpUrl: ) service_port: PortInt = Field( - default=parse_obj_as(PortInt, TEMPORARY_PORT_NUMBER), + default=TEMPORARY_PORT_NUMBER, description=( "port where the service is exposed defined by the service; " "NOTE: temporary default because it will be changed once the service " @@ -470,8 +462,7 @@ def endpoint(self) -> AnyHttpUrl: def get_proxy_endpoint(self) -> AnyHttpUrl: """get the endpoint where the proxy's admin API is exposed""" assert self.proxy_admin_api_port # nosec - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( f"http://{self.proxy_service_name}:{self.proxy_admin_api_port}", # nosec # NOSONAR ) return url @@ -528,9 +519,9 @@ def from_http_request( } if run_id: obj_dict["run_id"] = run_id - return cls.parse_obj(obj_dict) + return cls.model_validate(obj_dict) - @validator("user_preferences_path", pre=True) + @field_validator("user_preferences_path", mode="before") @classmethod def strip_path_serialization_to_none(cls, v): if v == "None": @@ 
-542,15 +533,13 @@ def from_service_inspect( cls, service_inspect: Mapping[str, Any] ) -> "SchedulerData": labels = service_inspect["Spec"]["Labels"] - return cls.parse_raw(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) + return cls.model_validate_json(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) def as_label_data(self) -> str: # compose_spec needs to be json encoded before encoding it to json # and storing it in the label - return self.copy( + return self.model_copy( update={"compose_spec": json.dumps(self.compose_spec)}, deep=True - ).json() + ).model_dump_json() - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", populate_by_name=True) diff --git a/services/director-v2/src/simcore_service_director_v2/models/pricing.py b/services/director-v2/src/simcore_service_director_v2/models/pricing.py index 4aabef7cd10b..43ade424954e 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/pricing.py +++ b/services/director-v2/src/simcore_service_director_v2/models/pricing.py @@ -1,12 +1,11 @@ from decimal import Decimal -from typing import Any, ClassVar from models_library.resource_tracker import ( PricingPlanId, PricingUnitCostId, PricingUnitId, ) -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class PricingInfo(BaseModel): @@ -15,14 +14,15 @@ class PricingInfo(BaseModel): pricing_unit_cost_id: PricingUnitCostId pricing_unit_cost: Decimal - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1, - "pricing_unit_cost": Decimal(10), + "pricing_unit_cost": Decimal(10), # type: ignore[dict-item] } ] } + ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py index f5e378afa43d..2a064d14642a 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py @@ -8,7 +8,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.services_resources import ServiceResourcesDict from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from settings_library.catalog import CatalogSettings from ..utils.client_decorators import handle_errors, handle_retry @@ -90,8 +90,8 @@ async def get_service_resources( ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - json_response: ServiceResourcesDict = parse_obj_as( - ServiceResourcesDict, resp.json() + json_response: ServiceResourcesDict = TypeAdapter(ServiceResourcesDict).validate_python( + resp.json() ) return json_response raise HTTPException(status_code=resp.status_code, detail=resp.content) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py index 51fb3b1a3fb0..abc976386087 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py @@ -324,7 +324,7 @@ async def _process_task_result( async def _task_progress_change_handler(self, event: str) -> None: with log_catch(_logger, reraise=False): - task_progress_event = 
TaskProgressEvent.parse_raw(event) + task_progress_event = TaskProgressEvent.model_validate_json(event) _logger.debug("received task progress update: %s", task_progress_event) user_id = task_progress_event.task_owner.user_id project_id = task_progress_event.task_owner.project_id @@ -355,7 +355,7 @@ async def _task_progress_change_handler(self, event: str) -> None: async def _task_log_change_handler(self, event: str) -> None: with log_catch(_logger, reraise=False): - task_log_event = TaskLogEvent.parse_raw(event) + task_log_event = TaskLogEvent.model_validate_json(event) _logger.debug("received task log update: %s", task_log_event) await publish_service_log( self.rabbitmq_client, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index fcba4ad1fd36..e28e48f82f7b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -48,7 +48,7 @@ from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo from models_library.users import UserID -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from pydantic.networks import AnyUrl from servicelib.logging_utils import log_catch from settings_library.s3 import S3Settings @@ -583,5 +583,5 @@ def _get_worker_used_resources( assert dashboard_link # nosec return ClusterDetails( scheduler=Scheduler(status=scheduler_status, **scheduler_info), - dashboard_link=parse_obj_as(AnyUrl, dashboard_link), + dashboard_link=TypeAdapter(AnyUrl).validate_python(dashboard_link), ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py index 214c03b9dca8..30381110173f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py @@ -108,9 +108,9 @@ async def _compute_user_access_rights( ) and (primary_grp_rights := cluster.access_rights.get(primary_group_row.gid)): return primary_grp_rights - solved_rights = CLUSTER_NO_RIGHTS.dict() + solved_rights = CLUSTER_NO_RIGHTS.model_dump() for group_row in filter(lambda ugrp: ugrp[1] != GroupType.PRIMARY, user_groups): - grp_access = cluster.access_rights.get(group_row.gid, CLUSTER_NO_RIGHTS).dict() + grp_access = cluster.access_rights.get(group_row.gid, CLUSTER_NO_RIGHTS).model_dump() for operation in ["read", "write", "delete"]: solved_rights[operation] |= grp_access[operation] return ClusterAccessRights(**solved_rights) @@ -250,14 +250,14 @@ async def update_cluster( # pylint: disable=too-many-branches if updated_cluster.access_rights: for grp, rights in resolved_access_rights.items(): insert_stmt = pg_insert(cluster_to_groups).values( - **rights.dict(by_alias=True), gid=grp, cluster_id=the_cluster.id + **rights.model_dump(by_alias=True), gid=grp, cluster_id=the_cluster.id ) on_update_stmt = insert_stmt.on_conflict_do_update( index_elements=[ cluster_to_groups.c.cluster_id, cluster_to_groups.c.gid, ], - set_=rights.dict(by_alias=True), + set_=rights.model_dump(by_alias=True), ) await conn.execute(on_update_stmt) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py 
b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py index 3c24694c2fd0..37129141f6d0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py @@ -26,7 +26,7 @@ async def get_pipeline(self, project_id: ProjectID) -> CompPipelineAtDB: row: RowProxy | None = await result.fetchone() if not row: raise PipelineNotFoundError(str(project_id)) - return CompPipelineAtDB.from_orm(row) + return CompPipelineAtDB.model_validate(row) async def upsert_pipeline( self, @@ -39,14 +39,14 @@ async def upsert_pipeline( dag_adjacency_list=nx.to_dict_of_lists(dag_graph), state=RunningState.PUBLISHED if publish else RunningState.NOT_STARTED, ) - insert_stmt = insert(comp_pipeline).values(**pipeline_at_db.dict(by_alias=True)) + insert_stmt = insert(comp_pipeline).values(**pipeline_at_db.model_dump(by_alias=True)) # FIXME: This is not a nice thing. this part of the information should be kept in comp_runs. update_exclusion_policy = set() if not dag_graph.nodes(): update_exclusion_policy.add("dag_adjacency_list") on_update_stmt = insert_stmt.on_conflict_do_update( index_elements=[comp_pipeline.c.project_id], - set_=pipeline_at_db.dict( + set_=pipeline_at_db.model_dump( by_alias=True, exclude_unset=True, exclude=update_exclusion_policy ), ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 4f9a8e42b53c..289a0063649b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -51,7 +51,7 @@ async def get( row: RowProxy | None = await result.first() if not row: raise ComputationalRunNotFoundError - return CompRunsAtDB.from_orm(row) + return CompRunsAtDB.model_validate(row) async def list( self, filter_by_state: set[RunningState] | None = None @@ -70,7 +70,7 @@ async def list( ) ) ): - runs_in_db.append(CompRunsAtDB.from_orm(row)) + runs_in_db.append(CompRunsAtDB.model_validate(row)) return list(runs_in_db) async def create( @@ -114,7 +114,7 @@ async def create( .returning(literal_column("*")) ) row = await result.first() - return CompRunsAtDB.from_orm(row) + return CompRunsAtDB.model_validate(row) except ForeignKeyViolation as exc: raise ClusterNotFoundError(cluster_id=cluster_id) from exc @@ -133,7 +133,7 @@ async def update( .returning(literal_column("*")) ) row = await result.first() - return CompRunsAtDB.from_orm(row) if row else None + return CompRunsAtDB.model_validate(row) if row else None async def set_run_result( self, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py index dabb45dfb0f0..aa72e996d7fc 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py @@ -44,7 +44,7 @@ async def get_task(self, project_id: ProjectID, node_id: NodeID) -> CompTaskAtDB row = await result.fetchone() if not row: raise ComputationalTaskNotFoundError(node_id=node_id) - return CompTaskAtDB.from_orm(row) + return 
CompTaskAtDB.model_validate(row) async def list_tasks( self, @@ -55,7 +55,7 @@ async def list_tasks( async for row in conn.execute( sa.select(comp_tasks).where(comp_tasks.c.project_id == f"{project_id}") ): - task_db = CompTaskAtDB.from_orm(row) + task_db = CompTaskAtDB.model_validate(row) tasks.append(task_db) return tasks @@ -72,7 +72,7 @@ async def list_computational_tasks( & (comp_tasks.c.node_class == NodeClass.COMPUTATIONAL) ) ): - task_db = CompTaskAtDB.from_orm(row) + task_db = CompTaskAtDB.model_validate(row) tasks.append(task_db) return tasks @@ -166,7 +166,7 @@ async def upsert_tasks_from_project( result = await conn.execute(on_update_stmt) row = await result.fetchone() assert row # nosec - inserted_comp_tasks_db.append(CompTaskAtDB.from_orm(row)) + inserted_comp_tasks_db.append(CompTaskAtDB.model_validate(row)) _logger.debug( "inserted the following tasks in comp_tasks: %s", f"{inserted_comp_tasks_db=}", @@ -193,7 +193,7 @@ async def _update_task( ) row = await result.fetchone() assert row # nosec - return CompTaskAtDB.from_orm(row) + return CompTaskAtDB.model_validate(row) async def mark_project_published_waiting_for_cluster_tasks_as_aborted( self, project_id: ProjectID diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index a33f689e9daf..bdb64cbbf993 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -36,7 +36,7 @@ ) from models_library.users import UserID from models_library.wallets import ZERO_CREDITS, WalletInfo -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.rabbitmq import ( RabbitMQRPCClient, RemoteMethodNotRegisteredError, @@ -89,7 +89,7 @@ async def _get_service_details( node.version, product_name, ) - obj: ServiceMetaDataPublished = ServiceMetaDataPublished.construct( + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.model_construct( **service_details ) return obj @@ -105,7 +105,7 @@ def _compute_node_requirements( node_defined_resources[resource_name] = node_defined_resources.get( resource_name, 0 ) + min(resource_value.limit, resource_value.reservation) - return NodeRequirements.parse_obj(node_defined_resources) + return NodeRequirements.model_validate(node_defined_resources) def _compute_node_boot_mode(node_resources: ServiceResourcesDict) -> BootMode: @@ -174,7 +174,9 @@ async def _generate_task_image( } project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) project_node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = parse_obj_as(ServiceResourcesDict, project_node.required_resources) + node_resources = TypeAdapter(ServiceResourcesDict).validate_python( + project_node.required_resources + ) if not node_resources: node_resources = await catalog_client.get_service_resources( user_id, node.key, node.version @@ -187,7 +189,7 @@ async def _generate_task_image( data.update(envs=_compute_node_envs(node_labels)) if node_extras and node_extras.container_spec: data.update(command=node_extras.container_spec.command) - return Image.parse_obj(data) + return Image.model_validate(data) async def _get_pricing_and_hardware_infos( @@ -287,7 +289,9 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: # less memory than the machine theoretical amount project_nodes_repo = 
ProjectNodesRepo(project_uuid=project_id) node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = parse_obj_as(ServiceResourcesDict, node.required_resources) + node_resources = TypeAdapter(ServiceResourcesDict).validate_python( + node.required_resources + ) if DEFAULT_SINGLE_SERVICE_NAME in node_resources: image_resources: ImageResources = node_resources[ DEFAULT_SINGLE_SERVICE_NAME @@ -322,7 +326,7 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: except ( RemoteMethodNotRegisteredError, RPCServerError, - asyncio.TimeoutError, + TimeoutError, ) as exc: raise ClustersKeeperNotAvailableError from exc @@ -343,7 +347,7 @@ async def generate_tasks_list_from_project( list_comp_tasks = [] unique_service_key_versions: set[ServiceKeyVersion] = { - ServiceKeyVersion.construct( + ServiceKeyVersion.model_construct( key=node.key, version=node.version ) # the service key version is frozen for node in project.workbench.values() @@ -362,7 +366,7 @@ async def generate_tasks_list_from_project( for internal_id, node_id in enumerate(project.workbench, 1): node: Node = project.workbench[node_id] - node_key_version = ServiceKeyVersion.construct( + node_key_version = ServiceKeyVersion.model_construct( key=node.key, version=node.version ) node_details, node_extras, node_labels = key_version_to_node_infos.get( @@ -430,8 +434,8 @@ async def generate_tasks_list_from_project( task_db = CompTaskAtDB( project_id=project.uuid, node_id=NodeID(node_id), - schema=NodeSchema.parse_obj( - node_details.dict( + schema=NodeSchema.model_validate( + node_details.model_dump( exclude_unset=True, by_alias=True, include={"inputs", "outputs"} ) ), @@ -446,7 +450,7 @@ async def generate_tasks_list_from_project( last_heartbeat=None, created=arrow.utcnow().datetime, modified=arrow.utcnow().datetime, - pricing_info=pricing_info.dict(exclude={"pricing_unit_cost"}) + pricing_info=pricing_info.model_dump(exclude={"pricing_unit_cost"}) if pricing_info else None, hardware_info=hardware_info, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py index 856c0ec3650a..5f5fe5263fff 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py @@ -23,7 +23,7 @@ async def get_project(self, project_id: ProjectID) -> ProjectAtDB: ).first() if not row: raise ProjectNotFoundError(project_id) - return ProjectAtDB.from_orm(row) + return ProjectAtDB.model_validate(row) async def is_node_present_in_workbench( self, project_id: ProjectID, node_uuid: NodeID diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index fe0337611283..59334aa0a060 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -23,17 +23,17 @@ async def get_projects_networks(self, project_id: ProjectID) -> ProjectsNetworks ).first() if not row: raise ProjectNetworkNotFoundError(project_id) - return ProjectsNetworks.from_orm(row) + return ProjectsNetworks.model_validate(row) async def upsert_projects_networks( self, project_id: ProjectID, networks_with_aliases: 
NetworksWithAliases ) -> None: - projects_networks_to_insert = ProjectsNetworks.parse_obj( + projects_networks_to_insert = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": networks_with_aliases} ) async with self.db_engine.acquire() as conn: - row_data = json.loads(projects_networks_to_insert.json()) + row_data = json.loads(projects_networks_to_insert.model_dump_json()) insert_stmt = pg_insert(projects_networks).values(**row_data) upsert_snapshot = insert_stmt.on_conflict_do_update( constraint=projects_networks.primary_key, set_=row_data diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py index 01d7fdcce611..0ce81c14bb63 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py @@ -31,5 +31,5 @@ async def get_user_preference( return ( None if preference_payload is None - else preference_class.parse_obj(preference_payload) + else preference_class.model_validate(preference_payload) ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py index 94f17b90295d..434e523965cc 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py @@ -1,5 +1,5 @@ from models_library.users import UserID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_users import UsersRepo @@ -10,7 +10,7 @@ class UsersRepository(BaseRepository): async def get_user_email(self, user_id: UserID) -> EmailStr: async with self.db_engine.acquire() as conn: email = await UsersRepo.get_email(conn, user_id) - return parse_obj_as(EmailStr, email) + return TypeAdapter(EmailStr).validate_python(email) async def get_user_role(self, user_id: UserID) -> UserRole: async with self.db_engine.acquire() as conn: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py index 0bc8c799dcb3..322e5281e463 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py @@ -83,7 +83,7 @@ async def get_service_extras( f"/service_extras/{urllib.parse.quote_plus(service_key)}/{service_version}", ) if resp.status_code == status.HTTP_200_OK: - return ServiceExtras.parse_obj(unenvelope_or_raise_error(resp)) + return ServiceExtras.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) @@ -94,7 +94,7 @@ async def get_running_service_details( "GET", f"running_interactive_services/{service_uuid}" ) if resp.status_code == status.HTTP_200_OK: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( unenvelope_or_raise_error(resp) ) raise HTTPException(status_code=resp.status_code, detail=resp.content) @@ -109,7 +109,7 @@ async def 
get_service_labels( ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return SimcoreServiceLabels.parse_obj(unenvelope_or_raise_error(resp)) + return SimcoreServiceLabels.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 7ce782c6366a..5945e07b8e3d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -293,7 +293,7 @@ def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: return Client( app=self._app, async_client=self._async_client, - base_url=dynamic_sidecar_endpoint, + base_url=f"{dynamic_sidecar_endpoint}", ) async def _await_for_result( @@ -358,7 +358,7 @@ async def restore_service_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> i result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) assert isinstance(result, int) # nosec @@ -392,7 +392,7 @@ async def save_service_state( result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), progress_callback, ) assert isinstance(result, int) # nosec @@ -411,7 +411,7 @@ async def pull_service_input_ports( transferred_bytes = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) return transferred_bytes or 0 @@ -429,7 +429,7 @@ async def pull_service_output_ports( result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) assert isinstance(result, int) # nosec @@ -448,7 +448,7 @@ async def push_service_output_ports( await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), progress_callback, ) @@ -495,7 +495,9 @@ async def get_service_activity( dynamic_sidecar_endpoint ) decoded_response = response.json() - return ActivityInfo.parse_obj(decoded_response) if decoded_response else None + return ( + ActivityInfo.model_validate(decoded_response) if decoded_response else None + ) async def free_reserved_disk_space( self, dynamic_sidecar_endpoint: AnyHttpUrl diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 241f32fe70ea..21ef1bbe2795 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -35,7 +35,7 @@ def __init__(self, app: FastAPI): # timeouts self._health_request_timeout = Timeout(1.0, connect=1.0) self._save_restore_timeout = Timeout( - scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), connect=scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, ) self._restart_containers_timeout = Timeout( @@ -63,13 +63,13 @@ def _get_url( no_api_version: bool = False, ) -> str: """formats and returns an url for the request""" - api_version = "" if no_api_version else f"/{self.API_VERSION}" + api_version = "" if no_api_version else f"{self.API_VERSION}/" return f"{dynamic_sidecar_endpoint}{api_version}{postfix}" async def _get_health_common( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/health", no_api_version=True) + url = self._get_url(dynamic_sidecar_endpoint, "health", no_api_version=True) return await self.client.get(url, timeout=self._health_request_timeout) @retry_on_errors() @@ -88,7 +88,7 @@ async def get_health_no_retry( async def get_containers( self, dynamic_sidecar_endpoint: AnyHttpUrl, *, only_status: bool ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers") + url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.get(url, params={"only_status": only_status}) @retry_on_errors() @@ -100,7 +100,7 @@ async def patch_containers_ports_io( enable_outputs: bool, enable_inputs: bool, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/io") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/io") return await self.client.patch( url, json={"enable_outputs": enable_outputs, "enable_inputs": enable_inputs} ) @@ -110,7 +110,7 @@ async def patch_containers_ports_io( async def post_containers_ports_outputs_dirs( self, dynamic_sidecar_endpoint: AnyHttpUrl, *, outputs_labels: dict[str, Any] ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs/dirs") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs/dirs") return await self.client.post(url, json={"outputs_labels": outputs_labels}) @retry_on_errors() @@ -125,7 +125,7 @@ async def get_containers_name( } ) url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/name?filters={filters}" + dynamic_sidecar_endpoint, f"containers/name?filters={filters}" ) return await self.client.get(url=url) @@ -140,7 +140,7 @@ async def post_containers_networks_attach( network_aliases: list[str], ) -> Response: url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:attach" + dynamic_sidecar_endpoint, f"containers/{container_id}/networks:attach" ) return await self.client.post( url, @@ -158,7 +158,7 @@ async def post_containers_networks_detach( network_id: str, ) -> Response: url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:detach" + dynamic_sidecar_endpoint, f"containers/{container_id}/networks:detach" ) return await self.client.post( url, @@ -174,7 +174,7 @@ async def post_containers_compose_spec( *, compose_spec: str, ) 
-> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/compose-spec") + url = self._get_url(dynamic_sidecar_endpoint, "containers/compose-spec") return await self.client.post(url, json={"docker_compose_yaml": compose_spec}) @retry_on_errors() @@ -185,9 +185,9 @@ async def post_containers_tasks( *, metrics_params: CreateServiceMetricsAdditionalParams, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers") + url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.post( - url, json={"metrics_params": metrics_params.dict()} + url, json={"metrics_params": metrics_params.model_dump()} ) @retry_on_errors() @@ -195,7 +195,7 @@ async def post_containers_tasks( async def post_containers_tasks_down( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers:down") + url = self._get_url(dynamic_sidecar_endpoint, "containers:down") return await self.client.post(url) @retry_on_errors() @@ -203,7 +203,7 @@ async def post_containers_tasks_down( async def post_containers_tasks_state_restore( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/state:restore") + url = self._get_url(dynamic_sidecar_endpoint, "containers/state:restore") return await self.client.post(url) @retry_on_errors() @@ -211,7 +211,7 @@ async def post_containers_tasks_state_restore( async def post_containers_tasks_state_save( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/state:save") + url = self._get_url(dynamic_sidecar_endpoint, "containers/state:save") return await self.client.post(url) @retry_on_errors() @@ -219,7 +219,7 @@ async def post_containers_tasks_state_save( async def post_containers_images_pull( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/images:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/images:pull") return await self.client.post(url) @retry_on_errors() @@ -230,7 +230,7 @@ async def post_containers_tasks_ports_inputs_pull( port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/inputs:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/inputs:pull") return await self.client.post(url, json=port_keys) @retry_on_errors() @@ -241,7 +241,7 @@ async def post_containers_tasks_ports_outputs_pull( port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:pull") return await self.client.post(url, json=port_keys) @retry_on_errors() @@ -249,7 +249,7 @@ async def post_containers_tasks_ports_outputs_pull( async def post_containers_tasks_ports_outputs_push( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:push") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:push") return await self.client.post(url) @retry_on_errors() @@ -257,7 +257,7 @@ async def post_containers_tasks_ports_outputs_push( async def post_containers_tasks_restart( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, 
"/containers:restart") + url = self._get_url(dynamic_sidecar_endpoint, "containers:restart") return await self.client.post(url) @retry_on_errors() @@ -268,7 +268,7 @@ async def put_volumes( volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, f"/volumes/{volume_category}") + url = self._get_url(dynamic_sidecar_endpoint, f"volumes/{volume_category}") return await self.client.put(url, json={"status": volume_status}) @@ -277,7 +277,7 @@ async def put_volumes( async def proxy_config_load( self, proxy_endpoint: AnyHttpUrl, proxy_configuration: dict[str, Any] ) -> Response: - url = self._get_url(proxy_endpoint, "/load", no_api_version=True) + url = self._get_url(proxy_endpoint, "load", no_api_version=True) return await self.client.post(url, json=proxy_configuration) @retry_on_errors() @@ -286,7 +286,7 @@ async def get_containers_activity( self, dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/activity") + url = self._get_url(dynamic_sidecar_endpoint, "containers/activity") return await self.client.get(url) @retry_on_errors() @@ -295,5 +295,5 @@ async def post_disk_reserved_free( self, dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/disk/reserved:free") + url = self._get_url(dynamic_sidecar_endpoint, "disk/reserved:free") return await self.client.post(url) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py index b9d38ca65025..98ba1ea2f405 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py @@ -239,7 +239,7 @@ def _update_container_labels( spec_service_key, default_limits ) - label_keys = StandardSimcoreDockerLabels.construct( + label_keys = StandardSimcoreDockerLabels.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 5100d63bab09..78a1201a7148 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -304,7 +304,7 @@ def _merge_resources_in_settings( # merge all resources empty_resource_entry: SimcoreServiceSettingLabelEntry = ( - SimcoreServiceSettingLabelEntry.parse_obj( + SimcoreServiceSettingLabelEntry.model_validate( { "name": "Resources", "type": "Resources", @@ -399,14 +399,14 @@ def _format_env_var(env_var: str, destination_container: list[str]) -> str: def _get_boot_options( service_labels: SimcoreServiceLabels, ) -> dict[EnvVarKey, BootOption] | None: - as_dict = service_labels.dict() + as_dict = service_labels.model_dump() boot_options_encoded = as_dict.get("io.simcore.boot-options", None) if boot_options_encoded is None: return None boot_options = json.loads(boot_options_encoded)["boot-options"] log.debug("got boot_options=%s", boot_options) - return {k: BootOption.parse_obj(v) for k, v in boot_options.items()} + return {k: 
BootOption.model_validate(v) for k, v in boot_options.items()} def _assemble_env_vars_for_boot_options( @@ -423,7 +423,7 @@ def _assemble_env_vars_for_boot_options( env_vars.append(f"{env_var_name}={value}") return SimcoreServiceSettingsLabel( - __root__=[ + root=[ SimcoreServiceSettingLabelEntry( name="env", type="string", value=list(env_vars) ) @@ -511,7 +511,7 @@ async def merge_settings_before_use( ) settings = _patch_target_service_into_env_vars(settings) - return SimcoreServiceSettingsLabel.parse_obj(settings) + return SimcoreServiceSettingsLabel.model_validate(settings) __all__ = ["merge_settings_before_use", "update_service_params_from_settings"] diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 001e549bf577..f344c93422fd 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -3,6 +3,7 @@ from typing import Any, NamedTuple from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from models_library.aiodocker_api import AioDockerServiceSpec from models_library.basic_types import BootModeEnum, PortInt from models_library.callbacks_mapping import CallbacksMapping @@ -15,13 +16,10 @@ ) from models_library.resource_tracker import HardwareInfo from models_library.service_settings_labels import SimcoreServiceSettingsLabel -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from servicelib.utils import unused_port -from settings_library.aws_s3_cli import AwsS3CliSettings -from settings_library.docker_registry import RegistrySettings -from settings_library.utils_encoders import create_json_encoder_wo_secrets from ....constants import DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL from ....core.dynamic_services_settings.scheduler import ( @@ -101,8 +99,11 @@ def _get_environment_variables( app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS and app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS.AWS_S3_CLI_S3 ): - dy_sidecar_aws_s3_cli_settings = app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS.json( - encoder=create_json_encoder_wo_secrets(AwsS3CliSettings), + dy_sidecar_aws_s3_cli_settings = json_dumps( + model_dump_with_secrets( + app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS, + show_secrets=True, + ) ) state_exclude = set() @@ -133,7 +134,7 @@ def _get_environment_variables( "DY_SIDECAR_USER_SERVICES_HAVE_INTERNET_ACCESS": f"{allow_internet_access}", "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": f"{telemetry_enabled}", "DY_SIDECAR_STATE_EXCLUDE": json_dumps(f"{x}" for x in state_exclude), - "DY_SIDECAR_CALLBACKS_MAPPING": callbacks_mapping.json(), + "DY_SIDECAR_CALLBACKS_MAPPING": callbacks_mapping.model_dump_json(), "DY_SIDECAR_STATE_PATHS": json_dumps( f"{x}" for x in scheduler_data.paths_mapping.state_paths ), @@ -157,14 +158,22 @@ def _get_environment_variables( "RABBIT_PORT": f"{rabbit_settings.RABBIT_PORT}", "RABBIT_USER": f"{rabbit_settings.RABBIT_USER}", "RABBIT_SECURE": 
f"{rabbit_settings.RABBIT_SECURE}", - "DY_DEPLOYMENT_REGISTRY_SETTINGS": app_settings.DIRECTOR_V2_DOCKER_REGISTRY.json( - encoder=create_json_encoder_wo_secrets(RegistrySettings), - exclude={"resolved_registry_url", "api_url"}, + "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( + json_dumps( + model_dump_with_secrets( + app_settings.DIRECTOR_V2_DOCKER_REGISTRY, + show_secrets=True, + exclude={"resolved_registry_url", "api_url"}, + ) + ) ), "DY_DOCKER_HUB_REGISTRY_SETTINGS": ( - app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY.json( - encoder=create_json_encoder_wo_secrets(RegistrySettings), - exclude={"resolved_registry_url", "api_url"}, + json_dumps( + model_dump_with_secrets( + app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY, + show_secrets=True, + exclude={"resolved_registry_url", "api_url"}, + ) ) if app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY else "null" @@ -190,7 +199,7 @@ def _get_environment_variables( "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": f"{app_settings.DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS}", } if r_clone_settings.R_CLONE_S3.S3_ENDPOINT is not None: - envs["S3_ENDPOINT"] = r_clone_settings.R_CLONE_S3.S3_ENDPOINT + envs["S3_ENDPOINT"] = f"{r_clone_settings.R_CLONE_S3.S3_ENDPOINT}" return envs @@ -471,8 +480,7 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: if hardware_info and len(hardware_info.aws_ec2_instances) == 1: ec2_instance_type: str = hardware_info.aws_ec2_instances[0] placement_constraints.append( - parse_obj_as( - DockerPlacementConstraint, + TypeAdapter(DockerPlacementConstraint).validate_python( f"node.labels.{DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY}=={ec2_instance_type}", ) ) @@ -554,4 +562,4 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: create_service_params=create_service_params, ) - return AioDockerServiceSpec.parse_obj(create_service_params) + return AioDockerServiceSpec.model_validate(create_service_params) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py index 5a503f8b8a8e..afd44dc0f598 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py @@ -74,10 +74,10 @@ def extract_task_state(task_status: dict[str, str]) -> tuple[ServiceState, str]: def _extract_container_status( container_state: ContainerState, ) -> tuple[ServiceState, ServiceMessage]: - assert container_state.Status # nosec + assert container_state.status # nosec return ( - _CONTAINER_STATE_TO_SERVICE_STATE[container_state.Status], - container_state.Error if container_state.Error else "", + _CONTAINER_STATE_TO_SERVICE_STATE[container_state.status], + container_state.error if container_state.error else "", ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py index ecb86e9a6aac..8b40a4e0f35c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py @@ -1,6 +1,6 @@ from aiodocker.exceptions import DockerError +from common_library.errors_classes import OsparcErrorMixin from models_library.projects_nodes_io import NodeID -from pydantic.errors import 
PydanticErrorMixin from ...core.errors import DirectorError @@ -39,6 +39,6 @@ class LegacyServiceIsNotSupportedError(DirectorError): """This API is not implemented by the director-v0""" -class UnexpectedContainerStatusError(PydanticErrorMixin, DynamicSidecarError): - code = "dynamic_sidecar.container_status" +class UnexpectedContainerStatusError(OsparcErrorMixin, DynamicSidecarError): + code = "dynamic_sidecar.container_status" # type: ignore msg_template = "Unexpected status from containers: {containers_with_error}" diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index 6475e324950b..166d4562186c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -242,19 +242,19 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: scheduler_data.user_id, scheduler_data.key, scheduler_data.version ) ).get("sidecar", {}) or {} - user_specific_service_spec = AioDockerServiceSpec.parse_obj( + user_specific_service_spec = AioDockerServiceSpec.model_validate( user_specific_service_spec ) # NOTE: since user_specific_service_spec follows Docker Service Spec and not Aio # we do not use aliases when exporting dynamic_sidecar_service_spec_base - dynamic_sidecar_service_final_spec = AioDockerServiceSpec.parse_obj( + dynamic_sidecar_service_final_spec = AioDockerServiceSpec.model_validate( nested_update( jsonable_encoder(dynamic_sidecar_service_spec_base, exclude_unset=True), jsonable_encoder(user_specific_service_spec, exclude_unset=True), include=_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS, ) ) - rabbit_message = ProgressRabbitMessageNode.construct( + rabbit_message = ProgressRabbitMessageNode.model_construct( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, @@ -272,7 +272,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: ) ) - rabbit_message = ProgressRabbitMessageNode.construct( + rabbit_message = ProgressRabbitMessageNode.model_construct( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py index f708c1cb22c4..f8416b4809bf 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py @@ -7,7 +7,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import ServiceKeyVersion, ServiceVersion from models_library.services_creation import CreateServiceMetricsAdditionalParams -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.fastapi.long_running_tasks.client import TaskId from tenacity import RetryError from tenacity.asyncio import AsyncRetrying @@ -168,7 +168,7 @@ async def progress_create_containers( project_name=project_name, 
node_name=node_name, service_key=scheduler_data.key, - service_version=parse_obj_as(ServiceVersion, scheduler_data.version), + service_version=TypeAdapter(ServiceVersion).validate_python(scheduler_data.version), service_resources=scheduler_data.service_resources, service_additional_metadata={}, ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index 3071cde1060f..e861ad9f30c2 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -535,7 +535,7 @@ async def _restore_service_state_with_metrics() -> None: ) ) service_outputs_labels = json.loads( - simcore_service_labels.dict().get("io.simcore.outputs", "{}") + simcore_service_labels.model_dump().get("io.simcore.outputs", "{}") ).get("outputs", {}) _logger.debug( "Creating dirs from service outputs labels: %s", @@ -563,7 +563,7 @@ async def get_allow_metrics_collection( bool, AllowMetricsCollectionFrontendUserPreference.get_default_value() ) - allow_metrics_collection = AllowMetricsCollectionFrontendUserPreference.parse_obj( - preference + allow_metrics_collection = ( + AllowMetricsCollectionFrontendUserPreference.model_validate(preference) ) return allow_metrics_collection.value diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py index b68467a572d8..04853661c477 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py @@ -40,7 +40,7 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyHttpUrl, NonNegativeFloat +from pydantic import NonNegativeFloat from servicelib.background_task import ( cancel_task, start_periodic_task, @@ -455,7 +455,7 @@ async def retrieve_service_inputs( service_name = self._inverse_search_mapping[node_uuid] scheduler_data: SchedulerData = self._to_observe[service_name] - dynamic_sidecar_endpoint: AnyHttpUrl = scheduler_data.endpoint + dynamic_sidecar_endpoint = scheduler_data.endpoint sidecars_client: SidecarsClient = await get_sidecars_client(self.app, node_uuid) started = time.time() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py index b03356770845..5a4a011a8747 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py @@ -76,7 +76,7 @@ def create_model_from_scheduler_data( service_state: ServiceState, service_message: str, ) -> RunningDynamicServiceDetails: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( { "boot_type": ServiceBootType.V2, "user_id": 
scheduler_data.user_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py index 2f1ac9b65485..85e56d52e631 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py @@ -2,7 +2,7 @@ from typing import Final from prometheus_client import CollectorRegistry, Histogram -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.instrumentation import MetricsBase, get_metrics_namespace from ..._meta import PROJECT_NAME @@ -31,7 +31,7 @@ _RATE_BPS_BUCKETS: Final[tuple[float, ...]] = tuple( - parse_obj_as(ByteSize, f"{m}MiB") + TypeAdapter(ByteSize).validate_python(f"{m}MiB") for m in ( 1, 30, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py index d86f09ec9c2b..c9edc8c0f1ce 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py @@ -6,7 +6,7 @@ from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq import get_rabbitmq_rpc_client @@ -26,10 +26,10 @@ async def get_or_create_api_key_and_secret( rpc_client = get_rabbitmq_rpc_client(app) result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_or_create_api_keys"), + TypeAdapter(RPCMethodName).validate_python("get_or_create_api_keys"), product_name=product_name, user_id=user_id, name=name, expiration=expiration, ) - return ApiKeyGet.parse_obj(result) + return ApiKeyGet.model_validate(result) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py index cba005a92ae2..e18dfc24121a 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py @@ -17,7 +17,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import ServiceKeyVersion from models_library.users import UserID -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from servicelib.rabbitmq import RabbitMQClient from servicelib.utils import logged_gather @@ -45,7 +45,7 @@ class _ToAdd(NamedTuple): def _network_name(project_id: ProjectID, user_defined: str) -> DockerNetworkName: network_name = f"{PROJECT_NETWORK_PREFIX}_{project_id}_{user_defined}" - return parse_obj_as(DockerNetworkName, network_name) + return TypeAdapter(DockerNetworkName).validate_python(network_name) async def requires_dynamic_sidecar( @@ -64,7 +64,7 @@ async def requires_dynamic_sidecar( simcore_service_labels: SimcoreServiceLabels = ( await director_v0_client.get_service_labels( - service=ServiceKeyVersion.parse_obj( + service=ServiceKeyVersion.model_validate( {"key": decoded_service_key, "version": service_version} ) ) @@ -184,10 +184,10 @@ async def 
_get_networks_with_aliases_for_default_network( be on the same network. Return an updated version of the projects_networks """ - new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate({}) default_network = _network_name(project_id, "default") - new_networks_with_aliases[default_network] = ContainerAliases.parse_obj({}) + new_networks_with_aliases[default_network] = ContainerAliases.model_validate({}) for node_uuid, node_content in new_workbench.items(): # only add dynamic-sidecar nodes @@ -200,7 +200,7 @@ async def _get_networks_with_aliases_for_default_network( # only add if network label is valid, otherwise it will be skipped try: - network_alias = parse_obj_as(DockerNetworkAlias, node_content.label) + network_alias = TypeAdapter(DockerNetworkAlias).validate_python(node_content.label) except ValidationError: message = LoggerRabbitMessage( user_id=user_id, @@ -248,7 +248,7 @@ async def update_from_workbench( ) ) except ProjectNetworkNotFoundError: - existing_projects_networks = ProjectsNetworks.parse_obj( + existing_projects_networks = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": {}} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py index 2563a4133d75..dcda51ad0e51 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py @@ -22,7 +22,7 @@ async def handler_out_of_credits(app: FastAPI, data: bytes) -> bool: - message = WalletCreditsLimitReachedMessage.parse_raw(data) + message = WalletCreditsLimitReachedMessage.model_validate_json(data) scheduler: "DynamicSidecarsScheduler" = app.state.dynamic_sidecar_scheduler # type: ignore[name-defined] # noqa: F821 settings: AppSettings = app.state.settings diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py index 2c546ea3d842..f1b4280bdcc1 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py @@ -23,7 +23,6 @@ ) from models_library.services import ServiceKey, ServiceVersion from models_library.wallets import WalletID -from pydantic import parse_obj_as from ..core.errors import PricingPlanUnitNotFoundError from ..core.settings import AppSettings @@ -92,7 +91,7 @@ async def get_default_service_pricing_plan( raise PricingPlanUnitNotFoundError(msg) response.raise_for_status() - return parse_obj_as(PricingPlanGet, response.json()) + return PricingPlanGet.model_validate(response.json()) async def get_default_pricing_and_hardware_info( self, @@ -130,7 +129,7 @@ async def get_pricing_unit( }, ) response.raise_for_status() - return parse_obj_as(PricingUnitGet, response.json()) + return PricingUnitGet.model_validate(response.json()) async def get_wallet_credits( self, @@ -142,7 +141,7 @@ async def get_wallet_credits( params={"product_name": product_name, "wallet_id": wallet_id}, ) response.raise_for_status() - return parse_obj_as(WalletTotalCredits, response.json()) + return WalletTotalCredits.model_validate(response.json()) # # app diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py index 98e188453337..b0cbb5e9629f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py @@ -72,5 +72,5 @@ async def get_s3_access(self, user_id: UserID) -> S3Settings: ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return S3Settings.parse_obj(unenvelope_or_raise_error(resp)) + return S3Settings.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/clients.py b/services/director-v2/src/simcore_service_director_v2/utils/clients.py index d01d38a19071..e12cf2d09f0f 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/clients.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/clients.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Union +from typing import Any import httpx from fastapi import HTTPException @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -def unenvelope_or_raise_error(resp: httpx.Response) -> Union[list[Any], dict[str, Any]]: +def unenvelope_or_raise_error(resp: httpx.Response) -> list[Any] | dict[str, Any]: """ Director responses are enveloped If successful response, we un-envelop it and return data as a dict diff --git a/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py b/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py index e2310c4914af..15f3481da107 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py @@ -66,7 +66,7 @@ def create_service_resources_from_task(task: CompTaskAtDB) -> ServiceResourcesDi DockerGenericTag(f"{task.image.name}:{task.image.tag}"), { res_name: ResourceValue(limit=res_value, reservation=res_value) - for res_name, res_value in task.image.node_requirements.dict( + for res_name, res_value in task.image.node_requirements.model_dump( by_alias=True ).items() if res_value is not None diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 0abbc18f5932..93e3197c71bc 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -1,12 +1,12 @@ -import datetime +import datetime as dt import logging from typing import Any +import arrow from models_library.projects_state import RunningState from models_library.services import ServiceKeyVersion from models_library.services_regex import SERVICE_KEY_RE from models_library.users import UserID -from pydantic import parse_obj_as from servicelib.utils import logged_gather from ..models.comp_tasks import CompTaskAtDB @@ -123,18 +123,18 @@ async def find_deprecated_tasks( ) ) service_key_version_to_details = { - ServiceKeyVersion.construct( + ServiceKeyVersion.model_construct( key=details["key"], version=details["version"] ): details for details in services_details } - today = datetime.datetime.now(tz=datetime.timezone.utc) + today = dt.datetime.now(tz=dt.UTC) def _is_service_deprecated(service: dict[str, Any]) -> bool: if deprecation_date := service.get("deprecated"): - 
deprecation_date = parse_obj_as( - datetime.datetime, deprecation_date - ).replace(tzinfo=datetime.timezone.utc) + deprecation_date = arrow.get(deprecation_date).datetime.replace( + tzinfo=dt.UTC + ) is_deprecated: bool = today > deprecation_date return is_deprecated return False diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 452f1ba50a9a..36075eb3bf3c 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -2,7 +2,7 @@ import collections import logging from collections.abc import Awaitable, Callable, Coroutine, Generator -from typing import Any, Final, NoReturn, Optional, ParamSpec, TypeVar, cast, get_args +from typing import Any, Final, NoReturn, ParamSpec, TypeVar, cast, get_args from uuid import uuid4 import dask_gateway # type: ignore[import-untyped] @@ -30,7 +30,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, ValidationError, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, ValidationError from servicelib.logging_utils import log_catch, log_context from simcore_sdk import node_ports_v2 from simcore_sdk.node_ports_common.exceptions import ( @@ -61,7 +61,7 @@ ServiceKeyStr = str ServiceVersionStr = str -_PVType = Optional[_NPItemValue] +_PVType = _NPItemValue | None assert len(get_args(_PVType)) == len( # nosec get_args(PortValue) @@ -73,7 +73,7 @@ def _get_port_validation_errors(port_key: str, err: ValidationError) -> list[Err for error in errors: assert error["loc"][-1] != (port_key,) error["loc"] = error["loc"] + (port_key,) - return errors + return list(errors) def generate_dask_job_id( @@ -134,7 +134,7 @@ async def create_node_ports( db_manager=db_manager, ) except ValidationError as err: - raise PortsValidationError(project_id, node_id, err.errors()) from err + raise PortsValidationError(project_id, node_id, list(err.errors())) from err async def parse_output_data( @@ -229,7 +229,7 @@ async def compute_input_data( if ports_errors: raise PortsValidationError(project_id, node_id, ports_errors) - return TaskInputData.parse_obj(input_data) + return TaskInputData.model_validate(input_data) async def compute_output_data_schema( @@ -276,7 +276,7 @@ async def compute_output_data_schema( } ) - return TaskOutputDataSchema.parse_obj(output_data_schema) + return TaskOutputDataSchema.model_validate(output_data_schema) _LOGS_FILE_NAME = "logs.zip" @@ -314,7 +314,7 @@ def compute_task_labels( ValidationError """ product_name = run_metadata.get("product_name", UNDEFINED_DOCKER_LABEL) - standard_simcore_labels = StandardSimcoreDockerLabels.construct( + standard_simcore_labels = StandardSimcoreDockerLabels.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, @@ -326,8 +326,7 @@ def compute_task_labels( memory_limit=node_requirements.ram, cpu_limit=node_requirements.cpu, ).to_simcore_runtime_docker_labels() - return standard_simcore_labels | parse_obj_as( - ContainerLabelsDict, + return standard_simcore_labels | TypeAdapter(ContainerLabelsDict).validate_python( { DockerLabelKey.from_key(k): f"{v}" for k, v in run_metadata.items() @@ -470,7 +469,7 @@ def from_node_reqs_to_dask_resources( node_reqs: NodeRequirements, ) -> dict[str, int | float]: """Dask resources are set such as {"CPU": X.X, "GPU": 
Y.Y, "RAM": INT}""" - dask_resources: dict[str, int | float] = node_reqs.dict( + dask_resources: dict[str, int | float] = node_reqs.model_dump( exclude_unset=True, by_alias=True, exclude_none=True, @@ -552,9 +551,9 @@ def _to_human_readable_resource_values(resources: dict[str, Any]) -> dict[str, A for res_name, res_value in resources.items(): if "RAM" in res_name: try: - human_readable_resources[res_name] = parse_obj_as( - ByteSize, res_value - ).human_readable() + human_readable_resources[res_name] = ( + TypeAdapter(ByteSize).validate_python(res_value).human_readable() + ) except ValidationError: _logger.warning( "could not parse %s:%s, please check what changed in how Dask prepares resources!", diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py b/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py index 2deb203780b4..15e6e98dfce2 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py @@ -216,7 +216,7 @@ async def test_scheduler_endpoint( try: if _is_dask_scheduler(authentication): async with distributed.Client( - address=endpoint, timeout=f"{_PING_TIMEOUT_S}", asynchronous=True + address=f"{endpoint}", timeout=f"{_PING_TIMEOUT_S}", asynchronous=True ) as dask_client: if dask_client.status != _DASK_SCHEDULER_RUNNING_STATE: msg = "internal scheduler is not running!" diff --git a/services/director-v2/src/simcore_service_director_v2/utils/db.py b/services/director-v2/src/simcore_service_director_v2/utils/db.py index b4240a1289b2..af944c11dff3 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/db.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/db.py @@ -1,11 +1,10 @@ -import json import logging from typing import Any +from common_library.serialization import model_dump_with_secrets from fastapi import FastAPI from models_library.clusters import BaseCluster from models_library.projects_state import RunningState -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.comp_pipeline import StateType from ..api.dependencies.database import RepoType, get_base_repository @@ -30,14 +29,13 @@ def to_clusters_db(cluster: BaseCluster, *, only_update: bool) -> dict[str, Any]: - db_model: dict[str, Any] = json.loads( - cluster.json( - by_alias=True, - exclude={"id", "access_rights"}, - exclude_unset=only_update, - exclude_none=only_update, - encoder=create_json_encoder_wo_secrets(BaseCluster), - ) + db_model: dict[str, Any] = model_dump_with_secrets( + cluster, + show_secrets=True, + by_alias=True, + exclude={"id", "access_rights"}, + exclude_unset=only_update, + exclude_none=only_update, ) return db_model diff --git a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py index 0dfef24cfef7..6704c8369efa 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py @@ -4,7 +4,7 @@ from typing import Any, Final, NamedTuple, TypeAlias from models_library.utils.specs_substitution import SubstitutionValue -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from servicelib.utils import logged_gather ContextDict: TypeAlias = dict[str, Any] @@ -89,7 +89,7 @@ 
def copy( return {k: self._variables_getters[k] for k in selection} -_HANDLERS_TIMEOUT: Final[NonNegativeInt] = parse_obj_as(NonNegativeInt, 4) +_HANDLERS_TIMEOUT: Final[NonNegativeInt] = 4 async def resolve_variables_from_context( diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py index 57d014a3c0f7..70249d3c1da4 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py @@ -31,7 +31,7 @@ async def publish_service_started_metrics( simcore_user_agent: str, task: CompTaskAtDB, ) -> None: - message = InstrumentationRabbitMessage.construct( + message = InstrumentationRabbitMessage.model_construct( metrics="service_started", user_id=user_id, project_id=task.project_id, @@ -53,7 +53,7 @@ async def publish_service_stopped_metrics( task: CompTaskAtDB, task_final_state: RunningState, ) -> None: - message = InstrumentationRabbitMessage.construct( + message = InstrumentationRabbitMessage.model_construct( metrics="service_stopped", user_id=user_id, project_id=task.project_id, @@ -153,7 +153,7 @@ async def publish_service_log( log: str, log_level: LogLevelInt, ) -> None: - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, @@ -172,7 +172,7 @@ async def publish_service_progress( node_id: NodeID, progress: NonNegativeFloat, ) -> None: - message = ProgressRabbitMessageNode.construct( + message = ProgressRabbitMessageNode.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, @@ -188,7 +188,7 @@ async def publish_project_log( log: str, log_level: LogLevelInt, ) -> None: - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( user_id=user_id, project_id=project_id, node_id=None, diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index db64158d6d57..d04ff9134aab 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -198,7 +198,7 @@ def mock_env( async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: settings = AppSettings.create_from_envs() app = init_app(settings) - print("Application settings\n", settings.json(indent=2)) + print("Application settings\n", settings.model_dump_json(indent=2)) # NOTE: this way we ensure the events are run in the application # since it starts the app on a test server with TestClient(app, raise_server_exceptions=True) as test_client: @@ -209,7 +209,7 @@ async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]: settings = AppSettings.create_from_envs() app = init_app(settings) - print("Application settings\n", settings.json(indent=2)) + print("Application settings\n", settings.model_dump_json(indent=2)) async with LifespanManager(app): yield app @@ -239,7 +239,7 @@ def fake_workbench(fake_workbench_file: Path) -> NodesDict: workbench_dict = json.loads(fake_workbench_file.read_text()) workbench = {} for node_id, node_data in workbench_dict.items(): - workbench[node_id] = Node.parse_obj(node_data) + workbench[node_id] = Node.model_validate(node_data) return workbench @@ -336,7 +336,9 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = 
ApiKeyGet.parse_obj(ApiKeyGet.Config.schema_extra["examples"][0]) + fake_data = ApiKeyGet.model_validate( + ApiKeyGet.model_config["json_schema_extra"]["examples"][0] + ) async def _create( app: FastAPI, diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index ad7185e8fa72..8ee507f4a2bf 100644 --- a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -43,8 +43,8 @@ async def assert_computation_task_out_obj( assert task_out.iteration == iteration assert task_out.cluster_id == cluster_id # check pipeline details contents - received_task_out_pipeline = task_out.pipeline_details.dict() - expected_task_out_pipeline = exp_pipeline_details.dict() + received_task_out_pipeline = task_out.pipeline_details.model_dump() + expected_task_out_pipeline = exp_pipeline_details.model_dump() assert received_task_out_pipeline == expected_task_out_pipeline @@ -64,11 +64,11 @@ async def assert_and_wait_for_pipeline_status( MAX_TIMEOUT_S = 5 * MINUTE async def check_pipeline_state() -> ComputationGet: - response = await client.get(url, params={"user_id": user_id}) + response = await client.get(f"{url}", params={"user_id": user_id}) assert ( response.status_code == status.HTTP_200_OK ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.id == project_uuid assert task_out.url.path == f"/v2/computations/{project_uuid}" print( @@ -100,4 +100,5 @@ async def check_pipeline_state() -> ComputationGet: return task_out # this is only to satisfy pylance - raise AssertionError("No computation task generated!") + msg = "No computation task generated!" 
+ raise AssertionError(msg) diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 16a6311da1b4..47f130a98beb 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -67,7 +67,7 @@ def mock_env( "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "1", "COMPUTATIONAL_BACKEND_ENABLED": "1", "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL": dask_scheduler_service, - "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.json(), + "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.model_dump_json(), "DYNAMIC_SIDECAR_IMAGE": dynamic_sidecar_docker_image_name, "SIMCORE_SERVICES_NETWORK_NAME": "test_swarm_network_name", "SWARM_STACK_NAME": "test_mocked_stack_name", @@ -109,7 +109,7 @@ def fake_workbench_computational_pipeline_details( ) -> PipelineDetails: adjacency_list = json.loads(fake_workbench_computational_adjacency_file.read_text()) node_states = json.loads(fake_workbench_node_states_file.read_text()) - return PipelineDetails.parse_obj( + return PipelineDetails.model_validate( {"adjacency_list": adjacency_list, "node_states": node_states, "progress": 0} ) @@ -720,7 +720,7 @@ async def test_abort_computation( assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.url.path == f"/v2/computations/{sleepers_project.uuid}:stop" assert task_out.stop_url is None diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index 0d0df8a402f1..1cc8e4fd64e1 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -5,6 +5,7 @@ from uuid import uuid4 import aiodocker +from pydantic import TypeAdapter import pytest from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( PricingPlanGet, @@ -14,7 +15,6 @@ ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture @@ -64,7 +64,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: "simcore_service_director_v2.modules.db.repositories." 
"projects_networks.ProjectsNetworksRepository.get_projects_networks" ), - return_value=ProjectsNetworks.parse_obj( + return_value=ProjectsNetworks.model_validate( {"project_uuid": uuid4(), "networks_with_aliases": {}} ), ) @@ -72,17 +72,16 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: @pytest.fixture def service_resources() -> ServiceResourcesDict: - return parse_obj_as( - ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + return TypeAdapter(ServiceResourcesDict).validate_python( + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @pytest.fixture def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" - service_pricing_plan = PricingPlanGet.parse_obj( - PricingPlanGet.Config.schema_extra["examples"][1] + service_pricing_plan = PricingPlanGet.model_validate( + PricingPlanGet.model_config["json_schema_extra"]["examples"][1] ) for unit in service_pricing_plan.pricing_units: unit.specific_info.aws_ec2_instances.clear() diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 720e7d0c3e14..a160afd664f9 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -47,7 +47,7 @@ from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.users import UserID -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -403,7 +403,7 @@ def mock_env( "COMPUTATIONAL_BACKEND_ENABLED": "true", "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "true", "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL": dask_scheduler_service, - "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.json(), + "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.model_dump_json(), "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", }, ) @@ -446,13 +446,13 @@ async def projects_networks_db( # NOTE: director-v2 does not have access to the webserver which creates this # injecting all dynamic-sidecar started services on a default networks - container_aliases: ContainerAliases = ContainerAliases.parse_obj({}) + container_aliases: ContainerAliases = ContainerAliases.model_validate({}) for k, (node_uuid, node) in enumerate(current_study.workbench.items()): if not is_legacy(node): container_aliases[node_uuid] = f"networkable_alias_{k}" - networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate({}) default_network_name = f"{PROJECT_NETWORK_PREFIX}_{current_study.uuid}_test" networks_with_aliases[default_network_name] = container_aliases @@ -463,7 +463,7 @@ async def projects_networks_db( engine: Engine = initialized_app.state.engine async with engine.acquire() as conn: - row_data = projects_networks_to_insert.dict() + row_data = projects_networks_to_insert.model_dump() insert_stmt = pg_insert(projects_networks).values(**row_data) upsert_snapshot = insert_stmt.on_conflict_do_update( 
constraint=projects_networks.primary_key, set_=row_data @@ -841,7 +841,9 @@ async def _debug_progress_callback( Client( app=initialized_app, async_client=director_v2_client, - base_url=parse_obj_as(AnyHttpUrl, f"{director_v2_client.base_url}"), + base_url=TypeAdapter(AnyHttpUrl).validate_python( + f"{director_v2_client.base_url}" + ), ), task_id, task_timeout=60, @@ -972,7 +974,7 @@ async def test_nodeports_integration( task_out, project=current_study, exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=PipelineDetails.parse_obj(fake_dy_success), + exp_pipeline_details=PipelineDetails.model_validate(fake_dy_success), iteration=1, cluster_id=DEFAULT_CLUSTER_ID, ) diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 0c5f10c07bd4..02f6358e426c 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -20,7 +20,7 @@ ServiceResourcesDictHelpers, ) from models_library.users import UserID -from pydantic import PositiveInt, parse_obj_as +from pydantic import PositiveInt, TypeAdapter from pytest_simcore.helpers.host import get_localhost_ip from servicelib.common_headers import ( X_DYNAMIC_SIDECAR_REQUEST_DNS, @@ -263,7 +263,9 @@ async def patch_dynamic_service_url(app: FastAPI, node_uuid: str) -> str: proxy_service_name, target_port=dynamic_sidecar_proxy_settings.DYNAMIC_SIDECAR_CADDY_ADMIN_API_PORT, ) - assert proxy_published_port is not None, f"{sidecar_settings.json()=}" + assert ( + proxy_published_port is not None + ), f"{sidecar_settings.model_dump_json()=}" async with scheduler.scheduler._lock: # noqa: SLF001 localhost_ip = get_localhost_ip() @@ -303,7 +305,7 @@ async def _get_service_resources( url = f"{catalog_url}/v0/services/{encoded_key}/{service_version}/resources" async with httpx.AsyncClient() as client: response = await client.get(f"{url}") - return parse_obj_as(ServiceResourcesDict, response.json()) + return TypeAdapter(ServiceResourcesDict).validate_python(response.json()) async def _handle_redirection( @@ -458,7 +460,7 @@ async def assert_retrieve_service( size_bytes = json_result["data"]["size_bytes"] assert size_bytes > 0 - assert type(size_bytes) == int + assert isinstance(size_bytes, int) async def assert_stop_service( diff --git a/services/director-v2/tests/integration/conftest.py b/services/director-v2/tests/integration/conftest.py index 0e6f86320942..424cb1282577 100644 --- a/services/director-v2/tests/integration/conftest.py +++ b/services/director-v2/tests/integration/conftest.py @@ -98,7 +98,7 @@ async def _creator( response.raise_for_status() assert response.status_code == status.HTTP_201_CREATED - computation_task = ComputationGet.parse_obj(response.json()) + computation_task = ComputationGet.model_validate(response.json()) created_comp_tasks.append((user_id, computation_task)) return computation_task diff --git a/services/director-v2/tests/mocks/fake_task.json b/services/director-v2/tests/mocks/fake_task.json index b26ebfa9ba59..00a9dfe3501b 100644 --- a/services/director-v2/tests/mocks/fake_task.json +++ b/services/director-v2/tests/mocks/fake_task.json @@ -60,6 +60,7 @@ "end": "2008-03-24T07:02:09.279Z", "created": "1961-07-06T11:24:30.877Z", "modified": "2008-03-24T07:02:09.279Z", + "pricing_info": null, "last_heartbeat": null, "hardware_info": { "aws_ec2_instances": [] diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index ecd7da595445..2123a506ad7b 100644 --- 
a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -27,7 +27,8 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import RunID, ServiceKey, ServiceKeyVersion, ServiceVersion from models_library.services_enums import ServiceState -from pydantic import parse_obj_as +from models_library.utils._original_fastapi_encoders import jsonable_encoder +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.s3 import S3Settings @@ -52,17 +53,17 @@ def simcore_services_network_name() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: - simcore_service_labels = SimcoreServiceLabels.parse_obj( - SimcoreServiceLabels.Config.schema_extra["examples"][1] + simcore_service_labels = SimcoreServiceLabels.model_validate( + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1] ) - simcore_service_labels.callbacks_mapping = parse_obj_as(CallbacksMapping, {}) + simcore_service_labels.callbacks_mapping = CallbacksMapping.model_validate({}) return simcore_service_labels @pytest.fixture def dynamic_service_create() -> DynamicServiceCreate: - return DynamicServiceCreate.parse_obj( - DynamicServiceCreate.Config.schema_extra["example"] + return DynamicServiceCreate.model_validate( + DynamicServiceCreate.model_config["json_schema_extra"]["example"] ) @@ -123,7 +124,7 @@ def scheduler_data_from_http_request( def mock_service_inspect( scheduler_data_from_http_request: ServiceDetails, ) -> Mapping[str, Any]: - service_details = json.loads(scheduler_data_from_http_request.json()) + service_details = json.loads(scheduler_data_from_http_request.model_dump_json()) service_details["compose_spec"] = json.dumps(service_details["compose_spec"]) return { "Spec": { @@ -200,7 +201,7 @@ def mocked_storage_service_api( respx_mock.post( "/simcore-s3:access", name="get_or_create_temporary_s3_access", - ).respond(json={"data": fake_s3_settings.dict(by_alias=True)}) + ).respond(json=jsonable_encoder({"data": fake_s3_settings}, by_alias=True)) yield respx_mock @@ -211,8 +212,10 @@ def mocked_storage_service_api( @pytest.fixture def mock_service_key_version() -> ServiceKeyVersion: return ServiceKeyVersion( - key=parse_obj_as(ServiceKey, "simcore/services/dynamic/myservice"), - version=parse_obj_as(ServiceVersion, "1.4.5"), + key=TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/myservice" + ), + version=TypeAdapter(ServiceVersion).validate_python("1.4.5"), ) @@ -221,7 +224,7 @@ def fake_service_specifications(faker: Faker) -> dict[str, Any]: # the service specifications follow the Docker service creation available # https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate return { - "sidecar": DockerServiceSpec.parse_obj( + "sidecar": DockerServiceSpec.model_validate( { "Labels": {"label_one": faker.pystr(), "label_two": faker.pystr()}, "TaskTemplate": { diff --git a/services/director-v2/tests/unit/test_core_settings.py b/services/director-v2/tests/unit/test_core_settings.py index 84d99057f3a3..2151d64cfa51 100644 --- a/services/director-v2/tests/unit/test_core_settings.py +++ b/services/director-v2/tests/unit/test_core_settings.py @@ -5,9 +5,10 @@ from typing import Any import pytest -from models_library.basic_types import LogLevel +from models_library.basic_types import BootModeEnum, LogLevel from pydantic import ValidationError from pytest_simcore.helpers.typing_env 
import EnvVarsDict +from settings_library.base import DefaultFromEnvFactoryError from settings_library.r_clone import S3Provider from simcore_service_director_v2.core.dynamic_services_settings.egress_proxy import ( EnvoyLogLevel, @@ -17,7 +18,7 @@ PlacementSettings, RCloneSettings, ) -from simcore_service_director_v2.core.settings import AppSettings, BootModeEnum +from simcore_service_director_v2.core.settings import AppSettings def _get_backend_type_options() -> set[str]: @@ -43,7 +44,7 @@ def test_enforce_r_clone_requirement(monkeypatch: pytest.MonkeyPatch) -> None: def test_settings_with_project_env_devel(project_env_devel_environment: dict[str, Any]): # loads from environ settings = AppSettings.create_from_envs() - print("captured settings: \n", settings.json(indent=2)) + print("captured settings: \n", settings.model_dump_json(indent=2)) assert settings.SC_BOOT_MODE == BootModeEnum.DEBUG assert settings.LOG_LEVEL == LogLevel.DEBUG @@ -60,7 +61,7 @@ def test_settings_with_repository_env_devel( ) # defined in docker-compose settings = AppSettings.create_from_envs() - print("captured settings: \n", settings.json(indent=2)) + print("captured settings: \n", settings.model_dump_json(indent=2)) assert settings @@ -185,7 +186,7 @@ def test_services_custom_constraint_failures( monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.setenv("DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS", custom_constraints) - with pytest.raises(Exception): + with pytest.raises(DefaultFromEnvFactoryError): AppSettings.create_from_envs() diff --git a/services/director-v2/tests/unit/test_models_clusters.py b/services/director-v2/tests/unit/test_models_clusters.py index 0a5d29283bb7..b08a988fc680 100644 --- a/services/director-v2/tests/unit/test_models_clusters.py +++ b/services/director-v2/tests/unit/test_models_clusters.py @@ -13,7 +13,7 @@ WorkerMetrics, ) from models_library.clusters import ClusterTypeInModel -from pydantic import BaseModel, parse_obj_as +from pydantic import BaseModel, ByteSize, TypeAdapter from simcore_postgres_database.models.clusters import ClusterType @@ -61,13 +61,12 @@ def test_scheduler_constructor_with_no_workers_has_correct_dict(faker: Faker): def test_worker_constructor_corrects_negative_used_resources(faker: Faker): worker = Worker( - id=faker.pyint(min_value=1), + id=f"{faker.pyint(min_value=1)}", name=faker.name(), - resources=parse_obj_as(AvailableResources, {}), - used_resources=parse_obj_as(UsedResources, {"CPU": -0.0000234}), - memory_limit=faker.pyint(min_value=1), - metrics=parse_obj_as( - WorkerMetrics, + resources=TypeAdapter(AvailableResources).validate_python({}), + used_resources=TypeAdapter(UsedResources).validate_python({"CPU": -0.0000234}), + memory_limit=ByteSize(faker.pyint(min_value=1)), + metrics=WorkerMetrics.model_validate( { "cpu": faker.pyfloat(min_value=0), "memory": faker.pyint(min_value=0), diff --git a/services/director-v2/tests/unit/test_models_dynamic_services.py b/services/director-v2/tests/unit/test_models_dynamic_services.py index dd0df8a0eed6..99a22ece3bb7 100644 --- a/services/director-v2/tests/unit/test_models_dynamic_services.py +++ b/services/director-v2/tests/unit/test_models_dynamic_services.py @@ -138,7 +138,7 @@ def test_running_service_details_make_status( print(running_service_details) assert running_service_details - running_service_details_dict = running_service_details.dict( + running_service_details_dict = running_service_details.model_dump( exclude_unset=True, by_alias=True ) @@ -219,7 +219,7 @@ def 
test_regression_legacy_service_compatibility() -> None: "user_id": "1", "project_id": "b1ec5c8e-f5bb-11eb-b1d5-02420a000006", } - service_details = RunningDynamicServiceDetails.parse_obj(api_response) + service_details = RunningDynamicServiceDetails.model_validate(api_response) assert service_details diff --git a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py index 607b8231f78d..0bbd9bca5268 100644 --- a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py @@ -5,7 +5,7 @@ from pathlib import Path import pytest -from pydantic import parse_file_as +from pydantic import TypeAdapter from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData @@ -20,13 +20,13 @@ def test_regression_as_label_data(scheduler_data: SchedulerData) -> None: # old tested implementation scheduler_data_copy = deepcopy(scheduler_data) scheduler_data_copy.compose_spec = json.dumps(scheduler_data_copy.compose_spec) - json_encoded = scheduler_data_copy.json() + json_encoded = scheduler_data_copy.model_dump_json() # using pydantic's internals label_data = scheduler_data.as_label_data() - parsed_json_encoded = SchedulerData.parse_raw(json_encoded) - parsed_label_data = SchedulerData.parse_raw(label_data) + parsed_json_encoded = SchedulerData.model_validate_json(json_encoded) + parsed_label_data = SchedulerData.model_validate_json(label_data) assert parsed_json_encoded == parsed_label_data @@ -35,4 +35,6 @@ def test_ensure_legacy_format_compatibility(legacy_scheduler_data_format: Path): # PRs applying changes to the legacy format: # - https://github.com/ITISFoundation/osparc-simcore/pull/3610 - assert parse_file_as(list[SchedulerData], legacy_scheduler_data_format) + assert TypeAdapter(list[SchedulerData]).validate_json( + legacy_scheduler_data_format.read_text() + ) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index 68f8464b829d..f45040c143a5 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -55,7 +55,6 @@ from models_library.resource_tracker import HardwareInfo from models_library.users import UserID from pydantic import AnyUrl, ByteSize, SecretStr, TypeAdapter -from pydantic.tools import parse_obj_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.background_task import periodic_task @@ -164,7 +163,7 @@ async def factory() -> DaskClient: client = await DaskClient.create( app=minimal_app, settings=minimal_app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, - endpoint=parse_obj_as(AnyUrl, dask_spec_local_cluster.scheduler_address), + endpoint=TypeAdapter(AnyUrl).validate_python(dask_spec_local_cluster.scheduler_address), authentication=NoAuthentication(), tasks_file_link_type=tasks_file_link_type, cluster_type=ClusterTypeInModel.ON_PREMISE, @@ -205,7 +204,7 @@ async def factory() -> DaskClient: client = await DaskClient.create( app=minimal_app, settings=minimal_app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, - endpoint=parse_obj_as(AnyUrl, local_dask_gateway_server.address), + endpoint=TypeAdapter(AnyUrl).validate_python(local_dask_gateway_server.address), authentication=SimpleAuthentication( 
username="pytest_user", password=SecretStr(local_dask_gateway_server.password), @@ -299,7 +298,7 @@ def cpu_image(node_id: NodeID) -> ImageParams: tag="1.5.5", node_requirements=NodeRequirements( CPU=1, - RAM=parse_obj_as(ByteSize, "128 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), GPU=None, ), ) # type: ignore @@ -327,7 +326,7 @@ def gpu_image(node_id: NodeID) -> ImageParams: node_requirements=NodeRequirements( CPU=1, GPU=1, - RAM=parse_obj_as(ByteSize, "256 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("256 MiB"), ), ) # type: ignore return ImageParams( @@ -367,11 +366,11 @@ def _mocked_node_ports(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_input_data", - return_value=TaskInputData.parse_obj({}), + return_value=TaskInputData.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_output_data_schema", - return_value=TaskOutputDataSchema.parse_obj({}), + return_value=TaskOutputDataSchema.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_service_log_file_upload_link", @@ -470,8 +469,7 @@ def comp_run_metadata(faker: Faker) -> RunMetadataDict: @pytest.fixture def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: - return parse_obj_as( - ContainerLabelsDict, + return TypeAdapter(ContainerLabelsDict).validate_python( { k.replace("_", "-").lower(): v for k, v in comp_run_metadata.items() @@ -482,7 +480,9 @@ def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -529,7 +529,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster # NOTE2: since there is only 1 task here, it's ok to pass the nodeID @@ -645,7 +645,7 @@ def fake_sidecar_fct( task = worker.state.tasks.get(worker.get_current_task()) assert task is not None - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( @@ -737,7 +737,7 @@ def fake_remote_fct( print("--> raising cancellation error now") raise TaskCancelledError - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ -947,7 +947,7 @@ async def test_too_many_resources_send_computation_task( tag="1.4.5", node_requirements=NodeRequirements( CPU=10000000000000000, - RAM=parse_obj_as(ByteSize, "128 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), GPU=None, ), ) # type: ignore @@ -1083,7 +1083,7 @@ def fake_remote_fct( if fail_remote_fct: err_msg = "We fail because we're told to!" 
raise ValueError(err_msg) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ -1174,7 +1174,7 @@ def fake_remote_fct( published_event = Event(name=_DASK_START_EVENT) published_event.set() - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # run the computation published_computation_task = await dask_client.send_computation_tasks( @@ -1250,7 +1250,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index 3dd97cc47538..3bd1e3188784 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -9,6 +9,8 @@ import pytest from _dask_helpers import DaskGatewayServer +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from distributed.deploy.spec import SpecCluster from faker import Faker from models_library.clusters import ( @@ -24,7 +26,6 @@ from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.clusters import ClusterType from simcore_service_director_v2.core.application import init_app from simcore_service_director_v2.core.errors import ( @@ -87,7 +88,7 @@ def creator(num_clusters: int) -> list[Cluster]: fake_clusters = [] for n in range(num_clusters): fake_clusters.append( - Cluster.parse_obj( + Cluster.model_validate( { "id": faker.pyint(), "name": faker.name(), @@ -126,10 +127,15 @@ def creator(): ) monkeypatch.setenv( "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", - SimpleAuthentication( - username=faker.user_name(), - password=SecretStr(local_dask_gateway_server.password), - ).json(encoder=create_json_encoder_wo_secrets(SimpleAuthentication)), + json_dumps( + model_dump_with_secrets( + SimpleAuthentication( + username=faker.user_name(), + password=SecretStr(local_dask_gateway_server.password), + ), + show_secrets=True, + ) + ), ) return creator @@ -194,11 +200,11 @@ async def test_dask_clients_pool_acquisition_creates_client_on_demand( cluster_type=ClusterTypeInModel.ON_PREMISE, ) ) - async with clients_pool.acquire(cluster) as dask_client: + async with clients_pool.acquire(cluster): # on start it is created mocked_dask_client.create.assert_has_calls(mocked_creation_calls) - async with clients_pool.acquire(cluster) as dask_client: + async with clients_pool.acquire(cluster): # the connection already exists, so there is no new call to create mocked_dask_client.create.assert_has_calls(mocked_creation_calls) @@ -278,5 +284,5 @@ def just_a_quick_fct(x, y): ) future = dask_client.backend.client.submit(just_a_quick_fct, 12, 23) assert future - result = await future.result(timeout=10) # type: ignore + result = await future.result(timeout=10) assert result == 
35 diff --git a/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py b/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py index 82e397bd3f18..32c39f416eec 100644 --- a/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py +++ b/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py @@ -1,5 +1,5 @@ from models_library.osparc_variable_identifier import OsparcVariableIdentifier -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_postgres_database.models.services_environments import VENDOR_SECRET_PREFIX @@ -8,4 +8,4 @@ def test_vendor_secret_names_are_osparc_environments(): # packages simcore_postgres_database and models_library which are indenpendent assert VENDOR_SECRET_PREFIX.endswith("_") - parse_obj_as(OsparcVariableIdentifier, f"${VENDOR_SECRET_PREFIX}FAKE_SECRET") + TypeAdapter(OsparcVariableIdentifier).validate_python(f"${VENDOR_SECRET_PREFIX}FAKE_SECRET") diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py index a38a5a06197a..c748fc1cd1b0 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py @@ -4,6 +4,9 @@ from contextlib import contextmanager from typing import Any, AsyncIterable, Callable, Iterator from unittest.mock import AsyncMock +from models_library.api_schemas_dynamic_sidecar.containers import ( + ActivityInfoOrNone +) import pytest from common_library.json_serialization import json_dumps @@ -11,7 +14,7 @@ from fastapi import FastAPI, status from httpx import HTTPError, Response from models_library.sidecar_volumes import VolumeCategory, VolumeStatus -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.http_client_thin import ClientHttpError, UnexpectedStatusError @@ -33,7 +36,7 @@ @pytest.fixture def dynamic_sidecar_endpoint() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + return TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") @pytest.fixture @@ -352,21 +355,21 @@ async def test_update_volume_state( @pytest.mark.parametrize( - "mock_json", + "mock_dict", [{"seconds_inactive": 1}, {"seconds_inactive": 0}, None], ) async def test_get_service_activity( get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl, - mock_json: dict[str, Any], + mock_dict: dict[str, Any], ) -> None: with get_patched_client( "get_containers_activity", return_value=Response( - status_code=status.HTTP_200_OK, text=json_dumps(mock_json) + status_code=status.HTTP_200_OK, text=json_dumps(mock_dict) ), ) as client: - assert await client.get_service_activity(dynamic_sidecar_endpoint) == mock_json + assert await client.get_service_activity(dynamic_sidecar_endpoint) == TypeAdapter(ActivityInfoOrNone).validate_python(mock_dict) async def test_free_reserved_disk_space( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 6403be5a78ed..6584020dcb60 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ 
b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -11,7 +11,7 @@ from httpx import Response from models_library.services_creation import CreateServiceMetricsAdditionalParams from models_library.sidecar_volumes import VolumeCategory, VolumeStatus -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict from respx import MockRouter, Route from respx.types import SideEffectTypes @@ -63,7 +63,7 @@ async def thin_client(mocked_app: FastAPI) -> AsyncIterable[ThinSidecarsClient]: @pytest.fixture def dynamic_sidecar_endpoint() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + return TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") @pytest.fixture @@ -116,7 +116,7 @@ async def test_get_containers( mock_response = Response(status.HTTP_200_OK) mock_request( "GET", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers?only_status={str(only_status).lower()}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers?only_status={str(only_status).lower()}", mock_response, None, ) @@ -139,7 +139,7 @@ async def test_post_patch_containers_ports_io( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "PATCH", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/io", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/ports/io", mock_response, None, ) @@ -162,7 +162,7 @@ async def test_post_containers_ports_outputs_dirs( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/outputs/dirs", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/ports/outputs/dirs", mock_response, None, ) @@ -191,7 +191,7 @@ async def test_get_containers_name( mock_request( "GET", ( - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}" + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}" f"/containers/name?filters={encoded_filters}" ), mock_response, @@ -216,7 +216,7 @@ async def test_post_containers_networks_attach( container_id = "a_container_id" mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:attach", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/{container_id}/networks:attach", mock_response, None, ) @@ -239,7 +239,7 @@ async def test_post_containers_networks_detach( container_id = "a_container_id" mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:detach", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/{container_id}/networks:detach", mock_response, None, ) @@ -262,7 +262,7 @@ async def test_put_volumes( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "PUT", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/volumes/{volume_category}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/volumes/{volume_category}", mock_response, None, ) @@ -353,7 +353,7 @@ async def test_post_containers_tasks( mock_response = Response(status.HTTP_202_ACCEPTED, json="mocked_task_id") mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}{mock_endpoint}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}{mock_endpoint}", mock_response, None, ) @@ -371,7 +371,7 @@ async def test_get_containers_inactivity( mock_response = Response(status.HTTP_200_OK, json={}) 
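# NOTE: minimal sketch (not part of the diff) of why the explicit "/" before
# thin_client.API_VERSION is dropped in the URL f-strings of these hunks — in
# pydantic v2, AnyHttpUrl renders bare-host URLs with a trailing slash, so the
# old pattern would yield a double slash. Assumes pydantic>=2:
from pydantic import AnyHttpUrl, TypeAdapter

_endpoint = TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111")
assert f"{_endpoint}" == "http://missing-host:1111/"  # pydantic v2 keeps the trailing slash
_url = f"{_endpoint}v1/containers/activity"  # hence no extra "/" before the path segment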
mock_request( "GET", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/activity", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/activity", mock_response, None, ) @@ -388,7 +388,7 @@ async def test_post_disk_reserved_free( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/disk/reserved:free", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/disk/reserved:free", mock_response, None, ) @@ -405,7 +405,7 @@ async def test_post_containers_compose_spec( mock_response = Response(status.HTTP_202_ACCEPTED) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/compose-spec", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/compose-spec", mock_response, None, ) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py index 8b390e7b9737..340c7ad3e44b 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py @@ -6,6 +6,7 @@ from typing import Any from uuid import uuid4 +from pydantic import TypeAdapter import pytest import yaml from models_library.docker import to_simcore_runtime_docker_label_key @@ -21,7 +22,6 @@ ServiceResourcesDict, ) from models_library.users import UserID -from pydantic import parse_obj_as from servicelib.resources import CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY from simcore_service_director_v2.modules.dynamic_sidecar import docker_compose_specs @@ -74,8 +74,7 @@ def test_parse_and_export_of_compose_environment_section(): [ pytest.param( {"version": "2.3", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}}, - parse_obj_as( - ServiceResourcesDict, + TypeAdapter(ServiceResourcesDict).validate_python( { DEFAULT_SINGLE_SERVICE_NAME: { "image": "simcore/services/dynamic/jupyter-math:2.0.5", @@ -90,8 +89,7 @@ def test_parse_and_export_of_compose_environment_section(): ), pytest.param( {"version": "3.7", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}}, - parse_obj_as( - ServiceResourcesDict, + TypeAdapter(ServiceResourcesDict).validate_python( { DEFAULT_SINGLE_SERVICE_NAME: { "image": "simcore/services/dynamic/jupyter-math:2.0.5", @@ -156,7 +154,7 @@ async def test_inject_resource_limits_and_reservations( [ pytest.param( json.loads( - SimcoreServiceLabels.Config.schema_extra["examples"][2][ + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2][ "simcore.service.compose-spec" ] ), @@ -200,7 +198,7 @@ def test_regression_service_has_no_reservations(): "version": "3.7", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}, } - service_resources: ServiceResourcesDict = parse_obj_as(ServiceResourcesDict, {}) + service_resources: ServiceResourcesDict = TypeAdapter(ServiceResourcesDict).validate_python({}) spec_before = deepcopy(service_spec) docker_compose_specs._update_resource_limits_and_reservations( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index 13d617ed82d7..84711cf09366 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -162,7 +162,11 @@ def mocked_director_v0( ), name="service labels", ).respond( - 
json={"data": SimcoreServiceLabels.Config.schema_extra["examples"][0]} + json={ + "data": SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0] + } ) yield mock @@ -525,9 +529,9 @@ async def test_mark_all_services_in_wallet_for_removal( wallet_id = scheduler_data.wallet_info.wallet_id can_remove = scheduler_data.dynamic_sidecar.service_removal_state.can_remove match wallet_id: - case WalletID(1): + case 1: assert can_remove is True - case WalletID(2): + case 2: assert can_remove is False case _: pytest.fail("unexpected case") diff --git a/services/director-v2/tests/unit/test_modules_notifier.py b/services/director-v2/tests/unit/test_modules_notifier.py index 46d0879cebc4..cf6d8e1b01cd 100644 --- a/services/director-v2/tests/unit/test_modules_notifier.py +++ b/services/director-v2/tests/unit/test_modules_notifier.py @@ -18,7 +18,7 @@ from models_library.projects_nodes_io import NodeID from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather @@ -111,7 +111,7 @@ def _get_on_no_more_credits_event( # emulates front-end receiving message async def on_no_more_credits(data): - assert parse_obj_as(ServiceNoMoreCredits, data) is not None + assert ServiceNoMoreCredits.model_validate(data) is not None on_event_spy = AsyncMock(wraps=on_no_more_credits) socketio_client.on(SOCKET_IO_SERVICE_NO_MORE_CREDITS_EVENT, on_event_spy) diff --git a/services/director-v2/tests/unit/test_modules_osparc_variables.py b/services/director-v2/tests/unit/test_modules_osparc_variables.py index 9ed659f00ad5..635904292b82 100644 --- a/services/director-v2/tests/unit/test_modules_osparc_variables.py +++ b/services/director-v2/tests/unit/test_modules_osparc_variables.py @@ -21,7 +21,7 @@ from models_library.users import UserID from models_library.utils.specs_substitution import SubstitutionValue from models_library.utils.string_substitution import OSPARC_IDENTIFIER_PREFIX -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.faker_compose_specs import generate_fake_docker_compose from simcore_postgres_database.models.services_environments import VENDOR_SECRET_PREFIX @@ -48,8 +48,8 @@ def session_context(faker: Faker) -> ContextDict: return ContextDict( app=FastAPI(), - service_key=parse_obj_as(ServiceKey, "simcore/services/dynamic/foo"), - service_version=parse_obj_as(ServiceVersion, "1.2.3"), + service_key=TypeAdapter(ServiceKey).validate_python("simcore/services/dynamic/foo"), + service_version=TypeAdapter(ServiceVersion).validate_python("1.2.3"), compose_spec=generate_fake_docker_compose(faker), product_name=faker.word(), project_id=faker.uuid4(), @@ -101,7 +101,7 @@ async def request_user_email(app: FastAPI, user_id: UserID) -> SubstitutionValue # All values extracted from the context MUST be SubstitutionValue assert { - key: parse_obj_as(SubstitutionValue, value) for key, value in environs.items() + key: TypeAdapter(SubstitutionValue).validate_python(value) for key, value in environs.items() } for osparc_variable_name, context_name in [ diff --git a/services/director-v2/tests/unit/test_modules_project_networks.py b/services/director-v2/tests/unit/test_modules_project_networks.py index 585d8131b8e3..848b3629e107 100644 --- 
a/services/director-v2/tests/unit/test_modules_project_networks.py +++ b/services/director-v2/tests/unit/test_modules_project_networks.py @@ -40,8 +40,8 @@ def using( attach: list[Any], ) -> "Example": return cls( - existing_networks_with_aliases=NetworksWithAliases.parse_obj(existing), - new_networks_with_aliases=NetworksWithAliases.parse_obj(new), + existing_networks_with_aliases=NetworksWithAliases.model_validate(existing), + new_networks_with_aliases=NetworksWithAliases.model_validate(new), expected_calls=MockedCalls(detach=detach, attach=attach), ) @@ -184,7 +184,7 @@ def dy_workbench_with_networkable_labels(mocks_dir: Path) -> NodesDict: for node_uuid, node_data in dy_workbench.items(): node_data["label"] = f"label_{uuid4()}" - parsed_workbench[node_uuid] = Node.parse_obj(node_data) + parsed_workbench[node_uuid] = Node.model_validate(node_data) return parsed_workbench diff --git a/services/director-v2/tests/unit/test_modules_rabbitmq.py b/services/director-v2/tests/unit/test_modules_rabbitmq.py index 1d557d673a83..972f836f5752 100644 --- a/services/director-v2/tests/unit/test_modules_rabbitmq.py +++ b/services/director-v2/tests/unit/test_modules_rabbitmq.py @@ -44,7 +44,7 @@ def message(faker: Faker) -> WalletCreditsLimitReachedMessage: async def test_handler_out_of_credits( mock_app: FastAPI, message: WalletCreditsLimitReachedMessage, ignore_limits ): - await handler_out_of_credits(mock_app, message.json().encode()) + await handler_out_of_credits(mock_app, message.model_dump_json().encode()) removal_mark_count = ( mock_app.state.dynamic_sidecar_scheduler.mark_all_services_in_wallet_for_removal.call_count diff --git a/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py b/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py index 8d58d96f675c..6347ebab5f49 100644 --- a/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py @@ -40,11 +40,11 @@ def assert_copy_has_changes(original: SchedulerData) -> Iterator[SchedulerData]: async def test_parse_saved_fake_scheduler_data(fake_scheduler_data: str) -> None: - assert SchedulerData.parse_raw(fake_scheduler_data) + assert SchedulerData.model_validate_json(fake_scheduler_data) def test_nested_compare(fake_scheduler_data: str) -> None: - scheduler_data = SchedulerData.parse_raw(fake_scheduler_data) + scheduler_data = SchedulerData.model_validate_json(fake_scheduler_data) with assert_copy_has_changes(scheduler_data) as to_change: to_change.paths_mapping.inputs_path = Path("/tmp") diff --git a/services/director-v2/tests/unit/test_utils_client_decorators.py b/services/director-v2/tests/unit/test_utils_client_decorators.py index 066bedad11b8..5b630f788c74 100644 --- a/services/director-v2/tests/unit/test_utils_client_decorators.py +++ b/services/director-v2/tests/unit/test_utils_client_decorators.py @@ -35,10 +35,10 @@ async def a_request(method: str, **kwargs) -> Response: await a_request( "POST", url=url, - params=dict(kettle="boiling"), - data=dict(kettle_number="royal_01"), + params={"kettle": "boiling"}, + data={"kettle_number": "royal_01"}, ) - assert status.HTTP_503_SERVICE_UNAVAILABLE == exec_info.value.status_code + assert exec_info.value.status_code == status.HTTP_503_SERVICE_UNAVAILABLE # ERROR test_utils_client_decorators:client_decorators.py:76 AService service error: # |Request| diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py 
b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index 970cdad75b77..dfb7c0326b11 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -78,8 +78,8 @@ def test_get_resource_tracking_run_id( @pytest.mark.parametrize( "task", [ - CompTaskAtDB.parse_obj(example) - for example in CompTaskAtDB.Config.schema_extra["examples"] + CompTaskAtDB.model_validate(example) + for example in CompTaskAtDB.model_config["json_schema_extra"]["examples"] ], ids=str, ) diff --git a/services/director-v2/tests/unit/test_utils_computation.py b/services/director-v2/tests/unit/test_utils_computation.py index 184a65d0db7d..14c9ffa34f3b 100644 --- a/services/director-v2/tests/unit/test_utils_computation.py +++ b/services/director-v2/tests/unit/test_utils_computation.py @@ -27,7 +27,7 @@ def fake_task_file(mocks_dir: Path): @pytest.fixture(scope="session") def fake_task(fake_task_file: Path) -> CompTaskAtDB: - return CompTaskAtDB.parse_file(fake_task_file) + return CompTaskAtDB.model_validate_json(fake_task_file.read_text()) # NOTE: these parametrizations are made to mimic something like a sleepers project diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py index 3ab2c68fea17..125f3153db46 100644 --- a/services/director-v2/tests/unit/test_utils_dags.py +++ b/services/director-v2/tests/unit/test_utils_dags.py @@ -476,7 +476,7 @@ def pipeline_test_params( for x in range(_MANY_NODES) }, [ - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id=f"node_{x}", schema=NodeSchema(inputs={}, outputs={}), @@ -493,7 +493,7 @@ def pipeline_test_params( ) for x in range(_MANY_NODES) ], - PipelineDetails.construct( + PipelineDetails.model_construct( adjacency_list={f"node_{x}": [] for x in range(_MANY_NODES)}, progress=1.0, node_states={ @@ -527,7 +527,7 @@ def pipeline_test_params( }, [ # NOTE: we use construct here to be able to use non uuid names to simplify test setup - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_1", schema=NodeSchema(inputs={}, outputs={}), @@ -541,7 +541,7 @@ def pipeline_test_params( modified=datetime.datetime.now(tz=datetime.timezone.utc), last_heartbeat=None, ), - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_2", schema=NodeSchema(inputs={}, outputs={}), @@ -555,7 +555,7 @@ def pipeline_test_params( modified=datetime.datetime.now(tz=datetime.timezone.utc), last_heartbeat=None, ), - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_3", schema=NodeSchema(inputs={}, outputs={}), @@ -571,7 +571,7 @@ def pipeline_test_params( progress=1.00, ), ], - PipelineDetails.construct( + PipelineDetails.model_construct( adjacency_list={ "node_1": ["node_2", "node_3"], "node_2": ["node_3"], @@ -597,5 +597,6 @@ async def test_compute_pipeline_details( pipeline_test_params.comp_tasks, ) assert ( - received_details.dict() == pipeline_test_params.expected_pipeline_details.dict() + received_details.model_dump() + == pipeline_test_params.expected_pipeline_details.model_dump() ) diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py index ce200feef977..518552af1e15 100644 --- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py +++ 
b/services/director-v2/tests/unit/test_utils_distributed_identifier.py @@ -10,7 +10,7 @@ from uuid import UUID, uuid4 import pytest -from pydantic import BaseModel, NonNegativeInt, StrBytes +from pydantic import BaseModel, NonNegativeInt from pytest_mock import MockerFixture from servicelib.redis import RedisClientSDK from servicelib.utils import logged_gather @@ -132,14 +132,16 @@ def _serialize_identifier(cls, identifier: UserDefinedID) -> str: return f"{identifier._id}" # noqa: SLF001 @classmethod - def _deserialize_cleanup_context(cls, raw: StrBytes) -> AnEmptyTextCleanupContext: - return AnEmptyTextCleanupContext.parse_raw(raw) + def _deserialize_cleanup_context( + cls, raw: str | bytes + ) -> AnEmptyTextCleanupContext: + return AnEmptyTextCleanupContext.model_validate_json(raw) @classmethod def _serialize_cleanup_context( cls, cleanup_context: AnEmptyTextCleanupContext ) -> str: - return cleanup_context.json() + return cleanup_context.model_dump_json() async def is_used( self, identifier: UserDefinedID, cleanup_context: AnEmptyTextCleanupContext diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 8dd5527f00a3..a3234328c9f8 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -6,7 +6,6 @@ import datetime -import json from collections.abc import Awaitable, Callable, Iterator from typing import Any, cast from uuid import uuid4 @@ -59,7 +58,7 @@ def creator(**pipeline_kwargs) -> CompPipelineAtDB: ) assert result - new_pipeline = CompPipelineAtDB.from_orm(result.first()) + new_pipeline = CompPipelineAtDB.model_validate(result.first()) created_pipeline_ids.append(f"{new_pipeline.project_id}") return new_pipeline @@ -92,7 +91,9 @@ def creator( "inputs": ( { key: ( - json.loads(value.json(by_alias=True, exclude_unset=True)) + value.model_dump( + mode="json", by_alias=True, exclude_unset=True + ) if isinstance(value, BaseModel) else value ) @@ -104,7 +105,9 @@ def creator( "outputs": ( { key: ( - json.loads(value.json(by_alias=True, exclude_unset=True)) + value.model_dump( + mode="json", by_alias=True, exclude_unset=True + ) if isinstance(value, BaseModel) else value ) @@ -113,9 +116,9 @@ def creator( if node_data.outputs else {} ), - "image": Image(name=node_data.key, tag=node_data.version).dict( # type: ignore + "image": Image(name=node_data.key, tag=node_data.version).model_dump( by_alias=True, exclude_unset=True - ), # type: ignore + ), "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, "submit": datetime.datetime.now(tz=datetime.UTC), @@ -134,7 +137,7 @@ def creator( .values(**task_config) .returning(sa.literal_column("*")) ) - new_task = CompTaskAtDB.from_orm(result.first()) + new_task = CompTaskAtDB.model_validate(result.first()) created_tasks.append(new_task) created_task_ids.extend([t.task_id for t in created_tasks if t.task_id]) return created_tasks @@ -205,7 +208,7 @@ def creator( .values(**jsonable_encoder(run_config)) .returning(sa.literal_column("*")) ) - new_run = CompRunsAtDB.from_orm(result.first()) + new_run = CompRunsAtDB.model_validate(result.first()) created_run_ids.append(new_run.run_id) return new_run @@ -223,10 +226,10 @@ def cluster( created_cluster_ids: list[str] = [] def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster: - cluster_config = Cluster.Config.schema_extra["examples"][1] + cluster_config = Cluster.model_config["json_schema_extra"]["examples"][1] cluster_config["owner"] = 
user["primary_gid"] cluster_config.update(**cluster_kwargs) - new_cluster = Cluster.parse_obj(cluster_config) + new_cluster = Cluster.model_validate(cluster_config) assert new_cluster with postgres_db.connect() as conn: diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 634574846134..19ab0ea2df35 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -2,7 +2,6 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import json import random from collections.abc import Callable, Iterator from typing import Any @@ -11,6 +10,7 @@ import pytest import sqlalchemy as sa from _dask_helpers import DaskGatewayServer +from common_library.serialization import model_dump_with_secrets from distributed.deploy.spec import SpecCluster from faker import Faker from httpx import URL @@ -30,9 +30,8 @@ ClusterAuthentication, SimpleAuthentication, ) -from pydantic import AnyHttpUrl, SecretStr, parse_obj_as +from pydantic import AnyHttpUrl, SecretStr, TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.clusters import ClusterType, clusters from starlette import status @@ -70,7 +69,7 @@ def creator() -> dict[str, Any]: "username": faker.user_name(), "password": faker.password(), } - assert SimpleAuthentication.parse_obj(simple_auth) + assert SimpleAuthentication.model_validate(simple_auth) return simple_auth return creator @@ -94,7 +93,9 @@ async def test_list_clusters( # there is no cluster at the moment, the list shall contain the default cluster response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -109,7 +110,9 @@ async def test_list_clusters( response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == NUM_CLUSTERS + 1 ) # the default cluster comes on top of the NUM_CLUSTERS @@ -121,7 +124,9 @@ async def test_list_clusters( user_2 = registered_user() response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}") assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -147,7 +152,7 @@ async def test_list_clusters( response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}") assert response.status_code == status.HTTP_200_OK - user_2_clusters = parse_obj_as(list[ClusterGet], response.json()) + user_2_clusters = TypeAdapter(list[ClusterGet]).validate_python(response.json()) # we should find 3 clusters + the default cluster assert len(user_2_clusters) == 3 + 1 for name in [ @@ -187,11 +192,11 @@ async def 
test_get_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) assert returned_cluster - assert the_cluster.dict(exclude={"authentication"}) == returned_cluster.dict( + assert the_cluster.model_dump( exclude={"authentication"} - ) + ) == returned_cluster.model_dump(exclude={"authentication"}) user_2 = registered_user() # getting the same cluster for user 2 shall return 403 @@ -283,7 +288,7 @@ async def test_get_default_cluster( get_cluster_url = URL(f"/v2/clusters/default?user_id={user_1['id']}") response = await async_client.get(get_cluster_url) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) assert returned_cluster assert returned_cluster.id == 0 assert returned_cluster.name == "Default cluster" @@ -307,24 +312,24 @@ async def test_create_cluster( authentication=cluster_simple_authentication(), name=faker.name(), type=random.choice(list(ClusterType)), + owner=faker.pyint(min_value=1), ) response = await async_client.post( create_cluster_url, - json=json.loads( - cluster_data.json( - by_alias=True, - exclude_unset=True, - encoder=create_json_encoder_wo_secrets(ClusterCreate), - ) + json=model_dump_with_secrets( + cluster_data, + show_secrets=True, + by_alias=True, + exclude_unset=True, ), ) assert response.status_code == status.HTTP_201_CREATED, f"received: {response.text}" - created_cluster = parse_obj_as(ClusterGet, response.json()) + created_cluster = ClusterGet.model_validate(response.json()) assert created_cluster - assert cluster_data.dict( + assert cluster_data.model_dump( exclude={"id", "owner", "access_rights", "authentication"} - ) == created_cluster.dict( + ) == created_cluster.model_dump( exclude={"id", "owner", "access_rights", "authentication"} ) @@ -354,10 +359,8 @@ async def test_update_own_cluster( # try to modify one that does not exist response = await async_client.patch( f"/v2/clusters/15615165165165?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -371,23 +374,21 @@ async def test_update_own_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - original_cluster = parse_obj_as(ClusterGet, response.json()) + original_cluster = ClusterGet.model_validate(response.json()) # now we modify nothing response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) - assert returned_cluster.dict() == original_cluster.dict() + returned_cluster = ClusterGet.model_validate(response.json()) + assert returned_cluster.model_dump() == original_cluster.model_dump() # modify some simple things - 
expected_modified_cluster = original_cluster.copy() + expected_modified_cluster = original_cluster.model_copy() for cluster_patch in [ ClusterPatch(name=faker.name()), ClusterPatch(description=faker.text()), @@ -396,10 +397,8 @@ async def test_update_own_cluster( ClusterPatch(endpoint=faker.uri()), ClusterPatch(authentication=cluster_simple_authentication()), ]: - jsonable_cluster_patch = json.loads( - cluster_patch.json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + jsonable_cluster_patch = model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ) print(f"--> patching cluster with {jsonable_cluster_patch}") response = await async_client.patch( @@ -407,13 +406,15 @@ async def test_update_own_cluster( json=jsonable_cluster_patch, ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) - expected_modified_cluster = expected_modified_cluster.copy( - update=cluster_patch.dict(**_PATCH_EXPORT) + returned_cluster = ClusterGet.model_validate(response.json()) + expected_modified_cluster = expected_modified_cluster.model_copy( + update=cluster_patch.model_dump(**_PATCH_EXPORT) ) - assert returned_cluster.dict( + assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump( + exclude={"authentication": {"password"}} + ) # we can change the access rights, the owner rights are always kept user_2 = registered_user() @@ -427,34 +428,32 @@ async def test_update_own_cluster( cluster_patch = ClusterPatch(accessRights={user_2["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=cluster_patch.dict(**_PATCH_EXPORT), + json=cluster_patch.model_dump(**_PATCH_EXPORT), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.access_rights[user_2["primary_gid"]] = rights - assert returned_cluster.dict( + assert returned_cluster.model_dump( + exclude={"authentication": {"password"}} + ) == expected_modified_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) # we can change the owner since we are admin cluster_patch = ClusterPatch(owner=user_2["primary_gid"]) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) - ), + json=model_dump_with_secrets(cluster_patch, show_secrets=True, **_PATCH_EXPORT), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.owner = user_2["primary_gid"] expected_modified_cluster.access_rights[ user_2["primary_gid"] ] = CLUSTER_ADMIN_RIGHTS - assert returned_cluster.dict( + assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) # we should not be able to reduce the 
rights of the new owner cluster_patch = ClusterPatch( @@ -462,11 +461,7 @@ async def test_update_own_cluster( ) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) - ), + json=model_dump_with_secrets(cluster_patch, show_secrets=True, **_PATCH_EXPORT), ) assert ( response.status_code == status.HTTP_403_FORBIDDEN @@ -486,10 +481,8 @@ async def test_update_default_cluster_fails( # try to modify one that does not exist response = await async_client.patch( f"/v2/clusters/default?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY @@ -545,7 +538,7 @@ async def test_update_another_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - parse_obj_as(ClusterGet, response.json()) + ClusterGet.model_validate(response.json()) # let's try to modify stuff as we are user 2 for cluster_patch in [ @@ -558,11 +551,8 @@ async def test_update_another_cluster( ]: response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -581,11 +571,8 @@ async def test_update_another_cluster( cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -602,11 +589,8 @@ async def test_update_another_cluster( cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -729,16 +713,14 @@ async def test_ping_invalid_cluster_raises_422( # calling with correct data but non existing cluster also raises some_fake_cluster = ClusterPing( endpoint=faker.url(), - authentication=parse_obj_as( - ClusterAuthentication, cluster_simple_authentication() + authentication=TypeAdapter(ClusterAuthentication).validate_python( + cluster_simple_authentication() ), ) response = await async_client.post( "/v2/clusters:ping", - json=json.loads( - some_fake_cluster.json( - by_alias=True, encoder=create_json_encoder_wo_secrets(ClusterPing) - ) + json=model_dump_with_secrets( + some_fake_cluster, show_secrets=True, by_alias=True ), ) with pytest.raises(httpx.HTTPStatusError): @@ -751,20 +733,19 @@ async def test_ping_cluster( local_dask_gateway_server: DaskGatewayServer, ): valid_cluster = ClusterPing( - endpoint=parse_obj_as(AnyHttpUrl, local_dask_gateway_server.address), + endpoint=TypeAdapter(AnyHttpUrl).validate_python( + local_dask_gateway_server.address + ), 
authentication=SimpleAuthentication( username="pytest_user", - password=parse_obj_as(SecretStr, local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python( + local_dask_gateway_server.password + ), ), ) response = await async_client.post( "/v2/clusters:ping", - json=json.loads( - valid_cluster.json( - by_alias=True, - encoder=create_json_encoder_wo_secrets(SimpleAuthentication), - ) - ), + json=model_dump_with_secrets(valid_cluster, show_secrets=True, by_alias=True), ) response.raise_for_status() assert response.status_code == status.HTTP_204_NO_CONTENT @@ -792,7 +773,9 @@ async def test_ping_specific_cluster( endpoint=local_dask_gateway_server.address, authentication=SimpleAuthentication( username="pytest_user", - password=parse_obj_as(SecretStr, local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python( + local_dask_gateway_server.password + ), ), ) for n in range(111) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py index 2b509ab1a6ff..5dd1abaa594e 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py @@ -85,7 +85,7 @@ async def test_local_dask_gateway_server(local_dask_gateway_server: DaskGatewayS async with cluster.get_client() as client: print(f"--> created new client {client=}, submitting a job") - res = await client.submit(lambda x: x + 1, 1) # type: ignore + res = await client.submit(lambda x: x + 1, 1) assert res == 2 print(f"--> scaling cluster {cluster=} back to 0") @@ -114,12 +114,12 @@ async def test_get_default_cluster_details( f"/v2/clusters/default/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - default_cluster_out = ClusterDetailsGet.parse_obj(response.json()) + default_cluster_out = ClusterDetailsGet.model_validate(response.json()) response = await async_client.get( f"/v2/clusters/{0}/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - assert default_cluster_out == ClusterDetailsGet.parse_obj(response.json()) + assert default_cluster_out == ClusterDetailsGet.model_validate(response.json()) async def _get_cluster_details( @@ -130,7 +130,7 @@ async def _get_cluster_details( ) assert response.status_code == status.HTTP_200_OK print(f"<-- received cluster details response {response=}") - cluster_out = ClusterDetailsGet.parse_obj(response.json()) + cluster_out = ClusterDetailsGet.model_validate(response.json()) assert cluster_out print(f"<-- received cluster details {cluster_out=}") assert cluster_out.scheduler, "the cluster's scheduler is not started!" 
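# NOTE: minimal sketch (not part of the diff) of the pydantic v1 -> v2 method
# renames applied throughout these tests; the model below is illustrative only:
from pydantic import BaseModel

class _ExampleModel(BaseModel):  # hypothetical stand-in for ClusterDetailsGet & co.
    name: str = "cluster"

_m = _ExampleModel.model_validate({"name": "default"})  # was: _ExampleModel.parse_obj(...)
assert _m.model_dump() == {"name": "default"}           # was: _m.dict()
assert _m.model_dump_json() == '{"name":"default"}'     # was: _m.json()
_copy = _m.model_copy(update={"name": "other"})         # was: _m.copy(update=...)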
@@ -155,7 +155,7 @@ async def test_get_cluster_details( authentication=SimpleAuthentication( username=gateway_username, password=SecretStr(local_dask_gateway_server.password), - ).dict(by_alias=True), + ).model_dump(by_alias=True), ) # in its present state, the cluster should have no workers cluster_out = await _get_cluster_details( diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 1135465ef61b..add9c4d77d38 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -6,7 +6,7 @@ # pylint: disable=unused-variable # pylint:disable=too-many-positional-arguments -import datetime +import datetime as dt import json import re import urllib.parse @@ -23,6 +23,7 @@ import respx from faker import Faker from fastapi import FastAPI, status +from models_library.api_schemas_catalog.services import ServiceGet from models_library.api_schemas_clusters_keeper.ec2_instances import EC2InstanceTypeGet from models_library.api_schemas_directorv2.comp_tasks import ( ComputationCreate, @@ -33,7 +34,6 @@ PricingPlanGet, PricingUnitGet, ) -from models_library.basic_types import VersionStr from models_library.clusters import DEFAULT_CLUSTER_ID, Cluster, ClusterID from models_library.projects import ProjectAtDB from models_library.projects_nodes import NodeID, NodeState @@ -49,7 +49,7 @@ ) from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.wallets import WalletInfo -from pydantic import AnyHttpUrl, ByteSize, PositiveInt, ValidationError, parse_obj_as +from pydantic import AnyHttpUrl, ByteSize, PositiveInt, TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.rabbit import RabbitSettings @@ -111,23 +111,24 @@ def fake_service_details(mocks_dir: Path) -> ServiceMetaDataPublished: @pytest.fixture def fake_service_extras() -> ServiceExtras: - extra_example = ServiceExtras.Config.schema_extra["examples"][2] - random_extras = ServiceExtras(**extra_example) + extra_example = ServiceExtras.model_config["json_schema_extra"]["examples"][2] # type: ignore + random_extras = ServiceExtras(**extra_example) # type: ignore assert random_extras is not None return random_extras @pytest.fixture def fake_service_resources() -> ServiceResourcesDict: - return parse_obj_as( - ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + return TypeAdapter(ServiceResourcesDict).validate_python( + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore ) @pytest.fixture def fake_service_labels() -> dict[str, Any]: - return choice(SimcoreServiceLabels.Config.schema_extra["examples"]) # noqa: S311 + return choice( # noqa: S311 + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"] # type: ignore + ) @pytest.fixture @@ -143,13 +144,14 @@ def mocked_director_service_fcts( assert_all_called=False, assert_all_mocked=True, ) as respx_mock: - assert VersionStr.regex respx_mock.get( re.compile( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+$" ), name="get_service", - ).respond(json={"data": [fake_service_details.dict(by_alias=True)]}) + ).respond( + json={"data": [fake_service_details.model_dump(mode="json", by_alias=True)]} + ) respx_mock.get( re.compile( 
r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+/labels" @@ -162,7 +164,9 @@ def mocked_director_service_fcts( r"/service_extras/(simcore)%2F(services)%2F(comp|dynamic|frontend)%2F.+/(.+)" ), name="get_service_extras", - ).respond(json={"data": fake_service_extras.dict(by_alias=True)}) + ).respond( + json={"data": fake_service_extras.model_dump(mode="json", by_alias=True)} + ) yield respx_mock @@ -175,7 +179,7 @@ def mocked_catalog_service_fcts( ) -> Iterator[respx.MockRouter]: def _mocked_service_resources(request) -> httpx.Response: return httpx.Response( - 200, json=jsonable_encoder(fake_service_resources, by_alias=True) + httpx.codes.OK, json=jsonable_encoder(fake_service_resources, by_alias=True) ) def _mocked_services_details( @@ -184,7 +188,7 @@ def _mocked_services_details( return httpx.Response( 200, json=jsonable_encoder( - fake_service_details.copy( + fake_service_details.model_copy( update={ "key": urllib.parse.unquote(service_key), "version": service_version, @@ -225,19 +229,30 @@ def mocked_catalog_service_fcts_deprecated( def _mocked_services_details( request, service_key: str, service_version: str ) -> httpx.Response: + data_published = fake_service_details.model_copy( + update={ + "key": urllib.parse.unquote(service_key), + "version": service_version, + "deprecated": ( + dt.datetime.now(tz=dt.UTC) - dt.timedelta(days=1) + ).isoformat(), + } + ).model_dump(by_alias=True) + + deprecated = { + "deprecated": ( + dt.datetime.now(tz=dt.UTC) - dt.timedelta(days=1) + ).isoformat() + } + + data = {**ServiceGet.model_config["json_schema_extra"]["examples"][0], **data_published, **deprecated} # type: ignore + + payload = ServiceGet.model_validate(data) + return httpx.Response( - 200, + httpx.codes.OK, json=jsonable_encoder( - fake_service_details.copy( - update={ - "key": urllib.parse.unquote(service_key), - "version": service_version, - "deprecated": ( - datetime.datetime.now(tz=datetime.timezone.utc) - - datetime.timedelta(days=1) - ).isoformat(), - } - ), + payload, by_alias=True, ), ) @@ -259,7 +274,7 @@ def _mocked_services_details( @pytest.fixture( - params=PricingPlanGet.Config.schema_extra["examples"], + params=PricingPlanGet.model_config["json_schema_extra"]["examples"], ids=["with ec2 restriction", "without"], ) def default_pricing_plan(request: pytest.FixtureRequest) -> PricingPlanGet: @@ -303,7 +318,7 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: ( default_pricing_plan.pricing_units[0] if default_pricing_plan.pricing_units - else PricingUnitGet.Config.schema_extra["examples"][0] + else PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ), by_alias=True, ), @@ -422,7 +437,7 @@ def fake_ec2_cpus() -> PositiveInt: @pytest.fixture def fake_ec2_ram() -> ByteSize: - return parse_obj_as(ByteSize, "4GiB") + return TypeAdapter(ByteSize).validate_python("4GiB") @pytest.fixture @@ -463,7 +478,9 @@ def mocked_clusters_keeper_service_get_instance_type_details_with_invalid_name( ) -@pytest.fixture(params=ServiceResourcesDictHelpers.Config.schema_extra["examples"]) +@pytest.fixture( + params=ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"] +) def project_nodes_overrides(request: pytest.FixtureRequest) -> dict[str, Any]: return request.param @@ -572,7 +589,11 @@ async def test_create_computation_with_wallet( @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.model_construct( + 
**PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + ) + ], ) async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_raises_422( minimal_configuration: None, @@ -610,7 +631,11 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.model_construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] # type: ignore + ) + ], ) async def test_create_computation_with_wallet_with_no_clusters_keeper_raises_503( minimal_configuration: None, @@ -711,9 +736,9 @@ async def test_start_computation_with_project_node_resources_defined( proj = await project( user, project_nodes_overrides={ - "required_resources": ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0] + "required_resources": ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0] }, workbench=fake_workbench_without_outputs, ) @@ -864,7 +889,7 @@ async def test_get_computation_from_empty_project( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, @@ -872,8 +897,8 @@ async def test_get_computation_from_empty_project( pipeline_details=PipelineDetails( adjacency_list={}, node_states={}, progress=None ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), stop_url=None, result=None, @@ -883,7 +908,7 @@ async def test_get_computation_from_empty_project( stopped=None, submitted=None, ) - assert returned_computation.dict() == expected_computation.dict() + assert returned_computation.model_dump() == expected_computation.model_dump() async def test_get_computation_from_not_started_computation_task( @@ -913,14 +938,14 @@ async def test_get_computation_from_not_started_computation_task( comp_tasks = tasks(user=user, project=proj) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, state=RunningState.NOT_STARTED, pipeline_details=PipelineDetails( - adjacency_list=parse_obj_as( - dict[NodeID, list[NodeID]], fake_workbench_adjacency + adjacency_list=TypeAdapter(dict[NodeID, list[NodeID]]).validate_python( + fake_workbench_adjacency ), progress=0, node_states={ @@ -938,8 +963,8 @@ async def test_get_computation_from_not_started_computation_task( if t.node_class == NodeClass.COMPUTATIONAL }, ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), stop_url=None, result=None, @@ -950,12 +975,12 @@ async def test_get_computation_from_not_started_computation_task( submitted=None, ) _CHANGED_FIELDS = {"submitted"} - assert returned_computation.dict( + assert returned_computation.model_dump( exclude=_CHANGED_FIELDS - ) == 
expected_computation.dict(exclude=_CHANGED_FIELDS) - assert returned_computation.dict( + ) == expected_computation.model_dump(exclude=_CHANGED_FIELDS) + assert returned_computation.model_dump( include=_CHANGED_FIELDS - ) != expected_computation.dict(include=_CHANGED_FIELDS) + ) != expected_computation.model_dump(include=_CHANGED_FIELDS) async def test_get_computation_from_published_computation_task( @@ -983,7 +1008,7 @@ async def test_get_computation_from_published_computation_task( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_stop_url = async_client.base_url.join( f"/v2/computations/{proj.uuid}:stop?user_id={user['id']}" @@ -992,8 +1017,8 @@ async def test_get_computation_from_published_computation_task( id=proj.uuid, state=RunningState.PUBLISHED, pipeline_details=PipelineDetails( - adjacency_list=parse_obj_as( - dict[NodeID, list[NodeID]], fake_workbench_adjacency + adjacency_list=TypeAdapter(dict[NodeID, list[NodeID]]).validate_python( + fake_workbench_adjacency ), node_states={ t.node_id: NodeState( @@ -1011,10 +1036,10 @@ async def test_get_computation_from_published_computation_task( }, progress=0, ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), - stop_url=parse_obj_as(AnyHttpUrl, f"{expected_stop_url}"), + stop_url=TypeAdapter(AnyHttpUrl).validate_python(f"{expected_stop_url}"), result=None, iteration=1, cluster_id=DEFAULT_CLUSTER_ID, @@ -1024,9 +1049,9 @@ async def test_get_computation_from_published_computation_task( ) _CHANGED_FIELDS = {"submitted"} - assert returned_computation.dict( + assert returned_computation.model_dump( exclude=_CHANGED_FIELDS - ) == expected_computation.dict(exclude=_CHANGED_FIELDS) - assert returned_computation.dict( + ) == expected_computation.model_dump(exclude=_CHANGED_FIELDS) + assert returned_computation.model_dump( include=_CHANGED_FIELDS - ) != expected_computation.dict(include=_CHANGED_FIELDS) + ) != expected_computation.model_dump(include=_CHANGED_FIELDS) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index 6f75f43c59fc..10bd1ba3a2fb 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -9,6 +9,7 @@ from uuid import uuid4 import httpx +from pydantic import TypeAdapter import pytest from faker import Faker from fastapi import FastAPI, status @@ -21,7 +22,6 @@ from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import parse_obj_as, parse_raw_as from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director_v2.core.settings import AppSettings @@ -69,7 +69,7 @@ def _get_app(async_client: httpx.AsyncClient) -> FastAPI: settings: AppSettings = app.state.settings assert settings - print(settings.json(indent=1)) + print(settings.model_dump_json(indent=1)) return async_client @@ -162,7 +162,7 @@ async def 
test_get_all_tasks_log_files( # test expected response according to OAS! assert resp.status_code == status.HTTP_200_OK - log_files = parse_raw_as(list[TaskLogFileGet], resp.text) + log_files = TypeAdapter(list[TaskLogFileGet]).validate_json(resp.text) assert log_files assert all(l.download_link for l in log_files) @@ -180,7 +180,7 @@ async def test_get_task_logs_file( ) assert resp.status_code == status.HTTP_200_OK - log_file = TaskLogFileGet.parse_raw(resp.text) + log_file = TaskLogFileGet.model_validate_json(resp.text) assert log_file.download_link @@ -197,7 +197,7 @@ async def test_get_tasks_outputs( assert resp.status_code == status.HTTP_200_OK - tasks_outputs = parse_obj_as(TasksOutputs, resp.json()) + tasks_outputs = TasksOutputs.model_validate(resp.json()) assert selection == set(tasks_outputs.nodes_outputs.keys()) outputs = tasks_outputs.nodes_outputs[node_id] diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index cc0246bfec9b..5787fa119e1c 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -162,12 +162,18 @@ async def mock_retrieve_features( assert_all_mocked=True, ) as respx_mock: if is_legacy: - service_details = RunningDynamicServiceDetails.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + service_details = RunningDynamicServiceDetails.model_validate( + RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] ) respx_mock.post( f"{service_details.legacy_service_url}/retrieve", name="retrieve" - ).respond(json=RetrieveDataOutEnveloped.Config.schema_extra["examples"][0]) + ).respond( + json=RetrieveDataOutEnveloped.model_config["json_schema_extra"][ + "examples" + ][0] + ) yield respx_mock # no cleanup required @@ -185,7 +191,7 @@ async def mock_retrieve_features( ] = scheduler_data_from_http_request respx_mock.post( - f"{scheduler_data_from_http_request.endpoint}/v1/containers/ports/inputs:pull", + f"{scheduler_data_from_http_request.endpoint}v1/containers/ports/inputs:pull", name="service_pull_input_ports", ).respond(json="mocked_task_id", status_code=status.HTTP_202_ACCEPTED) @@ -230,7 +236,9 @@ def mocked_director_v0_service_api( name="running interactive service", ).respond( json={ - "data": RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + "data": RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] } ) @@ -246,8 +254,10 @@ def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails: if exp_status_code == status.HTTP_307_TEMPORARY_REDIRECT: raise DynamicSidecarNotFoundError(node_uuid) - return RunningDynamicServiceDetails.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + return RunningDynamicServiceDetails.model_validate( + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ) module_base = "simcore_service_director_v2.modules.dynamic_sidecar.scheduler" @@ -279,8 +289,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ 
+ "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -288,8 +302,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -297,8 +315,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -316,12 +338,12 @@ def test_create_dynamic_services( exp_status_code: int, is_legacy: bool, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", headers=dynamic_sidecar_headers, - json=json.loads(post_data.json()), + json=json.loads(post_data.model_dump_json()), follow_redirects=False, ) assert ( @@ -351,8 +373,12 @@ def test_create_dynamic_services( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -360,8 +386,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -369,8 +399,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -409,8 +443,12 @@ def test_get_service_status( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -418,8 +456,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - 
service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -427,8 +469,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -481,8 +527,12 @@ def dynamic_sidecar_scheduler(minimal_app: FastAPI) -> DynamicSidecarsScheduler: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ) @@ -500,12 +550,12 @@ def test_delete_service_waiting_for_manual_intervention( is_legacy: bool, dynamic_sidecar_scheduler: DynamicSidecarsScheduler, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", headers=dynamic_sidecar_headers, - json=json.loads(post_data.json()), + json=json.loads(post_data.model_dump_json()), ) assert ( response.status_code == exp_status_code @@ -528,8 +578,12 @@ def test_delete_service_waiting_for_manual_intervention( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_200_OK, is_legacy=True, ), @@ -537,8 +591,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -546,8 +604,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -571,7 +633,8 @@ def test_retrieve( response.status_code == exp_status_code ), f"expected status code {exp_status_code}, received {response.status_code}: {response.text}" assert ( - response.json() == RetrieveDataOutEnveloped.Config.schema_extra["examples"][0] + response.json() + == 
RetrieveDataOutEnveloped.model_config["json_schema_extra"]["examples"][0] ) diff --git a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index 43beec859003..1892e2a5a38f 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py +++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -106,8 +106,10 @@ def node_id(faker: Faker) -> NodeID: def mock_get_node_state(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.cli._core._get_dy_service_state", - return_value=DynamicServiceGet.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + return_value=DynamicServiceGet.model_validate( + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index fbc90204f836..8567c8ccca02 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -41,7 +41,7 @@ RabbitResourceTrackingStoppedMessage, ) from models_library.users import UserID -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQClient @@ -124,7 +124,7 @@ async def _assert_comp_run_db( & (comp_runs.c.project_uuid == f"{pub_project.project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert ( run_entry.result == expected_state ), f"comp_runs: expected state '{expected_state}, found '{run_entry.result}'" @@ -146,7 +146,7 @@ async def _assert_comp_tasks_db( & (comp_tasks.c.node_id.in_([f"{n}" for n in task_ids])) ) # there is only one entry ) - tasks = parse_obj_as(list[CompTaskAtDB], await result.fetchall()) + tasks = TypeAdapter(list[CompTaskAtDB]).validate_python(await result.fetchall()) assert all( t.state == expected_state for t in tasks ), f"expected state: {expected_state}, found: {[t.state for t in tasks]}" @@ -376,7 +376,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.PUBLISHED # let the scheduler kick in await run_comp_scheduler(scheduler) @@ -390,7 +390,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.ABORTED assert run_entry.metadata == run_metadata @@ -609,7 +609,7 @@ async def _trigger_progress_event( ), ) await cast(DaskScheduler, scheduler)._task_progress_change_handler( # noqa: SLF001 - event.json() + event.model_dump_json() ) @@ -737,18 +737,20 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await 
_assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id def _parser(x) -> RabbitResourceTrackingMessages: - return parse_raw_as(RabbitResourceTrackingMessages, x) + return TypeAdapter(RabbitResourceTrackingMessages).validate_json(x) messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -767,7 +769,7 @@ async def _return_1st_task_success(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_success async def _return_random_task_result(job_id) -> TaskOutputData: - return TaskOutputData.parse_obj({"out_1": None, "out_2": 45}) + return TaskOutputData.model_validate({"out_1": None, "out_2": 45}) mocked_dask_client.get_task_result.side_effect = _return_random_task_result await run_comp_scheduler(scheduler) @@ -780,14 +782,16 @@ async def _return_random_task_result(job_id) -> TaskOutputData: expected_progress=1, ) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStoppedMessage.parse_raw, + RabbitResourceTrackingStoppedMessage.model_validate_json, ) completed_tasks = [exp_started_task] @@ -882,14 +886,16 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -926,14 +932,16 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat mocked_parse_output_data_fct.assert_not_called() expected_pending_tasks.remove(exp_started_task) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStoppedMessage.parse_raw, + RabbitResourceTrackingStoppedMessage.model_validate_json, ) # ------------------------------------------------------------------------------- @@ -970,7 +978,9 @@ async def 
_return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta ) mocked_dask_client.get_task_result.assert_called_once_with(exp_started_task.job_id) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 2, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, + 2, + InstrumentationRabbitMessage.model_validate_json, ) # NOTE: the service was fast and went directly to success assert messages[0].metrics == "service_started" @@ -1032,7 +1042,7 @@ async def test_task_progress_triggers( await cast( DaskScheduler, scheduler )._task_progress_change_handler( # noqa: SLF001 - progress_event.json() + progress_event.model_dump_json() ) # NOTE: not sure whether it should switch to STARTED.. it would make sense await _assert_comp_tasks_db( @@ -1186,7 +1196,7 @@ class RebootState: pytest.param( RebootState( dask_task_status=DaskClientTaskState.SUCCESS, - task_result=TaskOutputData.parse_obj({"whatever_output": 123}), + task_result=TaskOutputData.model_validate({"whatever_output": 123}), expected_task_state_group1=RunningState.SUCCESS, expected_task_progress_group1=1, expected_task_state_group2=RunningState.SUCCESS, @@ -1339,7 +1349,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -1351,7 +1361,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingHeartbeatMessage.parse_raw, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert isinstance(messages[0], RabbitResourceTrackingHeartbeatMessage) @@ -1363,7 +1373,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingHeartbeatMessage.parse_raw, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert isinstance(messages[0], RabbitResourceTrackingHeartbeatMessage) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 0536261ed629..f36a8f8f7f64 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -195,7 +195,7 @@ def dynamic_sidecar_service_spec( f"{to_simcore_runtime_docker_label_key('service_port')}": "80", f"{to_simcore_runtime_docker_label_key('service_key')}": "simcore/services/dynamic/3dviewer", f"{to_simcore_runtime_docker_label_key('service_version')}": "2.4.5", - DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL: scheduler_data_from_http_request.json(), + DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL: scheduler_data_from_http_request.model_dump_json(), }, } @@ -330,8 +330,10 @@ def service_name() -> str: @pytest.fixture( params=[ - SimcoreServiceLabels.parse_obj(example) - for example in SimcoreServiceLabels.Config.schema_extra["examples"] + SimcoreServiceLabels.model_validate(example) + for example in SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ] ], ) def labels_example(request: pytest.FixtureRequest) -> SimcoreServiceLabels: 
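Note for reviewers: the parametrized fixture above shows the recurring migration pattern in these tests — pydantic v1 `Model.Config.schema_extra` lookups become `Model.model_config["json_schema_extra"]`, and `parse_obj` / `parse_raw` / `parse_obj_as` become `model_validate` / `model_validate_json` / `TypeAdapter(...).validate_python`. A minimal sketch of the equivalence, using a hypothetical `ToyLabels` model (not part of this codebase, shown only to illustrate the v2 calls used here):

from pydantic import BaseModel, ConfigDict, TypeAdapter

class ToyLabels(BaseModel):
    # hypothetical stand-in for a model such as SimcoreServiceLabels
    name: str

    model_config = ConfigDict(
        json_schema_extra={"examples": [{"name": "jupyter-math"}]}
    )

# v1: ToyLabels.parse_obj(ToyLabels.Config.schema_extra["examples"][0])
# v2 equivalent, as used in the fixtures above:
example = ToyLabels.model_config["json_schema_extra"]["examples"][0]
labels = ToyLabels.model_validate(example)

# v1 parse_raw / parse_obj_as map onto these v2 calls:
labels_from_json = ToyLabels.model_validate_json(labels.model_dump_json())
many = TypeAdapter(list[ToyLabels]).validate_python([example])
assert labels == labels_from_json == many[0]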
@@ -391,11 +393,11 @@ def test_settings__valid_network_names( monkeypatch: pytest.MonkeyPatch, dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings, ) -> None: - items = dynamic_services_scheduler_settings.dict() + items = dynamic_services_scheduler_settings.model_dump() items["SIMCORE_SERVICES_NETWORK_NAME"] = simcore_services_network_name # validate network names - DynamicServicesSchedulerSettings.parse_obj(items) + DynamicServicesSchedulerSettings.model_validate(items) async def test_failed_docker_client_request(docker_swarm: None): @@ -727,7 +729,7 @@ async def test_update_scheduler_data_label( # fetch stored data in labels service_inspect = await async_docker_client.services.inspect(mock_service) labels = service_inspect["Spec"]["Labels"] - scheduler_data = SchedulerData.parse_raw( + scheduler_data = SchedulerData.model_validate_json( labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL] ) assert scheduler_data == mock_scheduler_data diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index edca0dfe03cf..16032677a98a 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -4,7 +4,6 @@ import json -from collections.abc import Mapping from typing import Any, cast from unittest.mock import Mock @@ -49,7 +48,9 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.parse_obj(S3Settings.Config.schema_extra["examples"][0]) + return S3Settings.model_validate( + S3Settings.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -115,14 +116,16 @@ def swarm_network_id() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: # overwrites global fixture - return SimcoreServiceLabels.parse_obj( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + return SimcoreServiceLabels.model_validate( + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -137,7 +140,7 @@ def expected_dynamic_sidecar_spec( return { "endpoint_spec": {}, "labels": { - "io.simcore.scheduler-data": SchedulerData.parse_obj( + "io.simcore.scheduler-data": SchedulerData.model_validate( { "compose_spec": '{"version": "2.3", "services": {"rt-web": {"image": ' '"${SIMCORE_REGISTRY}/simcore/services/dynamic/sim4life:${SERVICE_VERSION}", ' @@ -180,9 +183,9 @@ def expected_dynamic_sidecar_spec( "state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108 "state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108 }, - "callbacks_mapping": CallbacksMapping.Config.schema_extra[ - "examples" - ][3], + "callbacks_mapping": CallbacksMapping.model_config[ + "json_schema_extra" + ]["examples"][3], "product_name": osparc_product_name, "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", "proxy_service_name": "dy-proxy_75c7f3f4-18f9-4678-8610-54a2ade78eaa", @@ -190,8 +193,12 @@ def expected_dynamic_sidecar_spec( "request_scheme": "http", "request_simcore_user_agent": request_simcore_user_agent, "restart_policy": "on-inputs-downloaded", - "wallet_info": WalletInfo.Config.schema_extra["examples"][0], - 
"pricing_info": PricingInfo.Config.schema_extra["examples"][0], + "wallet_info": WalletInfo.model_config["json_schema_extra"][ + "examples" + ][0], + "pricing_info": PricingInfo.model_config["json_schema_extra"][ + "examples" + ][0], "hardware_info": hardware_info, "service_name": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "service_port": 65534, @@ -244,19 +251,19 @@ def expected_dynamic_sidecar_spec( "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": "3", "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( - '{"REGISTRY_AUTH": false, "REGISTRY_PATH": null, ' - '"REGISTRY_URL": "foo.bar.com", "REGISTRY_USER": ' - '"test", "REGISTRY_PW": "test", "REGISTRY_SSL": false}' + '{"REGISTRY_AUTH":false,"REGISTRY_PATH":null,' + '"REGISTRY_URL":"foo.bar.com","REGISTRY_USER":' + '"test","REGISTRY_PW":"test","REGISTRY_SSL":false}' ), "DY_DOCKER_HUB_REGISTRY_SETTINGS": "null", "DY_SIDECAR_AWS_S3_CLI_SETTINGS": ( - '{"AWS_S3_CLI_S3": {"S3_ACCESS_KEY": "12345678", "S3_BUCKET_NAME": "simcore", ' - '"S3_ENDPOINT": "http://172.17.0.1:9001", "S3_REGION": "us-east-1", "S3_SECRET_KEY": "12345678"}}' + '{"AWS_S3_CLI_S3":{"S3_ACCESS_KEY":"12345678","S3_BUCKET_NAME":"simcore",' + '"S3_ENDPOINT":"http://172.17.0.1:9001/","S3_REGION":"us-east-1","S3_SECRET_KEY":"12345678"}}' ), "DY_SIDECAR_CALLBACKS_MAPPING": ( - '{"metrics": {"service": "rt-web", "command": "ls", "timeout": 1.0}, "before_shutdown"' - ': [{"service": "rt-web", "command": "ls", "timeout": 1.0}, {"service": "s4l-core", ' - '"command": ["ls", "-lah"], "timeout": 1.0}], "inactivity": null}' + '{"metrics":{"service":"rt-web","command":"ls","timeout":1.0},"before_shutdown"' + ':[{"service":"rt-web","command":"ls","timeout":1.0},{"service":"s4l-core",' + '"command":["ls","-lah"],"timeout":1.0}],"inactivity":null}' ), "DY_SIDECAR_SERVICE_KEY": "simcore/services/dynamic/3dviewer", "DY_SIDECAR_SERVICE_VERSION": "2.4.5", @@ -435,12 +442,12 @@ async def test_get_dynamic_proxy_spec( == minimal_app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR ) - expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.parse_obj( + expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ) - assert expected_dynamic_sidecar_spec_model.TaskTemplate - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env + assert expected_dynamic_sidecar_spec_model.task_template + assert expected_dynamic_sidecar_spec_model.task_template.container_spec + assert expected_dynamic_sidecar_spec_model.task_template.container_spec.env for count in range(1, 11): # loop to check it does not repeat copies print(f"{count:*^50}") @@ -463,7 +470,7 @@ async def test_get_dynamic_proxy_spec( rpc_client=Mock(), ) - exclude_keys: Mapping[int | str, Any] = { + exclude_keys = { "Labels": True, "TaskTemplate": {"ContainerSpec": {"Env": True}}, } @@ -471,62 +478,64 @@ async def test_get_dynamic_proxy_spec( # NOTE: some flakiness here # state_exclude is a set and does not preserve order # when dumping to json it gets converted to a list - assert dynamic_sidecar_spec.TaskTemplate - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + assert dynamic_sidecar_spec.task_template + assert dynamic_sidecar_spec.task_template.container_spec + assert dynamic_sidecar_spec.task_template.container_spec.env + assert 
dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] - dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] = json.dumps( sorted( json.loads( - dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] ) ) ) - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + assert expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] - expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] = json.dumps( sorted( json.loads( - expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] ) ) ) - assert dynamic_sidecar_spec.dict( - exclude=exclude_keys - ) == expected_dynamic_sidecar_spec_model.dict(exclude=exclude_keys) - assert dynamic_sidecar_spec.Labels - assert expected_dynamic_sidecar_spec_model.Labels - assert sorted(dynamic_sidecar_spec.Labels.keys()) == sorted( - expected_dynamic_sidecar_spec_model.Labels.keys() + assert dynamic_sidecar_spec.model_dump( + exclude=exclude_keys # type: ignore[arg-type] + ) == expected_dynamic_sidecar_spec_model.model_dump( + exclude=exclude_keys # type: ignore[arg-type] + ) + assert dynamic_sidecar_spec.labels + assert expected_dynamic_sidecar_spec_model.labels + assert sorted(dynamic_sidecar_spec.labels.keys()) == sorted( + expected_dynamic_sidecar_spec_model.labels.keys() ) assert ( - dynamic_sidecar_spec.Labels["io.simcore.scheduler-data"] - == expected_dynamic_sidecar_spec_model.Labels["io.simcore.scheduler-data"] + dynamic_sidecar_spec.labels["io.simcore.scheduler-data"] + == expected_dynamic_sidecar_spec_model.labels["io.simcore.scheduler-data"] ) - assert dynamic_sidecar_spec.Labels == expected_dynamic_sidecar_spec_model.Labels + assert dynamic_sidecar_spec.labels == expected_dynamic_sidecar_spec_model.labels dynamic_sidecar_spec_accumulated = dynamic_sidecar_spec # check reference after multiple runs assert dynamic_sidecar_spec_accumulated is not None assert ( - dynamic_sidecar_spec_accumulated.dict() - == expected_dynamic_sidecar_spec_model.dict() + dynamic_sidecar_spec_accumulated.model_dump() + == expected_dynamic_sidecar_spec_model.model_dump() ) @@ -561,22 +570,22 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( rpc_client=Mock(), ) assert dynamic_sidecar_spec - dynamic_sidecar_spec_dict = dynamic_sidecar_spec.dict() - expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.parse_obj( + dynamic_sidecar_spec_dict = dynamic_sidecar_spec.model_dump() + expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec - ).dict() + ).model_dump() # ensure some entries are sorted the same to prevent flakyness for sorted_dict in [dynamic_sidecar_spec_dict, expected_dynamic_sidecar_spec_dict]: for key in ["DY_SIDECAR_STATE_EXCLUDE", "DY_SIDECAR_STATE_PATHS"]: # this is a json of a list assert isinstance( - sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key], str + sorted_dict["task_template"]["container_spec"]["env"][key], str ) unsorted_list = json.loads( - sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key] + sorted_dict["task_template"]["container_spec"]["env"][key] ) assert isinstance(unsorted_list, list) 
- sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key] = json.dumps( + sorted_dict["task_template"]["container_spec"]["env"][key] = json.dumps( unsorted_list.sort() ) assert dynamic_sidecar_spec_dict == expected_dynamic_sidecar_spec_dict @@ -591,13 +600,15 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert user_service_specs assert "sidecar" in user_service_specs - user_aiodocker_service_spec = AioDockerServiceSpec.parse_obj( + user_aiodocker_service_spec = AioDockerServiceSpec.model_validate( user_service_specs["sidecar"] ) assert user_aiodocker_service_spec - orig_dict = dynamic_sidecar_spec.dict(by_alias=True, exclude_unset=True) - user_dict = user_aiodocker_service_spec.dict(by_alias=True, exclude_unset=True) + orig_dict = dynamic_sidecar_spec.model_dump(by_alias=True, exclude_unset=True) + user_dict = user_aiodocker_service_spec.model_dump( + by_alias=True, exclude_unset=True + ) another_merged_dict = nested_update( orig_dict, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 9a2a93d3a333..977828e4753b 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -36,9 +36,8 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID from models_library.users import UserID -from pydantic import ByteSize +from pydantic import ByteSize, TypeAdapter from pydantic.networks import AnyUrl -from pydantic.tools import parse_obj_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_sdk.node_ports_v2 import FileLinkType @@ -95,16 +94,15 @@ async def mocked_node_ports_filemanager_fcts( 0, FileUploadSchema( urls=[ - parse_obj_as( - AnyUrl, + TypeAdapter(AnyUrl).validate_python( f"{URL(faker.uri()).with_scheme(choice(tasks_file_link_scheme))}", # noqa: S311 ) ], - chunk_size=parse_obj_as(ByteSize, "5GiB"), + chunk_size=TypeAdapter(ByteSize).validate_python("5GiB"), links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, "https://www.fakeabort.com"), - complete_upload=parse_obj_as( - AnyUrl, "https://www.fakecomplete.com" + abort_upload=TypeAdapter(AnyUrl).validate_python("https://www.fakeabort.com"), + complete_upload=TypeAdapter(AnyUrl).validate_python( + "https://www.fakecomplete.com" ), ), ), @@ -198,7 +196,7 @@ def generate_simcore_file_link() -> dict[str, Any]: path=create_simcore_file_id( faker.uuid4(), faker.uuid4(), faker.file_name() ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) TYPE_TO_FAKE_CALLABLE_MAP = { "number": faker.pyfloat, @@ -234,7 +232,7 @@ def fake_task_output_data( ) for key, value in fake_io_data.items() } - data = parse_obj_as(TaskOutputData, converted_data) + data = TypeAdapter(TaskOutputData).validate_python(converted_data) assert data return data @@ -318,7 +316,7 @@ async def test_compute_input_data( sleeper_task.node_id, faker.file_name(), ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) if value_type["type"] == "data:*/*" else fake_io_data[key] ) @@ -334,7 +332,7 @@ def return_fake_input_value(*args, **kwargs): fake_inputs.values(), fake_io_schema.values(), strict=True ): if value_type["type"] == "data:*/*": - yield parse_obj_as(AnyUrl, faker.url()) + yield TypeAdapter(AnyUrl).validate_python(faker.url()) else: yield value @@ 
-448,7 +446,7 @@ async def test_clean_task_output_and_log_files_if_invalid( path=create_simcore_file_id( published_project.project.uuid, sleeper_task.node_id, faker.file_name() ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) for key, value_type in fake_io_schema.items() if value_type["type"] == "data:*/*" } @@ -494,7 +492,7 @@ def _add_is_directory(entry: mock._Call) -> mock._Call: # noqa: SLF001 @pytest.mark.parametrize( - "req_example", NodeRequirements.Config.schema_extra["examples"] + "req_example", NodeRequirements.model_config["json_schema_extra"]["examples"] ) def test_node_requirements_correctly_convert_to_dask_resources( req_example: dict[str, Any] diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py index ccd3f304a0ac..a041f70ecc77 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py @@ -129,7 +129,7 @@ async def test_publish_service_started_metrics( task=random.choice(tasks), # noqa: S311 ) await _assert_message_received( - mocked_message_parser, 1, InstrumentationRabbitMessage.parse_raw + mocked_message_parser, 1, InstrumentationRabbitMessage.model_validate_json ) @@ -154,7 +154,7 @@ async def test_publish_service_stopped_metrics( task_final_state=random.choice(list(RunningState)), # noqa: S311 ) await _assert_message_received( - mocked_message_parser, 1, InstrumentationRabbitMessage.parse_raw + mocked_message_parser, 1, InstrumentationRabbitMessage.model_validate_json ) @@ -177,7 +177,7 @@ async def test_publish_service_resource_tracking_started( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_started( publisher, service_run_id=random_service_run_id, @@ -205,9 +205,11 @@ async def test_publish_service_resource_tracking_started( service_resources={}, service_additional_metadata=faker.pydict(), ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStartedMessage.parse_raw + mocked_message_parser, + 1, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingStartedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -231,7 +233,7 @@ async def test_publish_service_resource_tracking_stopped( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_stopped( publisher, service_run_id=random_service_run_id, @@ -239,9 +241,11 @@ async def test_publish_service_resource_tracking_stopped( list(SimcorePlatformStatus) ), ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStoppedMessage.parse_raw + mocked_message_parser, + 1, + 
RabbitResourceTrackingStoppedMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingStoppedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -265,14 +269,16 @@ async def test_publish_service_resource_tracking_heartbeat( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_heartbeat( publisher, service_run_id=random_service_run_id, ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingHeartbeatMessage.parse_raw + mocked_message_parser, + 1, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingHeartbeatMessage) assert received_messages[0].service_run_id == random_service_run_id diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index a1b64635e58b..cb5631ec20d2 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -313,7 +313,7 @@ async def s3_client(s3_settings: S3Settings) -> AsyncIterable[S3Client]: session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=s3_settings.S3_ENDPOINT, + endpoint_url=f"{s3_settings.S3_ENDPOINT}", aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION,