diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt
index ee6878dd43b..924cfeac561 100644
--- a/services/autoscaling/requirements/_base.txt
+++ b/services/autoscaling/requirements/_base.txt
@@ -48,6 +48,8 @@ aiormq==6.8.0
     # via aio-pika
 aiosignal==1.3.1
     # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.3.0
     # via
     #   fast-depends
@@ -131,20 +133,8 @@ email-validator==2.1.1
     # via pydantic
 fast-depends==2.4.2
     # via faststream
-fastapi==0.99.1
+fastapi==0.115.0
     # via
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/../../../packages/service-library/requirements/_fastapi.in
     #   -r requirements/_base.in
     #   prometheus-fastapi-instrumentator
@@ -353,7 +343,7 @@ psutil==6.0.0
     # via
     #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
     #   -r requirements/../../../packages/service-library/requirements/_base.in
     #   distributed
-pydantic==1.10.15
+pydantic==2.9.2
     # via
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -364,7 +354,6 @@ pydantic==1.10.15
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -381,6 +370,26 @@ pydantic==1.10.15
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
     #   fast-depends
     #   fastapi
+    #   pydantic-extra-types
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+pydantic-settings==2.5.2
+    # via
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 pyinstrument==4.6.2
@@ -391,6 +400,8 @@ python-dateutil==2.9.0.post0
     # via
     #   arrow
     #   botocore
+python-dotenv==1.0.1
+    # via pydantic-settings
 pyyaml==6.0.1
     # via
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -467,7 +478,7 @@ sortedcontainers==2.4.0
     # via
     #   -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
-starlette==0.27.0
+starlette==0.38.6
     # via
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -533,6 +544,7 @@ typing-extensions==4.11.0
     # via
     #   faststream
     #   opentelemetry-sdk
     #   pydantic
+    #   pydantic-core
     #   typer
     #   types-aiobotocore
     #   types-aiobotocore-ec2
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 47379c4d69f..7784681d8b9 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -1,3 +1,7 @@
+annotated-types==0.7.0
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
 antlr4-python3-runtime==4.13.2
     # via moto
 anyio==4.3.0
@@ -181,11 +185,15 @@ py-partiql-parser==0.5.6
     # via moto
 pycparser==2.22
     # via cffi
-pydantic==1.10.15
+pydantic==2.9.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   -c requirements/_base.txt
     #   aws-sam-translator
+pydantic-core==2.23.4
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
 pyparsing==3.1.4
     # via moto
 pytest==8.3.3
@@ -217,7 +225,9 @@ python-dateutil==2.9.0.post0
     # via
     #   faker
     #   moto
 python-dotenv==1.0.1
-    # via -r requirements/_test.in
+    # via
+    #   -c requirements/_base.txt
+    #   -r requirements/_test.in
 pyyaml==6.0.1
     # via
     #   -c requirements/../../../requirements/constraints.txt
@@ -315,6 +325,7 @@ typing-extensions==4.11.0
     #   aws-sam-translator
     #   cfn-lint
     #   pydantic
+    #   pydantic-core
     #   types-aiobotocore
     #   types-aiobotocore-ec2
     #   types-aiobotocore-iam
diff --git a/services/autoscaling/src/simcore_service_autoscaling/_meta.py b/services/autoscaling/src/simcore_service_autoscaling/_meta.py
index 22d3ea19043..c421cfae966 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/_meta.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/_meta.py
@@ -2,6 +2,7 @@
 
 from models_library.basic_types import VersionStr, VersionTag
 from packaging.version import Version
+from pydantic import TypeAdapter
 from servicelib.utils_meta import PackageInfo
 
 info: Final = PackageInfo(package_name="simcore-service-autoscaling")
@@ -10,7 +11,9 @@
 APP_NAME: Final[str] = info.project_name
 API_VERSION: Final[VersionStr] = info.__version__
 VERSION: Final[Version] = info.version
-API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag)
+API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python(
+    info.api_prefix_path_tag
+)
 SUMMARY: Final[str] = info.get_summary()
diff --git a/services/autoscaling/src/simcore_service_autoscaling/constants.py b/services/autoscaling/src/simcore_service_autoscaling/constants.py
index 086c47b906f..39e9f7f7d62 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/constants.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/constants.py
@@ -2,19 +2,19 @@
 from typing import Final
 
 from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2Tags
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 
-BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "pulling"
-)
-BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "ssm-command-id"
-)
+BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("pulling")
+BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("ssm-command-id")
 PREPULL_COMMAND_NAME: Final[str] = "docker images pulling"
 
 DOCKER_JOIN_COMMAND_NAME: Final[str] = "docker swarm join"
-DOCKER_JOIN_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "io.simcore.autoscaling.joined_command_sent"
+DOCKER_JOIN_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python(
+    "io.simcore.autoscaling.joined_command_sent"
 )
@@ -22,18 +22,18 @@
     "docker compose -f /docker-pull.compose.yml -p buffering pull"
 )
 
-PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "io.simcore.autoscaling.pre_pulled_images"
-)
+PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("io.simcore.autoscaling.pre_pulled_images")
 
-BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "io.simcore.autoscaling.buffer_machine"
+BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python(
+    "io.simcore.autoscaling.buffer_machine"
 )
 DEACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = {
-    BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "true")
+    BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("true")
 }
 ACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = {
-    BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "false")
+    BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("false")
 }
 PRE_PULLED_IMAGES_RE: Final[re.Pattern] = re.compile(
     rf"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_\((\d+)\)"
diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py
index 398b1278806..e4294631224 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py
@@ -1,12 +1,7 @@
-from typing import Any
-
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 
 
 class AutoscalingRuntimeError(OsparcErrorMixin, RuntimeError):
-    def __init__(self, **ctx: Any) -> None:
-        super().__init__(**ctx)
-
     msg_template: str = "Autoscaling unexpected error"
diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py
index 23af4b958bf..c3479cf967c 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py
@@ -1,6 +1,6 @@
 import datetime
 from functools import cached_property
-from typing import Any, ClassVar, Final, cast
+from typing import Annotated, Final, cast
 
 from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags
 from fastapi import FastAPI
@@ -14,14 +14,16 @@
 from models_library.clusters import InternalClusterAuthentication
 from models_library.docker import DockerLabelKey
 from pydantic import (
+    AliasChoices,
     AnyUrl,
     Field,
     NonNegativeInt,
     PositiveInt,
-    parse_obj_as,
-    root_validator,
-    validator,
+    TypeAdapter,
+    field_validator,
+    model_validator,
 )
+from pydantic_settings import SettingsConfigDict
 from settings_library.base import BaseCustomSettings
 from settings_library.docker_registry import RegistrySettings
 from settings_library.ec2 import EC2Settings
@@ -42,10 +44,9 @@ class AutoscalingSSMSettings(SSMSettings):
 
 
 class AutoscalingEC2Settings(EC2Settings):
-    class Config(EC2Settings.Config):
-        env_prefix = AUTOSCALING_ENV_PREFIX
-
-        schema_extra: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+    model_config = SettingsConfigDict(
+        env_prefix=AUTOSCALING_ENV_PREFIX,
+        json_schema_extra={
             "examples": [
                 {
                     f"{AUTOSCALING_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id",
@@ -54,7 +55,8 @@ class Config(EC2Settings.Config):
                     f"{AUTOSCALING_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key",
                 }
             ],
-        }
+        },
+    )
 
 
 class EC2InstancesSettings(BaseCustomSettings):
@@ -94,7 +96,7 @@ class EC2InstancesSettings(BaseCustomSettings):
 
     EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field(
         ...,
-        min_items=1,
+        min_length=1,
         description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic"
         " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), "
        " this is required to start a new EC2 instance",
@@ -131,7 +133,7 @@ class EC2InstancesSettings(BaseCustomSettings):
         description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string",
     )
 
-    @validator("EC2_INSTANCES_TIME_BEFORE_DRAINING")
+    @field_validator("EC2_INSTANCES_TIME_BEFORE_DRAINING")
     @classmethod
     def ensure_draining_delay_time_is_in_range(
         cls, value: datetime.timedelta
@@ -142,7 +144,7 @@ def ensure_draining_delay_time_is_in_range(
         value = datetime.timedelta(minutes=1)
         return value
 
-    @validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION")
+    @field_validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION")
     @classmethod
     def ensure_termination_delay_time_is_in_range(
         cls, value: datetime.timedelta
@@ -153,14 +155,14 @@ def ensure_termination_delay_time_is_in_range(
         value = datetime.timedelta(minutes=59)
         return value
 
-    @validator("EC2_INSTANCES_ALLOWED_TYPES")
+    @field_validator("EC2_INSTANCES_ALLOWED_TYPES")
     @classmethod
     def check_valid_instance_names(
         cls, value: dict[str, EC2InstanceBootSpecific]
     ) -> dict[str, EC2InstanceBootSpecific]:
         # NOTE: needed because of a flaw in BaseCustomSettings
         # issubclass raises TypeError if used on Aliases
-        parse_obj_as(list[InstanceTypeType], list(value))
+        TypeAdapter(list[InstanceTypeType]).validate_python(list(value))
         return value
 
 
@@ -182,7 +184,7 @@ class NodesMonitoringSettings(BaseCustomSettings):
 
 
 class DaskMonitoringSettings(BaseCustomSettings):
-    DASK_MONITORING_URL: AnyUrl = Field(
+    DASK_MONITORING_URL: Annotated[str, AnyUrl] = Field(
         ..., description="the url to the osparc-dask-scheduler"
     )
     DASK_SCHEDULER_AUTH: InternalClusterAuthentication = Field(
@@ -218,36 +220,39 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
 
     # RUNTIME  -----------------------------------------------------------
     AUTOSCALING_DEBUG: bool = Field(
-        default=False, description="Debug mode", env=["AUTOSCALING_DEBUG", "DEBUG"]
+        default=False,
+        description="Debug mode",
+        validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"),
     )
-    AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = PortInt(3000)
+    AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = 3000
 
     AUTOSCALING_LOGLEVEL: LogLevel = Field(
-        LogLevel.INFO, env=["AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"]
+        LogLevel.INFO,
+        validation_alias=AliasChoices("AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"),
     )
     AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=[
+        validation_alias=AliasChoices(
             "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED",
             "LOG_FORMAT_LOCAL_DEV_ENABLED",
-        ],
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
 
     AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_SSM_ACCESS: AutoscalingSSMSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_NODES_MONITORING: NodesMonitoringSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_POLL_INTERVAL: datetime.timedelta = Field(
@@ -256,13 +261,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
         "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)",
     )
 
-    AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_REDIS: RedisSettings = Field(auto_default_from_env=True)
+    AUTOSCALING_REDIS: RedisSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_REGISTRY: RegistrySettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_REGISTRY: RegistrySettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
     AUTOSCALING_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
 
@@ -273,7 +286,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
         "but a docker node label named osparc-services-ready is attached",
     )
     AUTOSCALING_TRACING: TracingSettings | None = Field(
-        auto_default_from_env=True, description="settings for opentelemetry tracing"
+        description="settings for opentelemetry tracing",
+        json_schema_extra={"auto_default_from_env": True},
     )
 
     AUTOSCALING_DOCKER_JOIN_DRAINED: bool = Field(
@@ -291,21 +305,27 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
     def LOG_LEVEL(self):  # noqa: N802
         return self.AUTOSCALING_LOGLEVEL
 
-    @validator("AUTOSCALING_LOGLEVEL")
+    @field_validator("AUTOSCALING_LOGLEVEL")
     @classmethod
     def valid_log_level(cls, value: str) -> str:
         return cls.validate_log_level(value)
-
-    @root_validator()
+
+    @field_validator("AUTOSCALING_POLL_INTERVAL", mode="before")
     @classmethod
-    def exclude_both_dynamic_computational_mode(cls, values):
+    def _validate_poll_interval_in_s(cls, v):
+        if isinstance(v, str) and v.isnumeric():
+            return int(v)
+        return v
+
+    @model_validator(mode="after")
+    def exclude_both_dynamic_computational_mode(self) -> "ApplicationSettings":
         if (
-            values.get("AUTOSCALING_DASK") is not None
-            and values.get("AUTOSCALING_NODES_MONITORING") is not None
+            self.AUTOSCALING_DASK is not None
+            and self.AUTOSCALING_NODES_MONITORING is not None
         ):
             msg = "Autoscaling cannot be set to monitor both computational and dynamic services (both AUTOSCALING_DASK and AUTOSCALING_NODES_MONITORING are currently set!)"
             raise ValueError(msg)
-        return values
+        return self
 
 
 def get_application_settings(app: FastAPI) -> ApplicationSettings:
diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py
index ecddfc5e8ec..bb993d0a25d 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py
@@ -151,10 +151,10 @@ async def compute_cluster_used_resources(
                 for i in instances
             )
         )
-        counter = collections.Counter({k: 0 for k in Resources.__fields__})
+        counter = collections.Counter({k: 0 for k in Resources.model_fields})
         for result in list_of_used_resources:
             counter.update(result.dict())
-        return Resources.parse_obj(dict(counter))
+        return Resources.model_validate(dict(counter))
 
     @staticmethod
     async def compute_cluster_total_resources(
diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py
index 133708001ae..c25f72ee546 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py
@@ -1,11 +1,11 @@
 from collections.abc import Iterable
 from operator import itemgetter
 
-from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags
+from aws_library.ec2 import AWS_TAG_VALUE_MAX_LENGTH, AWSTagKey, AWSTagValue, EC2Tags
 from fastapi import FastAPI
 from models_library.docker import DockerGenericTag
 from models_library.utils.json_serialization import json_dumps
-from pydantic import parse_obj_as, parse_raw_as
+from pydantic import TypeAdapter
 
 from ..constants import (
     ACTIVATED_BUFFER_MACHINE_EC2_TAGS,
@@ -29,8 +29,10 @@ def get_deactivated_buffer_ec2_tags(
     base_ec2_tags = (
         auto_scaling_mode.get_ec2_tags(app) | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS
     )
-    base_ec2_tags[AWSTagKey("Name")] = AWSTagValue(
-        f"{base_ec2_tags[AWSTagKey('Name')]}-buffer"
+    base_ec2_tags[TypeAdapter(AWSTagKey).validate_python("Name")] = TypeAdapter(
+        AWSTagValue
+    ).validate_python(
+        f"{base_ec2_tags[TypeAdapter(AWSTagKey).validate_python('Name')]}-buffer"
     )
     return base_ec2_tags
 
@@ -43,20 +45,26 @@ def dump_pre_pulled_images_as_tags(images: Iterable[DockerGenericTag]) -> EC2Tags:
     # AWS Tag Values are limited to 256 characaters so we chunk the images
     # into smaller chunks
     jsonized_images = json_dumps(images)
-    assert AWSTagValue.max_length  # nosec
-    if len(jsonized_images) > AWSTagValue.max_length:
+    assert AWS_TAG_VALUE_MAX_LENGTH  # nosec
+    if len(jsonized_images) > AWS_TAG_VALUE_MAX_LENGTH:
         # let's chunk the string
-        chunk_size = AWSTagValue.max_length
+        chunk_size = AWS_TAG_VALUE_MAX_LENGTH
         chunks = [
             jsonized_images[i : i + chunk_size]
             for i in range(0, len(jsonized_images), chunk_size)
         ]
         return {
-            AWSTagKey(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): AWSTagValue(c)
+            TypeAdapter(AWSTagKey)
+            .validate_python(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): TypeAdapter(
+                AWSTagValue
+            )
+            .validate_python(c)
             for i, c in enumerate(chunks)
         }
     return {
-        PRE_PULLED_IMAGES_EC2_TAG_KEY: parse_obj_as(AWSTagValue, json_dumps(images))
+        PRE_PULLED_IMAGES_EC2_TAG_KEY: TypeAdapter(AWSTagValue).validate_python(
+            json_dumps(images)
+        )
     }
 
 
@@ -64,7 +72,9 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]:
     # AWS Tag values are limited to 256 characters so we chunk the images
     if PRE_PULLED_IMAGES_EC2_TAG_KEY in tags:
         # read directly
-        return parse_raw_as(list[DockerGenericTag], tags[PRE_PULLED_IMAGES_EC2_TAG_KEY])
+        return TypeAdapter(list[DockerGenericTag]).validate_json(
+            tags[PRE_PULLED_IMAGES_EC2_TAG_KEY]
+        )
 
     assembled_json = "".join(
         map(
@@ -80,5 +90,5 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]:
         )
     )
     if assembled_json:
-        return parse_raw_as(list[DockerGenericTag], assembled_json)
+        return TypeAdapter(list[DockerGenericTag]).validate_json(assembled_json)
     return []
diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py
index 6449952decd..d870254b58f 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py
@@ -29,7 +29,7 @@
     Task,
     TaskState,
 )
-from pydantic import ByteSize, ValidationError, parse_obj_as
+from pydantic import ByteSize, TypeAdapter, ValidationError
 from servicelib.docker_utils import to_datetime
 from servicelib.logging_utils import log_context
 from servicelib.utils import logged_gather
@@ -59,11 +59,11 @@
 _PENDING_DOCKER_TASK_MESSAGE: Final[str] = "pending task scheduling"
 _INSUFFICIENT_RESOURCES_DOCKER_TASK_ERR: Final[str] = "insufficient resources on"
 _NOT_SATISFIED_SCHEDULING_CONSTRAINTS_TASK_ERR: Final[str] = "no suitable node"
-_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-services-ready"
+_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(DockerLabelKey).validate_python(
+    "io.simcore.osparc-services-ready",
 )
-_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed"
+_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(DockerLabelKey).validate_python(
+    f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed",
 )
 _OSPARC_SERVICE_READY_LABEL_KEYS: Final[list[DockerLabelKey]] = [
     _OSPARC_SERVICE_READY_LABEL_KEY,
@@ -71,12 +71,12 @@
 ]
 
 
-_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-node-found-empty"
+_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(DockerLabelKey).validate_python(
+    "io.simcore.osparc-node-found-empty",
 )
-_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-node-termination-started"
+_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(DockerLabelKey).validate_python(
+    "io.simcore.osparc-node-termination-started",
 )
 
 
@@ -86,15 +86,13 @@ async def get_monitored_nodes(
     node_label_filters = [f"{label}=true" for label in node_labels] + [
         f"{label}" for label in _OSPARC_SERVICE_READY_LABEL_KEYS
     ]
-    return parse_obj_as(
-        list[Node],
+    return TypeAdapter(list[Node]).validate_python(
         await docker_client.nodes.list(filters={"node.label": node_label_filters}),
     )
 
 
 async def get_worker_nodes(docker_client: AutoscalingDocker) -> list[Node]:
-    return parse_obj_as(
-        list[Node],
+    return TypeAdapter(list[Node]).validate_python(
         await docker_client.nodes.list(
             filters={
                 "role": ["worker"],
@@ -161,8 +159,8 @@ async def _associated_service_has_no_node_placement_contraints(
     docker_client: AutoscalingDocker, task: Task
 ) -> bool:
     assert task.ServiceID  # nosec
-    service_inspect = parse_obj_as(
-        Service, await docker_client.services.inspect(task.ServiceID)
+    service_inspect = TypeAdapter(Service).validate_python(
+        await docker_client.services.inspect(task.ServiceID),
     )
     assert service_inspect.Spec  # nosec
     assert service_inspect.Spec.TaskTemplate  # nosec
@@ -209,8 +207,7 @@ async def pending_service_tasks_with_insufficient_resources(
     - have an error message with "insufficient resources"
     - are not scheduled on any node
     """
-    tasks = parse_obj_as(
-        list[Task],
+    tasks = TypeAdapter(list[Task]).validate_python(
         await docker_client.tasks.list(
             filters={
                 "desired-state": "running",
@@ -264,7 +261,7 @@ async def compute_cluster_total_resources(nodes: list[Node]) -> Resources:
             }
         )
 
-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))
 
 
 def get_max_resources_from_docker_task(task: Task) -> Resources:
@@ -285,8 +282,7 @@ def get_max_resources_from_docker_task(task: Task) -> Resources:
                 ),
             )
             / _NANO_CPU,
-            ram=parse_obj_as(
-                ByteSize,
+            ram=TypeAdapter(ByteSize).validate_python(
                 max(
                     task.Spec.Resources.Reservations
                     and task.Spec.Resources.Reservations.MemoryBytes
@@ -305,8 +301,8 @@ async def get_task_instance_restriction(
 ) -> InstanceTypeType | None:
     with contextlib.suppress(ValidationError):
         assert task.ServiceID  # nosec
-        service_inspect = parse_obj_as(
-            Service, await docker_client.services.inspect(task.ServiceID)
+        service_inspect = TypeAdapter(Service).validate_python(
+            await docker_client.services.inspect(task.ServiceID),
        )
         assert service_inspect.Spec  # nosec
         assert service_inspect.Spec.TaskTemplate  # nosec
@@ -326,8 +322,8 @@ async def get_task_instance_restriction(
         )
         for constraint in service_placement_constraints:
             if constraint.startswith(node_label_to_find):
-                return parse_obj_as(
-                    InstanceTypeType, constraint.removeprefix(node_label_to_find)  # type: ignore[arg-type]
+                return TypeAdapter(InstanceTypeType).validate_python(
+                    constraint.removeprefix(node_label_to_find),
                 )
     return None
@@ -351,8 +347,7 @@ async def compute_node_used_resources(
     task_filters: dict[str, str | list[DockerLabelKey]] = {"node": node.ID}
     if service_labels is not None:
         task_filters |= {"label": service_labels}
-    all_tasks_on_node = parse_obj_as(
-        list[Task],
+    all_tasks_on_node = TypeAdapter(list[Task]).validate_python(
         await docker_client.tasks.list(filters=task_filters),
     )
     for task in all_tasks_on_node:
@@ -370,7 +365,7 @@ async def compute_node_used_resources(
                 "cpus": task_reservations.get("NanoCPUs", 0) / _NANO_CPU,
             }
         )
-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))
 
 
 async def compute_cluster_used_resources(
@@ -380,11 +375,11 @@ async def compute_cluster_used_resources(
     list_of_used_resources = await logged_gather(
         *(compute_node_used_resources(docker_client, node) for node in nodes)
     )
-    counter = collections.Counter({k: 0 for k in Resources.__fields__})
+    counter = collections.Counter({k: 0 for k in Resources.model_fields})
     for result in list_of_used_resources:
         counter.update(result.dict())
-    return Resources.parse_obj(dict(counter))
+    return Resources.model_validate(dict(counter))
 
 
 _COMMAND_TIMEOUT_S = 10
@@ -504,7 +499,7 @@ async def find_node_with_name(
     if not list_of_nodes:
         return None
     # note that there might be several nodes with a common_prefixed name. so now we want exact matching
-    parsed_list_of_nodes = parse_obj_as(list[Node], list_of_nodes)
+    parsed_list_of_nodes = TypeAdapter(list[Node]).validate_python(list_of_nodes)
     for node in parsed_list_of_nodes:
         assert node.Description  # nosec
         if node.Description.Hostname == name:
@@ -525,8 +520,8 @@ async def tag_node(
 ):
     assert node.ID  # nosec
 
-    latest_version_node = parse_obj_as(
-        Node, await docker_client.nodes.inspect(node_id=node.ID)
+    latest_version_node = TypeAdapter(Node).validate_python(
+        await docker_client.nodes.inspect(node_id=node.ID),
     )
     assert latest_version_node.Version  # nosec
     assert latest_version_node.Version.Index  # nosec
@@ -543,7 +538,7 @@ async def tag_node(
                 "Role": latest_version_node.Spec.Role.value,
             },
         )
-    return parse_obj_as(Node, await docker_client.nodes.inspect(node_id=node.ID))
+    return TypeAdapter(Node).validate_python(await docker_client.nodes.inspect(node_id=node.ID))
 
 
 async def set_node_availability(
diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py
index b705ea85b78..a385b4b02f8 100644
--- a/services/autoscaling/tests/unit/conftest.py
+++ b/services/autoscaling/tests/unit/conftest.py
@@ -173,7 +173,7 @@ def app_with_docker_join_drained(
 
 @pytest.fixture(scope="session")
 def fake_ssm_settings() -> SSMSettings:
-    return SSMSettings(**SSMSettings.Config.schema_extra["examples"][0])
+    return SSMSettings(**SSMSettings.model_config["json_schema_extra"]["examples"][0])
 
 
 @pytest.fixture
@@ -236,7 +236,9 @@ def app_environment(
             "EC2_INSTANCES_ALLOWED_TYPES": json.dumps(
                 {
                     ec2_type_name: random.choice(  # noqa: S311
-                        EC2InstanceBootSpecific.Config.schema_extra["examples"]
+                        EC2InstanceBootSpecific.model_config["json_schema_extra"][
+                            "examples"
+                        ]
                     )
                     for ec2_type_name in aws_allowed_ec2_instance_type_names
                 }
@@ -267,7 +269,9 @@ def mocked_ec2_instances_envs(
             "EC2_INSTANCES_ALLOWED_TYPES": json.dumps(
                 {
                     ec2_type_name: random.choice(  # noqa: S311
-                        EC2InstanceBootSpecific.Config.schema_extra["examples"]
+                        EC2InstanceBootSpecific.model_config["json_schema_extra"][
+                            "examples"
+                        ]
                     )
                     | {"ami_id": aws_ami_id}
                     for ec2_type_name in aws_allowed_ec2_instance_type_names
@@ -762,7 +766,7 @@ def host_memory_total() -> ByteSize:
 def osparc_docker_label_keys(
     faker: Faker,
 ) -> StandardSimcoreDockerLabels:
-    return StandardSimcoreDockerLabels.parse_obj(
+    return StandardSimcoreDockerLabels.model_validate(
         {
             "user_id": faker.pyint(),
             "project_id": faker.uuid4(),
diff --git a/services/autoscaling/tests/unit/test_api_health.py b/services/autoscaling/tests/unit/test_api_health.py
index 353aabf31a4..e3c22afddac 100644
--- a/services/autoscaling/tests/unit/test_api_health.py
+++ b/services/autoscaling/tests/unit/test_api_health.py
@@ -42,7 +42,7 @@ async def test_status_no_rabbit(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response
 
     assert status_response.rabbitmq.is_enabled is False
@@ -66,7 +66,7 @@ async def test_status_no_ssm(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response
 
     assert status_response.rabbitmq.is_enabled is False
@@ -94,7 +94,7 @@ async def test_status(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response
 
     assert status_response.rabbitmq.is_enabled is True
@@ -114,7 +114,7 @@ async def test_status(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response
 
     assert status_response.rabbitmq.is_enabled is True
diff --git a/services/autoscaling/tests/unit/test_modules_dask.py b/services/autoscaling/tests/unit/test_modules_dask.py
index 76dab6883e0..87f4ff6d175 100644
--- a/services/autoscaling/tests/unit/test_modules_dask.py
+++ b/services/autoscaling/tests/unit/test_modules_dask.py
@@ -42,7 +42,9 @@
 
 _authentication_types = [
     NoAuthentication(),
-    TLSAuthentication.construct(**TLSAuthentication.Config.schema_extra["examples"][0]),
+    TLSAuthentication.construct(
+        **TLSAuthentication.model_config["json_schema_extra"]["examples"][0]
+    ),
 ]
diff --git a/services/autoscaling/tests/unit/test_utils_docker.py b/services/autoscaling/tests/unit/test_utils_docker.py
index 8e5b8cd90a8..6323a4ba31c 100644
--- a/services/autoscaling/tests/unit/test_utils_docker.py
+++ b/services/autoscaling/tests/unit/test_utils_docker.py
@@ -872,7 +872,7 @@ async def test_get_docker_swarm_join_script_returning_unexpected_command_raises(
 
 def test_get_docker_login_on_start_bash_command():
     registry_settings = RegistrySettings(
-        **RegistrySettings.Config.schema_extra["examples"][0]
+        **RegistrySettings.model_config["json_schema_extra"]["examples"][0]
     )
     returned_command = get_docker_login_on_start_bash_command(registry_settings)
     assert (
diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
index 1c5920f9dc7..302002ddae2 100644
--- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py
+++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
@@ -19,7 +19,7 @@
     ProgressRabbitMessageNode,
     ProgressType,
 )
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from servicelib.rabbitmq import BIND_TO_ALL_TOPICS, RabbitMQClient
 from settings_library.rabbit import RabbitSettings
@@ -79,8 +78,7 @@ async def test_post_task_log_message(
         "running",
     )
     assert service_with_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_with_labels.Spec.Name}
         ),
@@ -104,7 +103,7 @@ async def test_post_task_log_message(
             messages=[f"[cluster] {log_message}"],
             log_level=0,
         )
-        .json()
+        .model_dump_json()
         .encode()
     )
     print("... message received")
@@ -126,8 +125,7 @@ async def test_post_task_log_message_does_not_raise_if_service_has_no_labels(
 ):
     service_without_labels = await create_service(task_template, {}, "running")
     assert service_without_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_without_labels.Spec.Name}
         ),
@@ -171,8 +169,7 @@ async def test_post_task_progress_message(
         "running",
     )
     assert service_with_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_with_labels.Spec.Name}
         ),
@@ -196,7 +193,7 @@ async def test_post_task_progress_message(
             progress_type=ProgressType.CLUSTER_UP_SCALING,
             report=ProgressReport(actual_value=progress_value, total=1),
         )
-        .json()
+        .model_dump_json()
        .encode()
     )
     print("... message received")
@@ -218,8 +215,7 @@ async def test_post_task_progress_does_not_raise_if_service_has_no_labels(
 ):
     service_without_labels = await create_service(task_template, {}, "running")
     assert service_without_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_without_labels.Spec.Name}
         ),