
Commit 53c6c89
fix code
Parent: 35557ba

12 files changed: +112 additions, -89 deletions

services/autoscaling/src/simcore_service_autoscaling/_meta.py

Lines changed: 4 additions & 1 deletion

@@ -2,6 +2,7 @@
 
 from models_library.basic_types import VersionStr, VersionTag
 from packaging.version import Version
+from pydantic import TypeAdapter
 from servicelib.utils_meta import PackageInfo
 
 info: Final = PackageInfo(package_name="simcore-service-autoscaling")
@@ -10,7 +11,9 @@
 APP_NAME: Final[str] = info.project_name
 API_VERSION: Final[VersionStr] = info.__version__
 VERSION: Final[Version] = info.version
-API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag)
+API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python(
+    info.api_prefix_path_tag
+)
 SUMMARY: Final[str] = info.get_summary()
 
 
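Pydantic v2 removed direct calls on constrained types, so the v1-style VersionTag(info.api_prefix_path_tag) cast becomes an explicit TypeAdapter validation. A minimal sketch of the pattern, using a stand-in annotated type rather than the real models_library VersionTag:

from typing import Annotated

from pydantic import StringConstraints, TypeAdapter

# stand-in for models_library.basic_types.VersionTag (assumed: a constrained str)
VersionTagLike = Annotated[str, StringConstraints(pattern=r"^v\d+$")]

# validate_python checks the constraints and returns the validated value
api_vtag = TypeAdapter(VersionTagLike).validate_python("v0")
assert api_vtag == "v0"
# TypeAdapter(VersionTagLike).validate_python("0")  # raises ValidationError

Unlike the v1 call syntax, which constructed the value without running the constraints, an invalid value now fails loudly at import time.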

services/autoscaling/src/simcore_service_autoscaling/constants.py

Lines changed: 14 additions & 14 deletions

@@ -2,32 +2,32 @@
 from typing import Final
 
 from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2Tags
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 
-BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "pulling"
-)
-BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "ssm-command-id"
-)
+BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("pulling")
+BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("ssm-command-id")
 PREPULL_COMMAND_NAME: Final[str] = "docker images pulling"
 
 DOCKER_PULL_COMMAND: Final[
     str
 ] = "docker compose -f /docker-pull.compose.yml -p buffering pull"
 
-PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "io.simcore.autoscaling.pre_pulled_images"
-)
+PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(
+    AWSTagKey
+).validate_python("io.simcore.autoscaling.pre_pulled_images")
 
-BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = parse_obj_as(
-    AWSTagKey, "io.simcore.autoscaling.buffer_machine"
+BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python(
+    "io.simcore.autoscaling.buffer_machine"
 )
 DEACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = {
-    BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "true")
+    BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("true")
 }
 ACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = {
-    BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "false")
+    BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("false")
 }
 PRE_PULLED_IMAGES_RE: Final[re.Pattern] = re.compile(
     rf"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_\((\d+)\)"

services/autoscaling/src/simcore_service_autoscaling/core/errors.py

Lines changed: 1 addition & 6 deletions

@@ -1,12 +1,7 @@
-from typing import Any
-
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 
 
 class AutoscalingRuntimeError(OsparcErrorMixin, RuntimeError):
-    def __init__(self, **ctx: Any) -> None:
-        super().__init__(**ctx)
-
     msg_template: str = "Autoscaling unexpected error"
 
 
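Dropping the explicit __init__ works because OsparcErrorMixin already accepts arbitrary keyword context and interpolates it into msg_template, so the override only restated the base behavior. A self-contained sketch that imitates that mechanism (the real mixin lives in common_library and may differ in detail; the subclass below is hypothetical):

from typing import Any


class ErrorMixinSketch:
    # minimal imitation of OsparcErrorMixin: kwargs fill msg_template
    msg_template: str = "unexpected error"

    def __init__(self, **ctx: Any) -> None:
        self.ctx = ctx
        super().__init__(self.msg_template.format(**ctx))


class InstanceNotFoundErrorSketch(ErrorMixinSketch, RuntimeError):
    msg_template: str = "no instance found for {needed_resources}"


try:
    raise InstanceNotFoundErrorSketch(needed_resources={"cpu": 4})
except RuntimeError as err:
    print(err)  # no instance found for {'cpu': 4}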

services/autoscaling/src/simcore_service_autoscaling/core/settings.py

Lines changed: 43 additions & 30 deletions

@@ -1,6 +1,6 @@
 import datetime
 from functools import cached_property
-from typing import Any, ClassVar, Final, cast
+from typing import Annotated, Final, cast
 
 from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags
 from fastapi import FastAPI
@@ -14,14 +14,16 @@
 from models_library.clusters import InternalClusterAuthentication
 from models_library.docker import DockerLabelKey
 from pydantic import (
+    AliasChoices,
     AnyUrl,
     Field,
     NonNegativeInt,
     PositiveInt,
-    parse_obj_as,
-    root_validator,
-    validator,
+    TypeAdapter,
+    field_validator,
+    model_validator,
 )
+from pydantic_settings import SettingsConfigDict
 from settings_library.base import BaseCustomSettings
 from settings_library.docker_registry import RegistrySettings
 from settings_library.ec2 import EC2Settings
@@ -41,10 +43,9 @@ class AutoscalingSSMSettings(SSMSettings):
 
 
 class AutoscalingEC2Settings(EC2Settings):
-    class Config(EC2Settings.Config):
-        env_prefix = AUTOSCALING_ENV_PREFIX
-
-        schema_extra: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+    model_config = SettingsConfigDict(
+        env_prefix=AUTOSCALING_ENV_PREFIX,
+        json_schema_extra={
             "examples": [
                 {
                     f"{AUTOSCALING_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id",
@@ -53,7 +54,8 @@ class Config(EC2Settings.Config):
                     f"{AUTOSCALING_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key",
                 }
             ],
-        }
+        },
+    )
 
 
 class EC2InstancesSettings(BaseCustomSettings):
@@ -93,7 +95,7 @@ class EC2InstancesSettings(BaseCustomSettings):
 
     EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field(
         ...,
-        min_items=1,
+        min_length=1,
         description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic"
         " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), "
         " this is required to start a new EC2 instance",
@@ -130,7 +132,7 @@ class EC2InstancesSettings(BaseCustomSettings):
         description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string",
     )
 
-    @validator("EC2_INSTANCES_TIME_BEFORE_DRAINING")
+    @field_validator("EC2_INSTANCES_TIME_BEFORE_DRAINING")
     @classmethod
     def ensure_draining_delay_time_is_in_range(
         cls, value: datetime.timedelta
@@ -141,7 +143,7 @@ def ensure_draining_delay_time_is_in_range(
             value = datetime.timedelta(minutes=1)
         return value
 
-    @validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION")
+    @field_validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION")
     @classmethod
     def ensure_termination_delay_time_is_in_range(
         cls, value: datetime.timedelta
@@ -152,14 +154,14 @@ def ensure_termination_delay_time_is_in_range(
             value = datetime.timedelta(minutes=59)
         return value
 
-    @validator("EC2_INSTANCES_ALLOWED_TYPES")
+    @field_validator("EC2_INSTANCES_ALLOWED_TYPES")
     @classmethod
     def check_valid_instance_names(
         cls, value: dict[str, EC2InstanceBootSpecific]
     ) -> dict[str, EC2InstanceBootSpecific]:
         # NOTE: needed because of a flaw in BaseCustomSettings
         # issubclass raises TypeError if used on Aliases
-        parse_obj_as(list[InstanceTypeType], list(value))
+        TypeAdapter(list[InstanceTypeType]).validate_python(list(value))
         return value
 
 
@@ -181,7 +183,7 @@ class NodesMonitoringSettings(BaseCustomSettings):
 
 
 class DaskMonitoringSettings(BaseCustomSettings):
-    DASK_MONITORING_URL: AnyUrl = Field(
+    DASK_MONITORING_URL: Annotated[str, AnyUrl] = Field(
         ..., description="the url to the osparc-dask-scheduler"
     )
     DASK_SCHEDULER_AUTH: InternalClusterAuthentication = Field(
@@ -217,36 +219,39 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
 
     # RUNTIME -----------------------------------------------------------
     AUTOSCALING_DEBUG: bool = Field(
-        default=False, description="Debug mode", env=["AUTOSCALING_DEBUG", "DEBUG"]
+        default=False,
+        description="Debug mode",
+        validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"),
     )
-    AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = PortInt(3000)
+    AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = 3000
 
     AUTOSCALING_LOGLEVEL: LogLevel = Field(
-        LogLevel.INFO, env=["AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"]
+        LogLevel.INFO,
+        validation_alias=AliasChoices("AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"),
     )
     AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=[
+        validation_alias=AliasChoices(
             "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED",
             "LOG_FORMAT_LOCAL_DEV_ENABLED",
-        ],
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
 
     AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_SSM_ACCESS: AutoscalingSSMSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )
 
     AUTOSCALING_NODES_MONITORING: NodesMonitoringSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
    )
 
     AUTOSCALING_POLL_INTERVAL: datetime.timedelta = Field(
@@ -255,13 +260,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
         "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)",
     )
 
-    AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_REDIS: RedisSettings = Field(auto_default_from_env=True)
+    AUTOSCALING_REDIS: RedisSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_REGISTRY: RegistrySettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_REGISTRY: RegistrySettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
-    AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(auto_default_from_env=True)
+    AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
 
     AUTOSCALING_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
 
@@ -276,12 +289,12 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
     def LOG_LEVEL(self):  # noqa: N802
         return self.AUTOSCALING_LOGLEVEL
 
-    @validator("AUTOSCALING_LOGLEVEL")
+    @field_validator("AUTOSCALING_LOGLEVEL")
     @classmethod
     def valid_log_level(cls, value: str) -> str:
        return cls.validate_log_level(value)
 
-    @root_validator()
+    @model_validator(mode="after")
     @classmethod
     def exclude_both_dynamic_computational_mode(cls, values):
         if (
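Three v1-to-v2 renames dominate this file: Field(env=[...]) becomes validation_alias=AliasChoices(...), @validator becomes @field_validator, and @root_validator becomes @model_validator. A runnable sketch of all three on a toy settings class (field names are illustrative, not the service's):

from pydantic import AliasChoices, Field, field_validator, model_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class SketchSettings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="SKETCH_")

    # v1: Field(env=["SKETCH_DEBUG", "DEBUG"])
    DEBUG: bool = Field(
        default=False,
        validation_alias=AliasChoices("SKETCH_DEBUG", "DEBUG"),
    )
    LOGLEVEL: str = "INFO"

    # v1: @validator("LOGLEVEL")
    @field_validator("LOGLEVEL")
    @classmethod
    def _upper_loglevel(cls, value: str) -> str:
        return value.upper()

    # v1: @root_validator() received a dict of raw values;
    # mode="after" receives the constructed model instead
    @model_validator(mode="after")
    def _check_consistency(self) -> "SketchSettings":
        if self.DEBUG and self.LOGLEVEL == "ERROR":
            msg = "DEBUG with ERROR loglevel hides debug output"
            raise ValueError(msg)
        return self


assert SketchSettings().LOGLEVEL == "INFO"

One consequence worth noting: with mode="after" the validator gets the model instance, not the values dict that root_validator passed, so a body like exclude_both_dynamic_computational_mode must read attributes rather than calling values.get(...).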

services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py

Lines changed: 2 additions & 2 deletions

@@ -151,10 +151,10 @@ async def compute_cluster_used_resources(
                 for i in instances
             )
         )
-        counter = collections.Counter({k: 0 for k in Resources.__fields__})
+        counter = collections.Counter({k: 0 for k in Resources.model_fields})
         for result in list_of_used_resources:
             counter.update(result.dict())
-        return Resources.parse_obj(dict(counter))
+        return Resources.model_validate(dict(counter))
 
     @staticmethod
     async def compute_cluster_total_resources(
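Resources.model_fields and Resources.model_validate are the v2 spellings of __fields__ and parse_obj. A minimal sketch of this summing pattern with a stand-in Resources model (the real model lives in the service's models module):

import collections

from pydantic import BaseModel, NonNegativeFloat, NonNegativeInt


class ResourcesSketch(BaseModel):
    # stand-in for the service's Resources model
    cpus: NonNegativeFloat = 0
    ram: NonNegativeInt = 0


used = [ResourcesSketch(cpus=1.5, ram=1024), ResourcesSketch(cpus=0.5, ram=512)]

# seed the counter with every field so an empty cluster still sums to zero
counter = collections.Counter({k: 0 for k in ResourcesSketch.model_fields})
for result in used:
    counter.update(result.model_dump())  # v2 spelling of v1's .dict()

total = ResourcesSketch.model_validate(dict(counter))
assert total == ResourcesSketch(cpus=2.0, ram=1536)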

services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py

Lines changed: 21 additions & 11 deletions

@@ -1,11 +1,11 @@
 from collections.abc import Iterable
 from operator import itemgetter
 
-from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags
+from aws_library.ec2 import AWS_TAG_VALUE_MAX_LENGTH, AWSTagKey, AWSTagValue, EC2Tags
 from fastapi import FastAPI
 from models_library.docker import DockerGenericTag
 from models_library.utils.json_serialization import json_dumps
-from pydantic import parse_obj_as, parse_raw_as
+from pydantic import TypeAdapter
 
 from ..constants import (
     ACTIVATED_BUFFER_MACHINE_EC2_TAGS,
@@ -29,8 +29,10 @@ def get_deactivated_buffer_ec2_tags(
     base_ec2_tags = (
         auto_scaling_mode.get_ec2_tags(app) | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS
     )
-    base_ec2_tags[AWSTagKey("Name")] = AWSTagValue(
-        f"{base_ec2_tags[AWSTagKey('Name')]}-buffer"
+    base_ec2_tags[TypeAdapter(AWSTagKey).validate_python("Name")] = TypeAdapter(
+        AWSTagValue
+    ).validate_python(
+        f"{base_ec2_tags[TypeAdapter(AWSTagKey).validate_python('Name')]}-buffer"
     )
     return base_ec2_tags
 
@@ -43,28 +45,36 @@ def dump_pre_pulled_images_as_tags(images: Iterable[DockerGenericTag]) -> EC2Tag
     # AWS Tag Values are limited to 256 characaters so we chunk the images
     # into smaller chunks
     jsonized_images = json_dumps(images)
-    assert AWSTagValue.max_length  # nosec
-    if len(jsonized_images) > AWSTagValue.max_length:
+    assert AWS_TAG_VALUE_MAX_LENGTH  # nosec
+    if len(jsonized_images) > AWS_TAG_VALUE_MAX_LENGTH:
         # let's chunk the string
-        chunk_size = AWSTagValue.max_length
+        chunk_size = AWS_TAG_VALUE_MAX_LENGTH
         chunks = [
             jsonized_images[i : i + chunk_size]
             for i in range(0, len(jsonized_images), chunk_size)
         ]
         return {
-            AWSTagKey(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): AWSTagValue(c)
+            TypeAdapter(AWSTagKey)
+            .validate_python(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): TypeAdapter(
+                AWSTagValue
+            )
+            .validate_python(c)
             for i, c in enumerate(chunks)
         }
     return {
-        PRE_PULLED_IMAGES_EC2_TAG_KEY: parse_obj_as(AWSTagValue, json_dumps(images))
+        PRE_PULLED_IMAGES_EC2_TAG_KEY: TypeAdapter(AWSTagValue).validate_python(
+            json_dumps(images)
+        )
     }
 
 
 def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]:
     # AWS Tag values are limited to 256 characters so we chunk the images
     if PRE_PULLED_IMAGES_EC2_TAG_KEY in tags:
         # read directly
-        return parse_raw_as(list[DockerGenericTag], tags[PRE_PULLED_IMAGES_EC2_TAG_KEY])
+        return TypeAdapter(list[DockerGenericTag]).validate_json(
+            tags[PRE_PULLED_IMAGES_EC2_TAG_KEY]
+        )
 
     assembled_json = "".join(
         map(
@@ -80,5 +90,5 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]:
         )
     )
     if assembled_json:
-        return parse_raw_as(list[DockerGenericTag], assembled_json)
+        return TypeAdapter(list[DockerGenericTag]).validate_json(assembled_json)
     return []
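parse_raw_as(list[DockerGenericTag], raw) becomes TypeAdapter(list[DockerGenericTag]).validate_json(raw). A self-contained sketch of the chunked round trip implemented above, with a tiny limit so the chunking branch is exercised (the real AWS_TAG_VALUE_MAX_LENGTH is 256, and the real loader orders chunks by the numeric index parsed from each tag key):

import json

from pydantic import TypeAdapter

MAX_LEN = 16  # stand-in for AWS_TAG_VALUE_MAX_LENGTH
_IMAGES = TypeAdapter(list[str])  # stand-in for list[DockerGenericTag]


def dump_as_tags(images: list[str]) -> dict[str, str]:
    raw = json.dumps(images)
    if len(raw) <= MAX_LEN:
        return {"pre_pulled_images": raw}
    chunks = [raw[i : i + MAX_LEN] for i in range(0, len(raw), MAX_LEN)]
    return {f"pre_pulled_images_({i})": c for i, c in enumerate(chunks)}


def load_from_tags(tags: dict[str, str]) -> list[str]:
    if "pre_pulled_images" in tags:
        return _IMAGES.validate_json(tags["pre_pulled_images"])
    # lexical sort is good enough for < 10 chunks; the real code sorts numerically
    assembled = "".join(tags[key] for key in sorted(tags))
    return _IMAGES.validate_json(assembled)


images = ["nginx:1.25", "redis:7", "postgres:16"]
assert load_from_tags(dump_as_tags(images)) == images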

services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py

Lines changed: 4 additions & 4 deletions

@@ -264,7 +264,7 @@ async def compute_cluster_total_resources(nodes: list[Node]) -> Resources:
         }
     )
 
-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))
 
 
 def get_max_resources_from_docker_task(task: Task) -> Resources:
@@ -370,7 +370,7 @@ async def compute_node_used_resources(
             "cpus": task_reservations.get("NanoCPUs", 0) / _NANO_CPU,
         }
     )
-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))
 
 
 async def compute_cluster_used_resources(
@@ -380,11 +380,11 @@ async def compute_cluster_used_resources(
     list_of_used_resources = await logged_gather(
         *(compute_node_used_resources(docker_client, node) for node in nodes)
     )
-    counter = collections.Counter({k: 0 for k in Resources.__fields__})
+    counter = collections.Counter({k: 0 for k in Resources.model_fields})
     for result in list_of_used_resources:
         counter.update(result.dict())
 
-    return Resources.parse_obj(dict(counter))
+    return Resources.model_validate(dict(counter))
 
 
 _COMMAND_TIMEOUT_S = 10
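These hunks repeat the parse_obj to model_validate rename shown earlier. One leftover the diff does not touch: the context line counter.update(result.dict()) still uses .dict(), which pydantic v2 keeps only as a deprecated alias for model_dump(); a one-liner showing the remaining rename:

from pydantic import BaseModel


class R(BaseModel):
    cpus: float = 0


r = R(cpus=1.0)
# .dict() still works in v2 but emits a deprecation warning
assert r.dict() == r.model_dump() == {"cpus": 1.0}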
