
Commit afc88bb

migration
1 parent 7ad3426 commit afc88bb

File tree

2 files changed: +47 -53 lines changed


services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py

Lines changed: 43 additions & 49 deletions
@@ -29,7 +29,7 @@
     Task,
     TaskState,
 )
-from pydantic import ByteSize, ValidationError, parse_obj_as
+from pydantic import ByteSize, TypeAdapter, ValidationError
 from servicelib.docker_utils import to_datetime
 from servicelib.logging_utils import log_context
 from servicelib.utils import logged_gather
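
Note on the hunk above: pydantic v2 drops parse_obj_as, and TypeAdapter is its replacement, which is the pattern this migration applies throughout the file. A minimal sketch of the before/after call sites (using plain str instead of the repo's DockerLabelKey type, purely for illustration):

    from pydantic import TypeAdapter

    # pydantic v1 (removed in v2):
    #   from pydantic import parse_obj_as
    #   label = parse_obj_as(str, "io.simcore.osparc-services-ready")

    # pydantic v2 equivalent, as used in this commit:
    label = TypeAdapter(str).validate_python("io.simcore.osparc-services-ready")
    assert label == "io.simcore.osparc-services-ready"
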
@@ -59,25 +59,27 @@
 _PENDING_DOCKER_TASK_MESSAGE: Final[str] = "pending task scheduling"
 _INSUFFICIENT_RESOURCES_DOCKER_TASK_ERR: Final[str] = "insufficient resources on"
 _NOT_SATISFIED_SCHEDULING_CONSTRAINTS_TASK_ERR: Final[str] = "no suitable node"
-_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-services-ready"
-)
-_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed"
+_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(
+    DockerLabelKey
+).validate_python(
+    "io.simcore.osparc-services-ready",
 )
+_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(
+    DockerLabelKey
+).validate_python(f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed")
 _OSPARC_SERVICE_READY_LABEL_KEYS: Final[list[DockerLabelKey]] = [
     _OSPARC_SERVICE_READY_LABEL_KEY,
     _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY,
 ]


-_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-node-found-empty"
-)
+_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(
+    DockerLabelKey
+).validate_python("io.simcore.osparc-node-found-empty")

-_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as(
-    DockerLabelKey, "io.simcore.osparc-node-termination-started"
-)
+_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter(
+    DockerLabelKey
+).validate_python("io.simcore.osparc-node-termination-started")


 async def get_monitored_nodes(
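
A side note on the module-level constants above (an observation, not something this commit changes): pydantic documents that constructing a TypeAdapter has a one-time cost, so when the same type is validated in many places the adapter itself can also be built once and reused. A hypothetical variant of the label-key validation, for illustration only:

    from typing import Final

    from pydantic import TypeAdapter

    # Hypothetical: build the adapter once at module scope and reuse it,
    # instead of calling TypeAdapter(...) at every validation site.
    _LABEL_KEY_ADAPTER: Final[TypeAdapter[str]] = TypeAdapter(str)

    def validate_label_key(raw: str) -> str:
        return _LABEL_KEY_ADAPTER.validate_python(raw)
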
@@ -86,23 +88,21 @@ async def get_monitored_nodes(
     node_label_filters = [f"{label}=true" for label in node_labels] + [
         f"{label}" for label in _OSPARC_SERVICE_READY_LABEL_KEYS
     ]
-    return parse_obj_as(
-        list[Node],
-        await docker_client.nodes.list(filters={"node.label": node_label_filters}),
+    return TypeAdapter(list[Node]).validate_python(
+        await docker_client.nodes.list(filters={"node.label": node_label_filters})
     )


 async def get_worker_nodes(docker_client: AutoscalingDocker) -> list[Node]:
-    return parse_obj_as(
-        list[Node],
+    return TypeAdapter(list[Node]).validate_python(
         await docker_client.nodes.list(
             filters={
                 "role": ["worker"],
                 "node.label": [
                     f"{label}" for label in _OSPARC_SERVICE_READY_LABEL_KEYS
                 ],
             }
-        ),
+        )
     )


@@ -161,8 +161,8 @@ async def _associated_service_has_no_node_placement_contraints(
     docker_client: AutoscalingDocker, task: Task
 ) -> bool:
     assert task.ServiceID  # nosec
-    service_inspect = parse_obj_as(
-        Service, await docker_client.services.inspect(task.ServiceID)
+    service_inspect = TypeAdapter(Service).validate_python(
+        await docker_client.services.inspect(task.ServiceID)
     )
     assert service_inspect.Spec  # nosec
     assert service_inspect.Spec.TaskTemplate  # nosec
@@ -190,11 +190,9 @@ def _by_created_dt(task: Task) -> datetime.datetime:
     if task.CreatedAt:
         with suppress(ValueError):
             created_at = to_datetime(task.CreatedAt)
-            created_at_utc: datetime.datetime = created_at.replace(
-                tzinfo=datetime.timezone.utc
-            )
+            created_at_utc: datetime.datetime = created_at.replace(tzinfo=datetime.UTC)
             return created_at_utc
-    return datetime.datetime.now(datetime.timezone.utc)
+    return datetime.datetime.now(datetime.UTC)


 async def pending_service_tasks_with_insufficient_resources(
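
The datetime change in the hunk above swaps datetime.timezone.utc for datetime.UTC, an alias added in Python 3.11 for the same tzinfo singleton, so behaviour is unchanged on 3.11+. A quick check (assumes Python 3.11 or newer):

    import datetime

    # datetime.UTC (3.11+) is the very same object as datetime.timezone.utc
    assert datetime.UTC is datetime.timezone.utc

    created_at_utc = datetime.datetime.now(datetime.UTC)
    print(created_at_utc.tzinfo)  # UTC
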
@@ -209,14 +207,13 @@ async def pending_service_tasks_with_insufficient_resources(
     - have an error message with "insufficient resources"
     - are not scheduled on any node
     """
-    tasks = parse_obj_as(
-        list[Task],
+    tasks = TypeAdapter(list[Task]).validate_python(
         await docker_client.tasks.list(
             filters={
                 "desired-state": "running",
                 "label": service_labels,
             }
-        ),
+        )
     )

     sorted_tasks = sorted(tasks, key=_by_created_dt)
@@ -264,7 +261,7 @@ async def compute_cluster_total_resources(nodes: list[Node]) -> Resources:
             }
         )

-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))


 def get_max_resources_from_docker_task(task: Task) -> Resources:
def get_max_resources_from_docker_task(task: Task) -> Resources:
@@ -285,16 +282,15 @@ def get_max_resources_from_docker_task(task: Task) -> Resources:
285282
),
286283
)
287284
/ _NANO_CPU,
288-
ram=parse_obj_as(
289-
ByteSize,
285+
ram=TypeAdapter(ByteSize).validate_python(
290286
max(
291287
task.Spec.Resources.Reservations
292288
and task.Spec.Resources.Reservations.MemoryBytes
293289
or 0,
294290
task.Spec.Resources.Limits
295291
and task.Spec.Resources.Limits.MemoryBytes
296292
or 0,
297-
),
293+
)
298294
),
299295
)
300296
return Resources(cpus=0, ram=ByteSize(0))
@@ -305,8 +301,8 @@ async def get_task_instance_restriction(
 ) -> InstanceTypeType | None:
     with contextlib.suppress(ValidationError):
         assert task.ServiceID  # nosec
-        service_inspect = parse_obj_as(
-            Service, await docker_client.services.inspect(task.ServiceID)
+        service_inspect = TypeAdapter(Service).validate_python(
+            await docker_client.services.inspect(task.ServiceID)
         )
         assert service_inspect.Spec  # nosec
         assert service_inspect.Spec.TaskTemplate  # nosec
@@ -326,8 +322,8 @@ async def get_task_instance_restriction(
         )
         for constraint in service_placement_constraints:
             if constraint.startswith(node_label_to_find):
-                return parse_obj_as(
-                    InstanceTypeType, constraint.removeprefix(node_label_to_find)  # type: ignore[arg-type]
+                return TypeAdapter(InstanceTypeType).validate_python(
+                    constraint.removeprefix(node_label_to_find)
                 )

     return None
@@ -351,9 +347,8 @@ async def compute_node_used_resources(
     task_filters: dict[str, str | list[DockerLabelKey]] = {"node": node.ID}
     if service_labels is not None:
         task_filters |= {"label": service_labels}
-    all_tasks_on_node = parse_obj_as(
-        list[Task],
-        await docker_client.tasks.list(filters=task_filters),
+    all_tasks_on_node = TypeAdapter(list[Task]).validate_python(
+        await docker_client.tasks.list(filters=task_filters)
     )
     for task in all_tasks_on_node:
         assert task.Status  # nosec
@@ -370,7 +365,7 @@ async def compute_node_used_resources(
                     "cpus": task_reservations.get("NanoCPUs", 0) / _NANO_CPU,
                 }
             )
-    return Resources.parse_obj(dict(cluster_resources_counter))
+    return Resources.model_validate(dict(cluster_resources_counter))


 async def compute_cluster_used_resources(
async def compute_cluster_used_resources(
@@ -380,11 +375,11 @@ async def compute_cluster_used_resources(
380375
list_of_used_resources = await logged_gather(
381376
*(compute_node_used_resources(docker_client, node) for node in nodes)
382377
)
383-
counter = collections.Counter({k: 0 for k in Resources.__fields__})
378+
counter = collections.Counter({k: 0 for k in Resources.model_fields})
384379
for result in list_of_used_resources:
385380
counter.update(result.dict())
386381

387-
return Resources.parse_obj(dict(counter))
382+
return Resources.model_validate(dict(counter))
388383

389384

390385
_COMMAND_TIMEOUT_S = 10
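
Two renames meet in the hunk above: Resources.__fields__ becomes Resources.model_fields (pydantic v2's mapping of field names to FieldInfo, which still iterates as field names), and parse_obj becomes model_validate as before; result.dict() is left untouched, which pydantic v2 still accepts as a deprecated alias of model_dump(). A small self-contained sketch (stand-in model, fields assumed):

    import collections

    from pydantic import BaseModel

    # Stand-in for the repo's Resources model, for illustration only.
    class Resources(BaseModel):
        cpus: float = 0
        ram: int = 0

    # v1: Resources.__fields__  ->  v2: Resources.model_fields (iterates field names)
    counter = collections.Counter({k: 0 for k in Resources.model_fields})
    counter.update(Resources(cpus=1.5, ram=256).model_dump())
    print(dict(counter))  # {'cpus': 1.5, 'ram': 256}
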
@@ -446,10 +441,7 @@ def write_compose_file_command(
         },
     }
     compose_yaml = yaml.safe_dump(compose)
-    write_compose_file_cmd = " ".join(
-        ["echo", f'"{compose_yaml}"', ">", f"{_PRE_PULL_COMPOSE_PATH}"]
-    )
-    return write_compose_file_cmd
+    return " ".join(["echo", f'"{compose_yaml}"', ">", f"{_PRE_PULL_COMPOSE_PATH}"])


 def get_docker_pull_images_on_start_bash_command(
@@ -504,7 +496,7 @@ async def find_node_with_name(
     if not list_of_nodes:
         return None
     # note that there might be several nodes with a common_prefixed name. so now we want exact matching
-    parsed_list_of_nodes = parse_obj_as(list[Node], list_of_nodes)
+    parsed_list_of_nodes = TypeAdapter(list[Node]).validate_python(list_of_nodes)
     for node in parsed_list_of_nodes:
         assert node.Description  # nosec
         if node.Description.Hostname == name:
@@ -525,8 +517,8 @@ async def tag_node(
     ):
         assert node.ID  # nosec

-        latest_version_node = parse_obj_as(
-            Node, await docker_client.nodes.inspect(node_id=node.ID)
+        latest_version_node = TypeAdapter(Node).validate_python(
+            await docker_client.nodes.inspect(node_id=node.ID)
         )
         assert latest_version_node.Version  # nosec
         assert latest_version_node.Version.Index  # nosec
@@ -543,7 +535,9 @@ async def tag_node(
                 "Role": latest_version_node.Spec.Role.value,
             },
         )
-        return parse_obj_as(Node, await docker_client.nodes.inspect(node_id=node.ID))
+        return TypeAdapter(Node).validate_python(
+            await docker_client.nodes.inspect(node_id=node.ID)
+        )


 async def set_node_availability(

services/autoscaling/tests/unit/test_utils_rabbitmq.py

Lines changed: 4 additions & 4 deletions
@@ -82,7 +82,7 @@ async def test_post_task_log_message(
     service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_with_labels.Spec.Name}
-        ),
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -128,7 +128,7 @@ async def test_post_task_log_message_does_not_raise_if_service_has_no_labels(
     service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_without_labels.Spec.Name}
-        ),
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -172,7 +172,7 @@ async def test_post_task_progress_message(
     service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_with_labels.Spec.Name}
-        ),
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -218,7 +218,7 @@ async def test_post_task_progress_does_not_raise_if_service_has_no_labels(
     service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
             filters={"service": service_without_labels.Spec.Name}
-        ),
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
