Skip to content

Commit 6199ed6

Browse files
authored
⬆️ Dask sidecar: migration to v2 (#6591)
1 parent 7be9541 commit 6199ed6

File tree

22 files changed

+272
-124
lines changed

22 files changed

+272
-124
lines changed

packages/common-library/src/common_library/serialization.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
from datetime import timedelta
22
from typing import Any
33

4-
from common_library.pydantic_fields_extension import get_type
54
from pydantic import BaseModel, SecretStr
65
from pydantic_core import Url
76

7+
from .pydantic_fields_extension import get_type
8+
89

910
def model_dump_with_secrets(
10-
settings_obj: BaseModel, show_secrets: bool, **pydantic_export_options
11+
settings_obj: BaseModel, *, show_secrets: bool, **pydantic_export_options
1112
) -> dict[str, Any]:
1213
data = settings_obj.model_dump(**pydantic_export_options)
1314

@@ -25,16 +26,16 @@ def model_dump_with_secrets(
2526
data[field_name] = field_data.get_secret_value()
2627
else:
2728
data[field_name] = str(field_data)
28-
29+
2930
elif isinstance(field_data, Url):
3031
data[field_name] = str(field_data)
31-
32+
3233
elif isinstance(field_data, dict):
3334
field_type = get_type(settings_obj.model_fields[field_name])
3435
if issubclass(field_type, BaseModel):
3536
data[field_name] = model_dump_with_secrets(
3637
field_type.model_validate(field_data),
37-
show_secrets,
38+
show_secrets=show_secrets,
3839
**pydantic_export_options,
3940
)
4041

packages/settings-library/src/settings_library/utils_cli.py

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@
88
import rich
99
import typer
1010
from common_library.serialization import model_dump_with_secrets
11+
from models_library.utils.json_serialization import json_dumps
1112
from pydantic import ValidationError
13+
from pydantic_core import to_jsonable_python
1214
from pydantic_settings import BaseSettings
1315

1416
from ._constants import HEADER_STR
@@ -87,7 +89,7 @@ def print_as_json(
8789
def create_settings_command(
8890
settings_cls: type[BaseCustomSettings],
8991
logger: logging.Logger | None = None,
90-
json_serializer=json.dumps,
92+
json_serializer=json_dumps,
9193
) -> Callable:
9294
"""Creates typer command function for settings"""
9395

@@ -112,14 +114,24 @@ def settings(
112114
"""Resolves settings and prints envfile"""
113115

114116
if as_json_schema:
115-
typer.echo(settings_cls.schema_json(indent=0 if compact else 2))
117+
typer.echo(
118+
json.dumps(
119+
settings_cls.model_json_schema(),
120+
default=to_jsonable_python,
121+
indent=0 if compact else 2,
122+
)
123+
)
116124
return
117125

118126
try:
119127
settings_obj = settings_cls.create_from_envs()
120128

121129
except ValidationError as err:
122-
settings_schema = settings_cls.schema_json(indent=2)
130+
settings_schema = json.dumps(
131+
settings_cls.model_json_schema(),
132+
default=to_jsonable_python,
133+
indent=2,
134+
)
123135

124136
assert logger is not None # nosec
125137
logger.error( # noqa: TRY400

services/dask-sidecar/requirements/_base.in

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,6 @@ dask[distributed, diagnostics]
2525
dask-gateway # needed for the osparc-dask-gateway to preload the module
2626
fsspec[http, s3] # sub types needed as we access http and s3 here
2727
lz4 # for compression
28-
pydantic[email,dotenv]
28+
pydantic
2929
prometheus_client
3030
repro-zipfile

services/dask-sidecar/requirements/_base.txt

Lines changed: 103 additions & 2 deletions
Large diffs are not rendered by default.

services/dask-sidecar/requirements/_test.txt

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
annotated-types==0.7.0
2+
# via
3+
# -c requirements/_base.txt
4+
# pydantic
15
antlr4-python3-runtime==4.13.2
26
# via moto
37
attrs==23.2.0
@@ -141,11 +145,15 @@ py-partiql-parser==0.5.6
141145
# via moto
142146
pycparser==2.22
143147
# via cffi
144-
pydantic==1.10.15
148+
pydantic==2.9.2
145149
# via
146150
# -c requirements/../../../requirements/constraints.txt
147151
# -c requirements/_base.txt
148152
# aws-sam-translator
153+
pydantic-core==2.23.4
154+
# via
155+
# -c requirements/_base.txt
156+
# pydantic
149157
pyftpdlib==2.0.0
150158
# via pytest-localftpserver
151159
pyopenssl==24.2.1
@@ -244,6 +252,7 @@ typing-extensions==4.11.0
244252
# aws-sam-translator
245253
# cfn-lint
246254
# pydantic
255+
# pydantic-core
247256
urllib3==2.2.1
248257
# via
249258
# -c requirements/../../../requirements/constraints.txt

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ async def _write_input_data(
7171
if isinstance(input_params, FileUrl):
7272
file_name = (
7373
input_params.file_mapping
74-
or Path(URL(input_params.url).path.strip("/")).name
74+
or Path(URL(f"{input_params.url}").path.strip("/")).name
7575
)
7676

7777
destination_path = task_volumes.inputs_folder / file_name
@@ -114,7 +114,7 @@ async def _retrieve_output_data(
114114
)
115115
_logger.debug(
116116
"following outputs will be searched for:\n%s",
117-
self.task_parameters.output_data_keys.json(indent=1),
117+
self.task_parameters.output_data_keys.model_dump_json(indent=1),
118118
)
119119

120120
output_data = TaskOutputData.from_task_output(
@@ -132,7 +132,7 @@ async def _retrieve_output_data(
132132
if isinstance(output_params, FileUrl):
133133
assert ( # nosec
134134
output_params.file_mapping
135-
), f"{output_params.json(indent=1)} expected resolved in TaskOutputData.from_task_output"
135+
), f"{output_params.model_dump_json(indent=1)} expected resolved in TaskOutputData.from_task_output"
136136

137137
src_path = task_volumes.outputs_folder / output_params.file_mapping
138138
upload_tasks.append(
@@ -146,7 +146,9 @@ async def _retrieve_output_data(
146146
await asyncio.gather(*upload_tasks)
147147

148148
await self._publish_sidecar_log("All the output data were uploaded.")
149-
_logger.info("retrieved outputs data:\n%s", output_data.json(indent=1))
149+
_logger.info(
150+
"retrieved outputs data:\n%s", output_data.model_dump_json(indent=1)
151+
)
150152
return output_data
151153

152154
except (ValueError, ValidationError) as exc:

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
from models_library.services_resources import BootMode
2727
from models_library.utils.labels_annotations import OSPARC_LABEL_PREFIXES, from_labels
2828
from packaging import version
29-
from pydantic import ByteSize, parse_obj_as
29+
from pydantic import ByteSize, TypeAdapter
3030
from servicelib.logging_utils import (
3131
LogLevelInt,
3232
LogMessageStr,
@@ -95,7 +95,7 @@ async def create_container_config(
9595
NanoCPUs=nano_cpus_limit,
9696
),
9797
)
98-
logger.debug("Container configuration: \n%s", pformat(config.dict()))
98+
logger.debug("Container configuration: \n%s", pformat(config.model_dump()))
9999
return config
100100

101101

@@ -109,7 +109,7 @@ async def managed_container(
109109
logger, logging.DEBUG, msg=f"managing container {name} for {config.image}"
110110
):
111111
container = await docker_client.containers.create(
112-
config.dict(by_alias=True), name=name
112+
config.model_dump(by_alias=True), name=name
113113
)
114114
yield container
115115
except asyncio.CancelledError:
@@ -443,7 +443,7 @@ async def get_image_labels(
443443
data = from_labels(
444444
image_labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False
445445
)
446-
return parse_obj_as(ImageLabels, data)
446+
return TypeAdapter(ImageLabels).validate_python(data)
447447
return ImageLabels()
448448

449449

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
1-
from pydantic.errors import PydanticErrorMixin
1+
from common_library.errors_classes import OsparcErrorMixin
22

33

4-
class ComputationalSidecarRuntimeError(PydanticErrorMixin, RuntimeError):
4+
class ComputationalSidecarRuntimeError(OsparcErrorMixin, RuntimeError):
55
...
66

77

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py

Lines changed: 20 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,14 @@
33
from models_library.basic_regex import SIMPLE_VERSION_RE
44
from models_library.services import ServiceMetaDataPublished
55
from packaging import version
6-
from pydantic import BaseModel, ByteSize, Extra, Field, validator
6+
from pydantic import (
7+
BaseModel,
8+
ByteSize,
9+
ConfigDict,
10+
Field,
11+
field_validator,
12+
model_validator,
13+
)
714

815
LEGACY_INTEGRATION_VERSION = version.Version("0")
916
PROGRESS_REGEXP: re.Pattern[str] = re.compile(
@@ -41,21 +48,15 @@ class ContainerHostConfig(BaseModel):
4148
..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs"
4249
)
4350

44-
@validator("memory_swap", pre=True, always=True)
45-
@classmethod
46-
def ensure_no_memory_swap_means_no_swap(cls, v, values):
47-
if v is None:
48-
# if not set it will be the same value as memory to ensure swap is disabled
49-
return values["memory"]
50-
return v
51+
@model_validator(mode="after")
52+
def ensure_memory_swap_is_not_unlimited(self) -> "ContainerHostConfig":
53+
if self.memory_swap is None:
54+
self.memory_swap = self.memory
5155

52-
@validator("memory_swap")
53-
@classmethod
54-
def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values):
55-
if v < values["memory"]:
56+
if self.memory_swap < self.memory:
5657
msg = "Memory swap cannot be set to a smaller value than memory"
5758
raise ValueError(msg)
58-
return v
59+
return self
5960

6061

6162
class DockerContainerConfig(BaseModel):
@@ -71,26 +72,24 @@ class ImageLabels(BaseModel):
7172
default=str(LEGACY_INTEGRATION_VERSION),
7273
alias="integration-version",
7374
description="integration version number",
74-
regex=SIMPLE_VERSION_RE,
75+
pattern=SIMPLE_VERSION_RE,
7576
examples=["1.0.0"],
7677
)
7778
progress_regexp: str = Field(
7879
default=PROGRESS_REGEXP.pattern,
7980
alias="progress_regexp",
8081
description="regexp pattern for detecting computational service's progress",
8182
)
83+
model_config = ConfigDict(extra="ignore")
8284

83-
class Config:
84-
extra = Extra.ignore
85-
86-
@validator("integration_version", pre=True)
85+
@field_validator("integration_version", mode="before")
8786
@classmethod
8887
def default_integration_version(cls, v):
8988
if v is None:
9089
return ImageLabels().integration_version
9190
return v
9291

93-
@validator("progress_regexp", pre=True)
92+
@field_validator("progress_regexp", mode="before")
9493
@classmethod
9594
def default_progress_regexp(cls, v):
9695
if v is None:
@@ -104,6 +103,6 @@ def get_progress_regexp(self) -> re.Pattern[str]:
104103
return re.compile(self.progress_regexp)
105104

106105

107-
assert set(ImageLabels.__fields__).issubset(
108-
ServiceMetaDataPublished.__fields__
106+
assert set(ImageLabels.model_fields).issubset(
107+
ServiceMetaDataPublished.model_fields
109108
), "ImageLabels must be compatible with ServiceDockerData"

services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,4 +153,4 @@ async def periodicaly_check_if_aborted(task_name: str) -> None:
153153
def publish_event(dask_pub: distributed.Pub, event: BaseTaskEvent) -> None:
154154
"""never reraises, only CancellationError"""
155155
with log_catch(_logger, reraise=False):
156-
dask_pub.put(event.json())
156+
dask_pub.put(event.model_dump_json())

0 commit comments

Comments (0)