
Commit 57ac5a2

Author: Andrei Neagu
Commit message: fixed unit tests
1 parent 68f683c · commit 57ac5a2

6 files changed (+45, −27 lines)

packages/common-library/src/common_library/pydantic_networks_extension.py

Lines changed: 6 additions & 1 deletion

@@ -1,6 +1,6 @@
 from typing import Annotated, TypeAlias

-from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, HttpUrl
+from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, FileUrl, HttpUrl
 from pydantic_core import Url


@@ -23,3 +23,8 @@ def _strip_last_slash(url: Url) -> str:
     HttpUrl,
     AfterValidator(_strip_last_slash),
 ]
+
+FileUrlLegacy: TypeAlias = Annotated[
+    FileUrl,
+    AfterValidator(_strip_last_slash),
+]
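
For context: the new FileUrlLegacy alias validates like pydantic's FileUrl but hands back a plain string with any trailing slash removed, mimicking the pydantic v1 string behaviour the callers below still expect. A minimal sketch of how such an annotated alias is consumed (the helper body and the URI are illustrative, not copied from the repo):

from typing import Annotated, TypeAlias

from pydantic import AfterValidator, FileUrl, TypeAdapter
from pydantic_core import Url


def _strip_last_slash(url: Url) -> str:
    # stand-in for the helper above: render the URL and drop a trailing slash
    return f"{url}".rstrip("/")


FileUrlLegacy: TypeAlias = Annotated[FileUrl, AfterValidator(_strip_last_slash)]

# validation returns a v1-style plain string instead of a pydantic Url object
print(TypeAdapter(FileUrlLegacy).validate_python("file:///tmp/inputs/"))
# -> file:///tmp/inputs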

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py

Lines changed: 1 addition & 1 deletion

@@ -71,7 +71,7 @@ async def _write_input_data(
         if isinstance(input_params, FileUrl):
             file_name = (
                 input_params.file_mapping
-                or Path(URL(input_params.url).path.strip("/")).name
+                or Path(URL(f"{input_params.url}").path.strip("/")).name
             )

             destination_path = task_volumes.inputs_folder / file_name
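
The only change here is wrapping input_params.url in an f-string: pydantic v2 URL types are no longer str subclasses, so they must be rendered explicitly before being passed to URL (assumed here to be yarl.URL). A small sketch with an illustrative URL standing in for input_params.url:

from pathlib import Path

from pydantic import TypeAdapter
from pydantic.networks import AnyUrl
from yarl import URL  # assumption: URL in core.py refers to yarl.URL

# illustrative stand-in for input_params.url
input_url = TypeAdapter(AnyUrl).validate_python("s3://bucket/inputs/data.csv")

# yarl.URL() rejects non-str arguments, hence the explicit f-string rendering
file_name = Path(URL(f"{input_url}").path.strip("/")).name
print(file_name)  # -> data.csv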

services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


-class ComputationalSidecarRuntimeError(PydanticErrorMixin, RuntimeError):
+class ComputationalSidecarRuntimeError(OsparcErrorMixin, RuntimeError):
     ...


services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py

Lines changed: 22 additions & 15 deletions

@@ -3,7 +3,14 @@
 from models_library.basic_regex import SIMPLE_VERSION_RE
 from models_library.services import ServiceMetaDataPublished
 from packaging import version
-from pydantic import BaseModel, ByteSize, Extra, Field, validator
+from pydantic import (
+    BaseModel,
+    ByteSize,
+    ConfigDict,
+    Field,
+    ValidationInfo,
+    field_validator,
+)

 LEGACY_INTEGRATION_VERSION = version.Version("0")
 PROGRESS_REGEXP: re.Pattern[str] = re.compile(
@@ -41,18 +48,20 @@ class ContainerHostConfig(BaseModel):
         ..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs"
     )

-    @validator("memory_swap", pre=True, always=True)
+    @field_validator("memory_swap", mode="before")
     @classmethod
-    def ensure_no_memory_swap_means_no_swap(cls, v, values):
+    def ensure_no_memory_swap_means_no_swap(cls, v, info: ValidationInfo):
         if v is None:
             # if not set it will be the same value as memory to ensure swap is disabled
-            return values["memory"]
+            return info.data["memory"]
         return v

-    @validator("memory_swap")
+    @field_validator("memory_swap")
     @classmethod
-    def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values):
-        if v < values["memory"]:
+    def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(
+        cls, v, info: ValidationInfo
+    ):
+        if v < info.data["memory"]:
             msg = "Memory swap cannot be set to a smaller value than memory"
             raise ValueError(msg)
         return v
@@ -71,26 +80,24 @@ class ImageLabels(BaseModel):
         default=str(LEGACY_INTEGRATION_VERSION),
         alias="integration-version",
         description="integration version number",
-        regex=SIMPLE_VERSION_RE,
+        pattern=SIMPLE_VERSION_RE,
         examples=["1.0.0"],
     )
     progress_regexp: str = Field(
         default=PROGRESS_REGEXP.pattern,
         alias="progress_regexp",
         description="regexp pattern for detecting computational service's progress",
     )
+    model_config = ConfigDict(extra="ignore")

-    class Config:
-        extra = Extra.ignore
-
-    @validator("integration_version", pre=True)
+    @field_validator("integration_version", mode="before")
     @classmethod
     def default_integration_version(cls, v):
         if v is None:
             return ImageLabels().integration_version
         return v

-    @validator("progress_regexp", pre=True)
+    @field_validator("progress_regexp", mode="before")
     @classmethod
     def default_progress_regexp(cls, v):
         if v is None:
@@ -104,6 +111,6 @@ def get_progress_regexp(self) -> re.Pattern[str]:
         return re.compile(self.progress_regexp)


-assert set(ImageLabels.__fields__).issubset(
-    ServiceMetaDataPublished.__fields__
+assert set(ImageLabels.model_fields).issubset(
+    ServiceMetaDataPublished.model_fields
 ), "ImageLabels must be compatible with ServiceDockerData"

services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py

Lines changed: 5 additions & 4 deletions

@@ -12,7 +12,8 @@
 import aiofiles
 import aiofiles.tempfile
 import fsspec  # type: ignore[import-untyped]
-from pydantic import ByteSize, FileUrl, parse_obj_as
+from common_library.pydantic_networks_extension import FileUrlLegacy
+from pydantic import ByteSize, TypeAdapter
 from pydantic.networks import AnyUrl
 from servicelib.logging_utils import LogLevelInt, LogMessageStr
 from settings_library.s3 import S3Settings
@@ -145,7 +146,7 @@ async def pull_file_from_remote(
         storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings)
     await _copy_file(
         src_url,
-        parse_obj_as(FileUrl, dst_path.as_uri()),
+        TypeAdapter(FileUrlLegacy).validate_python(dst_path.as_uri()),
         src_storage_cfg=cast(dict[str, Any], storage_kwargs),
         log_publishing_cb=log_publishing_cb,
         text_prefix=f"Downloading '{src_url.path.strip('/')}':",
@@ -215,7 +216,7 @@ async def _push_file_to_remote(
         storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings)

     await _copy_file(
-        parse_obj_as(FileUrl, file_to_upload.as_uri()),
+        TypeAdapter(FileUrlLegacy).validate_python(file_to_upload.as_uri()),
         dst_url,
         dst_storage_cfg=cast(dict[str, Any], storage_kwargs),
         log_publishing_cb=log_publishing_cb,
@@ -243,7 +244,7 @@ async def push_file_to_remote(
     src_mime_type, _ = mimetypes.guess_type(src_path)

     if dst_mime_type == _ZIP_MIME_TYPE and src_mime_type != _ZIP_MIME_TYPE:
-        archive_file_path = Path(tmp_dir) / Path(URL(dst_url).path).name
+        archive_file_path = Path(tmp_dir) / Path(URL(f"{dst_url}").path).name
         await log_publishing_cb(
             f"Compressing '{src_path.name}' to '{archive_file_path.name}'...",
             logging.INFO,
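
parse_obj_as is deprecated in pydantic v2; the replacement used above builds a TypeAdapter for the target type and calls validate_python. A minimal sketch with plain FileUrl and an illustrative POSIX path (the repo additionally swaps in FileUrlLegacy, shown earlier, to keep v1-style string output):

from pathlib import Path

from pydantic import FileUrl, TypeAdapter

dst_path = Path("/tmp/outputs/result.zip")  # illustrative POSIX path

# pydantic v1: parse_obj_as(FileUrl, dst_path.as_uri())
# pydantic v2: TypeAdapter(FileUrl).validate_python(dst_path.as_uri())
file_url = TypeAdapter(FileUrl).validate_python(dst_path.as_uri())
print(file_url)  # -> file:///tmp/outputs/result.zip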

services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py

Lines changed: 9 additions & 4 deletions

@@ -2,7 +2,7 @@
 from typing import Any

 from models_library.basic_types import LogLevel
-from pydantic import Field, validator
+from pydantic import AliasChoices, Field, field_validator
 from settings_library.base import BaseCustomSettings
 from settings_library.utils_logging import MixinLoggingSettings

@@ -14,7 +14,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):
     SC_BOOT_MODE: str | None = None
     LOG_LEVEL: LogLevel = Field(
         LogLevel.INFO.value,
-        env=["DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"],
+        validation_alias=AliasChoices(
+            "DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
+        ),
     )

     # sidecar config ---
@@ -37,7 +39,10 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):

     DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=["DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"],
+        validation_alias=AliasChoices(
+            "DASK_LOG_FORMAT_LOCAL_DEV_ENABLED",
+            "LOG_FORMAT_LOCAL_DEV_ENABLED",
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )

@@ -50,7 +55,7 @@ def as_worker(self) -> bool:
         assert self.DASK_SCHEDULER_HOST is not None  # nosec
         return as_worker

-    @validator("LOG_LEVEL", pre=True)
+    @field_validator("LOG_LEVEL", mode="before")
     @classmethod
     def _validate_loglevel(cls, value: Any) -> str:
         return cls.validate_log_level(f"{value}")
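
pydantic v2 drops the env=[...] shortcut on Field; multiple environment variable names are now declared with validation_alias=AliasChoices(...), where the first listed variable that is set wins. A minimal sketch using plain pydantic_settings.BaseSettings (the project's BaseCustomSettings is assumed to build on it):

import os

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class SketchSettings(BaseSettings):
    LOG_LEVEL: str = Field(
        "INFO",
        validation_alias=AliasChoices(
            "DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
        ),
    )


os.environ["LOG_LEVEL"] = "DEBUG"
print(SketchSettings().LOG_LEVEL)  # -> DEBUG (assuming the other aliases are unset)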
