105 changes: 103 additions & 2 deletions services/dask-sidecar/requirements/_base.txt

Large diffs are not rendered by default.

11 changes: 10 additions & 1 deletion services/dask-sidecar/requirements/_test.txt
@@ -1,3 +1,7 @@
+annotated-types==0.7.0
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
antlr4-python3-runtime==4.13.2
    # via moto
attrs==23.2.0
@@ -141,11 +145,15 @@ py-partiql-parser==0.5.6
    # via moto
pycparser==2.22
    # via cffi
-pydantic==1.10.15
+pydantic==2.9.2
    # via
    #   -c requirements/../../../requirements/constraints.txt
    #   -c requirements/_base.txt
    #   aws-sam-translator
+pydantic-core==2.23.4
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
pyftpdlib==2.0.0
    # via pytest-localftpserver
pyopenssl==24.2.1
@@ -244,6 +252,7 @@ typing-extensions==4.11.0
    #   aws-sam-translator
    #   cfn-lint
    #   pydantic
+    #   pydantic-core
urllib3==2.2.1
    # via
    #   -c requirements/../../../requirements/constraints.txt
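Pydantic v2 splits its validation engine into the separately compiled pydantic-core package, which is why the new pin shows up next to pydantic itself and in the typing-extensions consumer list. A quick runtime sanity check that the resolved versions match these lockfile pins might look like this (a sketch; the version strings are the pins from this PR):

```python
import pydantic
import pydantic_core

# pins taken from the requirements files in this PR
assert pydantic.VERSION == "2.9.2"
assert pydantic_core.__version__ == "2.23.4"
```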
services/dask-sidecar/src/simcore_service_dask_sidecar/cli.py
@@ -1,6 +1,7 @@
import logging

import typer
+from models_library.utils.json_serialization import json_dumps
from settings_library.utils_cli import create_settings_command, create_version_callback

from ._meta import PROJECT_NAME, __version__
@@ -15,4 +16,8 @@
# COMMANDS
#
main.callback()(create_version_callback(__version__))
-main.command()(create_settings_command(settings_cls=Settings, logger=_logger))
+main.command()(
+    create_settings_command(
+        settings_cls=Settings, logger=_logger, json_serializer=json_dumps
+    )
+)
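Passing an explicit json_serializer is necessary because a v2 settings dump can contain values that the stdlib json module cannot encode, for example AnyUrl instances, which are no longer str subclasses. The real helper lives in models_library.utils.json_serialization; a minimal sketch of what such a function could do (the name json_dumps_sketch and the handled types are assumptions for illustration):

```python
import json
from typing import Any

from pydantic import AnyUrl, SecretStr


def json_dumps_sketch(obj: Any, **kwargs: Any) -> str:
    """Hypothetical stand-in for models_library's json_dumps."""

    def _default(o: Any) -> Any:
        if isinstance(o, AnyUrl):
            return str(o)  # v2 URL types are plain objects, not str subclasses
        if isinstance(o, SecretStr):
            return str(o)  # preserves the masked "**********" representation
        raise TypeError(f"{type(o)} is not JSON serializable")

    return json.dumps(obj, default=_default, **kwargs)
```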
services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py
@@ -71,7 +71,7 @@ async def _write_input_data(
if isinstance(input_params, FileUrl):
file_name = (
input_params.file_mapping
-or Path(URL(input_params.url).path.strip("/")).name
+or Path(URL(f"{input_params.url}").path.strip("/")).name
)

destination_path = task_volumes.inputs_folder / file_name
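The f-string wrapping is a recurring v2 migration pattern: pydantic v1 URL types subclassed str and could be handed straight to yarl.URL, while v2 URL types are plain objects that need explicit conversion. A self-contained illustration (the example URL is made up):

```python
from pathlib import Path

from pydantic import AnyUrl, TypeAdapter
from yarl import URL

url = TypeAdapter(AnyUrl).validate_python("https://example.com/inputs/data.zip")
assert not isinstance(url, str)  # v1 URLs subclassed str; v2 URLs do not
assert Path(URL(f"{url}").path.strip("/")).name == "data.zip"
```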
services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py
@@ -1,7 +1,7 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


-class ComputationalSidecarRuntimeError(PydanticErrorMixin, RuntimeError):
+class ComputationalSidecarRuntimeError(OsparcErrorMixin, RuntimeError):
...


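pydantic.errors.PydanticErrorMixin was dropped in v2, so the error base switches to the in-house OsparcErrorMixin from common_library. In spirit, such a mixin formats a class-level msg_template with the keyword arguments passed at raise time. A rough sketch under that assumption (both class names below are illustrative; the real implementation lives in common_library.errors_classes):

```python
class ErrorMixinSketch:  # hypothetical stand-in for OsparcErrorMixin
    msg_template: str = "{msg}"

    def __init__(self, **ctx: object) -> None:
        self.ctx = ctx
        # the cooperative super() call lands in the sibling base, e.g. RuntimeError
        super().__init__(self.msg_template.format(**ctx))


class ServiceRunError(ErrorMixinSketch, RuntimeError):  # hypothetical subclass
    msg_template = "service {name} failed with exit code {exit_code}"


try:
    raise ServiceRunError(name="sleeper", exit_code=2)
except RuntimeError as err:
    assert "sleeper" in str(err)
```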
services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py
@@ -3,7 +3,14 @@
from models_library.basic_regex import SIMPLE_VERSION_RE
from models_library.services import ServiceMetaDataPublished
from packaging import version
-from pydantic import BaseModel, ByteSize, Extra, Field, validator
+from pydantic import (
+    BaseModel,
+    ByteSize,
+    ConfigDict,
+    Field,
+    ValidationInfo,
+    field_validator,
+)

LEGACY_INTEGRATION_VERSION = version.Version("0")
PROGRESS_REGEXP: re.Pattern[str] = re.compile(
@@ -41,18 +48,20 @@ class ContainerHostConfig(BaseModel):
..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs"
)

@validator("memory_swap", pre=True, always=True)
@field_validator("memory_swap", mode="before")
@classmethod
-def ensure_no_memory_swap_means_no_swap(cls, v, values):
+def ensure_no_memory_swap_means_no_swap(cls, v, info: ValidationInfo):
if v is None:
# if not set it will be the same value as memory to ensure swap is disabled
-return values["memory"]
+return info.data["memory"]
return v

@validator("memory_swap")
@field_validator("memory_swap")
@classmethod
-def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values):
-    if v < values["memory"]:
+def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(
+    cls, v, info: ValidationInfo
+):
+    if v < info.data["memory"]:
msg = "Memory swap cannot be set to a smaller value than memory"
raise ValueError(msg)
return v
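The decorator swap follows the standard v1-to-v2 mapping: @validator(..., pre=True) becomes @field_validator(..., mode="before"), and the values dict gives way to ValidationInfo.data. One subtlety: v1's always=True ran the validator even when the field was omitted, whereas v2 needs validate_default=True on the field to get the same effect. A minimal sketch of the pattern (the model below is illustrative, not the PR's ContainerHostConfig):

```python
from pydantic import BaseModel, Field, ValidationInfo, field_validator


class ResourcesSketch(BaseModel):
    memory: int
    # validate_default=True approximates v1's `always=True` semantics
    memory_swap: int | None = Field(default=None, validate_default=True)

    # v1 equivalent: @validator("memory_swap", pre=True, always=True)
    @field_validator("memory_swap", mode="before")
    @classmethod
    def _no_swap_by_default(cls, v, info: ValidationInfo):
        # fields validated earlier are exposed via info.data (v1: `values`)
        return info.data["memory"] if v is None else v


assert ResourcesSketch(memory=1024).memory_swap == 1024
```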
@@ -71,26 +80,24 @@ class ImageLabels(BaseModel):
default=str(LEGACY_INTEGRATION_VERSION),
alias="integration-version",
description="integration version number",
-regex=SIMPLE_VERSION_RE,
+pattern=SIMPLE_VERSION_RE,
examples=["1.0.0"],
)
progress_regexp: str = Field(
default=PROGRESS_REGEXP.pattern,
alias="progress_regexp",
description="regexp pattern for detecting computational service's progress",
)
+model_config = ConfigDict(extra="ignore")

-class Config:
-    extra = Extra.ignore

@validator("integration_version", pre=True)
@field_validator("integration_version", mode="before")
@classmethod
def default_integration_version(cls, v):
if v is None:
return ImageLabels().integration_version
return v

@validator("progress_regexp", pre=True)
@field_validator("progress_regexp", mode="before")
@classmethod
def default_progress_regexp(cls, v):
if v is None:
@@ -104,6 +111,6 @@ def get_progress_regexp(self) -> re.Pattern[str]:
return re.compile(self.progress_regexp)


-assert set(ImageLabels.__fields__).issubset(
-    ServiceMetaDataPublished.__fields__
+assert set(ImageLabels.model_fields).issubset(
+    ServiceMetaDataPublished.model_fields
), "ImageLabels must be compatible with ServiceDockerData"
services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py
@@ -12,7 +12,7 @@
import aiofiles
import aiofiles.tempfile
import fsspec # type: ignore[import-untyped]
-from pydantic import ByteSize, FileUrl, parse_obj_as
+from pydantic import ByteSize, FileUrl, TypeAdapter
from pydantic.networks import AnyUrl
from servicelib.logging_utils import LogLevelInt, LogMessageStr
from settings_library.s3 import S3Settings
@@ -145,7 +145,7 @@ async def pull_file_from_remote(
storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings)
await _copy_file(
src_url,
-parse_obj_as(FileUrl, dst_path.as_uri()),
+TypeAdapter(FileUrl).validate_python(dst_path.as_uri()),
src_storage_cfg=cast(dict[str, Any], storage_kwargs),
log_publishing_cb=log_publishing_cb,
text_prefix=f"Downloading '{src_url.path.strip('/')}':",
@@ -215,7 +215,7 @@ async def _push_file_to_remote(
storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings)

await _copy_file(
-parse_obj_as(FileUrl, file_to_upload.as_uri()),
+TypeAdapter(FileUrl).validate_python(file_to_upload.as_uri()),
dst_url,
dst_storage_cfg=cast(dict[str, Any], storage_kwargs),
log_publishing_cb=log_publishing_cb,
@@ -243,7 +243,7 @@ async def push_file_to_remote(
src_mime_type, _ = mimetypes.guess_type(src_path)

if dst_mime_type == _ZIP_MIME_TYPE and src_mime_type != _ZIP_MIME_TYPE:
-archive_file_path = Path(tmp_dir) / Path(URL(dst_url).path).name
+archive_file_path = Path(tmp_dir) / Path(URL(f"{dst_url}").path).name
await log_publishing_cb(
f"Compressing '{src_path.name}' to '{archive_file_path.name}'...",
logging.INFO,
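parse_obj_as was removed in v2 in favour of TypeAdapter, which wraps an arbitrary type and exposes validate_python / validate_json. Building an adapter compiles a pydantic-core schema, so when the same type is validated repeatedly it can be worth constructing the adapter once at module level; a sketch of the pattern used in this file:

```python
from pathlib import Path

from pydantic import FileUrl, TypeAdapter

# built once and reused; each TypeAdapter(...) call compiles a core schema
_FILE_URL_ADAPTER = TypeAdapter(FileUrl)

# v1 equivalent: parse_obj_as(FileUrl, Path("/tmp/data.bin").as_uri())
url = _FILE_URL_ADAPTER.validate_python(Path("/tmp/data.bin").as_uri())
assert f"{url}" == "file:///tmp/data.bin"
```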
services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py
@@ -2,7 +2,7 @@
from typing import Any

from models_library.basic_types import LogLevel
-from pydantic import Field, validator
+from pydantic import AliasChoices, Field, field_validator
from settings_library.base import BaseCustomSettings
from settings_library.utils_logging import MixinLoggingSettings

@@ -14,7 +14,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):
SC_BOOT_MODE: str | None = None
LOG_LEVEL: LogLevel = Field(
LogLevel.INFO.value,
env=["DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"],
validation_alias=AliasChoices(
"DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
),
)

# sidecar config ---
@@ -37,7 +39,10 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):

DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
default=False,
env=["DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"],
validation_alias=AliasChoices(
"DASK_LOG_FORMAT_LOCAL_DEV_ENABLED",
"LOG_FORMAT_LOCAL_DEV_ENABLED",
),
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
)

@@ -50,7 +55,7 @@ def as_worker(self) -> bool:
assert self.DASK_SCHEDULER_HOST is not None # nosec
return as_worker

@validator("LOG_LEVEL", pre=True)
@field_validator("LOG_LEVEL", mode="before")
@classmethod
def _validate_loglevel(cls, value: Any) -> str:
return cls.validate_log_level(f"{value}")
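pydantic-settings v2 drops the v1 env=[...] shortcut on Field; a field that accepts several environment variable names now declares validation_alias=AliasChoices(...), with the names tried in order. A minimal standalone sketch (plain BaseSettings rather than the repo's BaseCustomSettings):

```python
import os

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class SettingsSketch(BaseSettings):
    # v1: LOG_LEVEL: str = Field("INFO", env=["DASK_SIDECAR_LOGLEVEL", "LOG_LEVEL"])
    LOG_LEVEL: str = Field(
        "INFO",
        validation_alias=AliasChoices("DASK_SIDECAR_LOGLEVEL", "LOG_LEVEL"),
    )


os.environ["DASK_SIDECAR_LOGLEVEL"] = "DEBUG"
assert SettingsSketch().LOG_LEVEL == "DEBUG"  # first matching alias wins
```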
14 changes: 11 additions & 3 deletions services/dask-sidecar/tests/unit/test_cli.py
@@ -6,27 +6,35 @@


import os
+import traceback

+from click.testing import Result
from pytest_simcore.helpers.typing_env import EnvVarsDict
from simcore_service_dask_sidecar._meta import API_VERSION
from simcore_service_dask_sidecar.cli import main
from simcore_service_dask_sidecar.settings import Settings
from typer.testing import CliRunner


+def _format_cli_error(result: Result) -> str:
+    assert result.exception
+    tb_message = "\n".join(traceback.format_tb(result.exception.__traceback__))
+    return tb_message


def test_cli_help_and_version(cli_runner: CliRunner):
# invitations-maker --help
result = cli_runner.invoke(main, "--help")
-assert result.exit_code == os.EX_OK, result.output
+assert result.exit_code == os.EX_OK, _format_cli_error(result)

result = cli_runner.invoke(main, "--version")
-assert result.exit_code == os.EX_OK, result.output
+assert result.exit_code == os.EX_OK, _format_cli_error(result)
assert result.stdout.strip() == API_VERSION


def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict):
result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"])
-assert result.exit_code == os.EX_OK, result.output
+assert result.exit_code == os.EX_OK, _format_cli_error(result)

print(result.output)
settings = Settings.parse_raw(result.output)
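One leftover worth flagging: Settings.parse_raw still works under pydantic v2 but emits a deprecation warning; the v2-native spelling of that last line would be (sketch):

```python
settings = Settings.model_validate_json(result.output)  # v2 replacement for parse_raw
```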
13 changes: 7 additions & 6 deletions services/dask-sidecar/tests/unit/test_file_utils.py
@@ -14,7 +14,7 @@
import fsspec
import pytest
from faker import Faker
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter, parse_obj_as
from pytest_localftpserver.servers import ProcessFTPServer
from pytest_mock.plugin import MockerFixture
from settings_library.s3 import S3Settings
@@ -45,8 +45,8 @@ def s3_presigned_link_storage_kwargs(s3_settings: S3Settings) -> dict[str, Any]:

@pytest.fixture
def ftp_remote_file_url(ftpserver: ProcessFTPServer, faker: Faker) -> AnyUrl:
-return parse_obj_as(
-    AnyUrl, f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}"
+return TypeAdapter(AnyUrl).validate_python(
+    f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}"
)


@@ -56,8 +56,7 @@ async def s3_presigned_link_remote_file_url(
aiobotocore_s3_client,
faker: Faker,
) -> AnyUrl:
-return parse_obj_as(
-    AnyUrl,
+return TypeAdapter(AnyUrl).validate_python(
await aiobotocore_s3_client.generate_presigned_url(
"put_object",
Params={"Bucket": s3_settings.S3_BUCKET_NAME, "Key": faker.file_name()},
@@ -68,7 +67,9 @@

@pytest.fixture
def s3_remote_file_url(s3_settings: S3Settings, faker: Faker) -> AnyUrl:
return parse_obj_as(AnyUrl, f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}")
return TypeAdapter(AnyUrl).validate_python(
f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}"
)


@dataclass(frozen=True)