diff --git a/.env-devel b/.env-devel
index 8ae1e5856d5e..dc71eb30f760 100644
--- a/.env-devel
+++ b/.env-devel
@@ -31,7 +31,7 @@
 AUTOSCALING_EC2_ACCESS=null
 AUTOSCALING_EC2_INSTANCES=null
 AUTOSCALING_LOGLEVEL=WARNING
 AUTOSCALING_NODES_MONITORING=null
-AUTOSCALING_POLL_INTERVAL=10
+AUTOSCALING_POLL_INTERVAL="00:00:10"
 AUTOSCALING_SSM_ACCESS=null
 AWS_S3_CLI_S3=null
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 89701508fbf1..faa7597a3712 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -13,6 +13,7 @@ Makefile @pcrespov @sanderegg
 /api/ @sanderegg @pcrespov @matusdrobuliak66
 /ci/ @sanderegg @pcrespov
 /docs/ @pcrespov
+/packages/common-library/ @giancarloromeo
 /packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66
 /packages/postgres-database/ @matusdrobuliak66
 /packages/pytest-simcore/ @pcrespov @sanderegg
diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index 958b074cae24..130a6d898210 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -54,6 +54,7 @@ jobs:
       aws-library: ${{ steps.filter.outputs.aws-library }}
       dask-task-models-library: ${{ steps.filter.outputs.dask-task-models-library }}
       models-library: ${{ steps.filter.outputs.models-library }}
+      common-library: ${{ steps.filter.outputs.common-library }}
       notifications-library: ${{ steps.filter.outputs.notifications-library }}
       postgres-database: ${{ steps.filter.outputs.postgres-database }}
       service-integration: ${{ steps.filter.outputs.service-integration }}
@@ -110,6 +111,8 @@ jobs:
             - 'services/docker-compose*'
             - 'scripts/mypy/*'
             - 'mypy.ini'
+          common-library:
+            - 'packages/common-library/**'
           notifications-library:
             - 'packages/notifications-library/**'
             - 'packages/postgres-database/**'
@@ -1605,6 +1608,47 @@ jobs:
         with:
           flags: unittests #optional

+  unit-test-common-library:
+    needs: changes
+    if: ${{ needs.changes.outputs.common-library == 'true' || github.event_name == 'push' }}
+    timeout-minutes: 18 # if this timeout gets too small, then split the tests
+    name: "[unit] common-library"
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        python: ["3.11"]
+        os: [ubuntu-22.04]
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v4
+      - name: setup docker buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          driver: docker-container
+      - name: setup python environment
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python }}
+      - name: install uv
+        uses: yezz123/setup-uv@v4
+      - uses: actions/cache@v4
+        id: cache-uv
+        with:
+          path: ~/.cache/uv
+          key: ${{ runner.os }}-${{ github.job }}-python-${{ matrix.python }}-uv
+      - name: show system version
+        run: ./ci/helpers/show_system_versions.bash
+      - name: install
+        run: ./ci/github/unit-testing/common-library.bash install
+      - name: typecheck
+        run: ./ci/github/unit-testing/common-library.bash typecheck
+      - name: test
+        run: ./ci/github/unit-testing/common-library.bash test
+      - uses: codecov/codecov-action@v4.5.0
+        with:
+          flags: unittests #optional
+
   unit-test-notifications-library:
     needs: changes
     if: ${{ needs.changes.outputs.notifications-library == 'true' || github.event_name == 'push' }}
@@ -1716,6 +1760,7 @@ jobs:
       unit-test-efs-guardian,
       unit-test-frontend,
       unit-test-models-library,
+      unit-test-common-library,
       unit-test-notifications-library,
       unit-test-osparc-gateway-server,
       unit-test-payments,
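The `.env-devel` change above swaps a bare number of seconds for a duration string. A minimal sketch of why, assuming pydantic v2's standard `timedelta` string parsing (this snippet is illustrative and not part of the PR):

```python
from datetime import timedelta

from pydantic import TypeAdapter

# pydantic v2 parses timedeltas from "[DD,]HH:MM:SS"-style or ISO 8601 strings,
# while plain numbers are still interpreted as seconds.
adapter = TypeAdapter(timedelta)
assert adapter.validate_python("00:00:10") == timedelta(seconds=10)
assert adapter.validate_python(10) == timedelta(seconds=10)
```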
diff --git a/ci/github/unit-testing/common-library.bash b/ci/github/unit-testing/common-library.bash
new file mode 100755
index 000000000000..715471741033
--- /dev/null
+++ b/ci/github/unit-testing/common-library.bash
@@ -0,0 +1,43 @@
+#!/bin/bash
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+set -o errexit  # abort on nonzero exitstatus
+set -o nounset  # abort on unbound variable
+set -o pipefail # don't hide errors within pipes
+IFS=$'\n\t'
+
+install() {
+  make devenv
+  # shellcheck source=/dev/null
+  source .venv/bin/activate
+  pushd packages/common-library
+  make install-ci
+  popd
+  uv pip list
+}
+
+test() {
+  # shellcheck source=/dev/null
+  source .venv/bin/activate
+  pushd packages/common-library
+  make tests-ci
+  popd
+}
+
+typecheck() {
+  # shellcheck source=/dev/null
+  source .venv/bin/activate
+  uv pip install mypy
+  pushd packages/common-library
+  make mypy
+  popd
+}
+
+# Check if the function exists (bash specific)
+if declare -f "$1" >/dev/null; then
+  # call arguments verbatim
+  "$@"
+else
+  # Show a helpful error
+  echo "'$1' is not a known function name" >&2
+  exit 1
+fi
diff --git a/ci/helpers/requirements.txt b/ci/helpers/requirements.txt
index 55a3ab4e1632..daf26ed5c0b7 100644
--- a/ci/helpers/requirements.txt
+++ b/ci/helpers/requirements.txt
@@ -1,17 +1,19 @@
 # This file was autogenerated by uv via the following command:
-#    uv pip compile requirements.in
+#    uv pip compile requirements.in -o requirements.txt
 aiohttp==3.9.5
+    # via
+    #   -c ../../requirements/constraints.txt
+    #   -r requirements.in
 aiosignal==1.3.1
     # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.3.0
     # via starlette
-async-timeout==4.0.3
-    # via aiohttp
 attrs==23.2.0
     # via aiohttp
-exceptiongroup==1.2.1
-    # via anyio
-fastapi==0.99.1
+fastapi==0.115.0
+    # via -r requirements.in
 frozenlist==1.4.1
     # via
     #   aiohttp
@@ -24,16 +26,22 @@ multidict==6.0.5
     # via
     #   aiohttp
     #   yarl
-pydantic==1.10.15
-    # via fastapi
+pydantic==2.9.2
+    # via
+    #   -c ../../requirements/constraints.txt
+    #   fastapi
+pydantic-core==2.23.4
+    # via pydantic
 sniffio==1.3.1
     # via anyio
-starlette==0.27.0
-    # via fastapi
+starlette==0.38.6
+    # via
+    #   -c ../../requirements/constraints.txt
+    #   fastapi
 typing-extensions==4.11.0
     # via
-    #   anyio
     #   fastapi
     #   pydantic
+    #   pydantic-core
 yarl==1.9.4
     # via aiohttp
diff --git a/packages/aws-library/requirements/_base.in b/packages/aws-library/requirements/_base.in
index 628cebcf110c..d884806f7032 100644
--- a/packages/aws-library/requirements/_base.in
+++ b/packages/aws-library/requirements/_base.in
@@ -2,6 +2,7 @@
 # Specifies third-party dependencies for 'aws-library'
 #
 --constraint ../../../requirements/constraints.txt
+--requirement ../../../packages/common-library/requirements/_base.in
 --requirement ../../../packages/models-library/requirements/_base.in
 --requirement ../../../packages/service-library/requirements/_base.in
 --requirement ../../../packages/settings-library/requirements/_base.in
diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt
index b9681f9234ac..b70065c91d34 100644
--- a/packages/aws-library/requirements/_base.txt
+++ b/packages/aws-library/requirements/_base.txt
@@ -34,6 +34,8 @@ aiormq==6.8.1
     # via aio-pika
 aiosignal==1.3.1
     # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.6.0
     # via
     #   fast-depends
@@ -178,7 +180,7 @@ protobuf==4.25.5
     #   opentelemetry-proto
 psutil==6.0.0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
-pydantic==1.10.18
+pydantic==2.9.1
     # via
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -193,6 +195,20 @@ pydantic==1.10.18
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/_base.in
     #   fast-depends
+    #   pydantic-extra-types
+    #   pydantic-settings
+pydantic-core==2.23.3
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+pydantic-settings==2.5.2
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 pyinstrument==4.7.3
@@ -201,6 +217,8 @@ python-dateutil==2.9.0.post0
     # via
     #   arrow
     #   botocore
+python-dotenv==1.0.1
+    # via pydantic-settings
 pyyaml==6.0.2
     # via
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -277,6 +295,7 @@ typing-extensions==4.12.2
     #   faststream
     #   opentelemetry-sdk
     #   pydantic
+    #   pydantic-core
     #   typer
     #   types-aiobotocore
     #   types-aiobotocore-ec2
diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt
index 68df09cd6f43..562f30e34869 100644
--- a/packages/aws-library/requirements/_test.txt
+++ b/packages/aws-library/requirements/_test.txt
@@ -1,3 +1,7 @@
+annotated-types==0.7.0
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
 antlr4-python3-runtime==4.13.2
     # via moto
 appdirs==1.4.4
@@ -154,11 +158,15 @@ py-partiql-parser==0.5.6
     # via moto
 pycparser==2.22
     # via cffi
-pydantic==1.10.18
+pydantic==2.9.1
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   -c requirements/_base.txt
     #   aws-sam-translator
+pydantic-core==2.23.3
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
 pyparsing==3.1.4
     # via moto
 pytest==8.3.3
@@ -196,7 +204,9 @@ python-dateutil==2.9.0.post0
     #   faker
     #   moto
 python-dotenv==1.0.1
-    # via -r requirements/_test.in
+    # via
+    #   -c requirements/_base.txt
+    #   -r requirements/_test.in
 pyyaml==6.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
@@ -274,6 +284,7 @@ typing-extensions==4.12.2
     #   flexparser
     #   pint
     #   pydantic
+    #   pydantic-core
     #   types-aioboto3
     #   types-aiobotocore
 urllib3==2.2.3
diff --git a/packages/aws-library/requirements/ci.txt b/packages/aws-library/requirements/ci.txt
index 89277c22256f..bac75da67f80 100644
--- a/packages/aws-library/requirements/ci.txt
+++ b/packages/aws-library/requirements/ci.txt
@@ -12,8 +12,9 @@
 --requirement _tools.txt

 # installs this repo's packages
+simcore-common-library @ ../common-library
+simcore-models-library @ ../models-library/
 pytest-simcore @ ../pytest-simcore
-simcore-models-library @ ../models-library
 simcore-service-library @ ../service-library/
 simcore-settings-library @ ../settings-library/
diff --git a/packages/aws-library/requirements/dev.txt b/packages/aws-library/requirements/dev.txt
index f89567254070..34cc644b370a 100644
--- a/packages/aws-library/requirements/dev.txt
+++ b/packages/aws-library/requirements/dev.txt
@@ -12,8 +12,9 @@
 --requirement _tools.txt

 # installs this repo's packages
---editable ../pytest-simcore
+--editable ../common-library/
 --editable ../models-library/
+--editable ../pytest-simcore/
 --editable ../service-library/
 --editable ../settings-library/
diff --git a/packages/aws-library/src/aws_library/ec2/__init__.py b/packages/aws-library/src/aws_library/ec2/__init__.py
index 02fcf10b00e8..112c70861b29 100644
--- a/packages/aws-library/src/aws_library/ec2/__init__.py
+++ b/packages/aws-library/src/aws_library/ec2/__init__.py
@@ -1,6 +1,10 @@
 from ._client import SimcoreEC2API
 from ._errors import EC2AccessError, EC2NotConnectedError, EC2RuntimeError
 from ._models import (
+    AWS_TAG_KEY_MAX_LENGTH,
+    AWS_TAG_KEY_MIN_LENGTH,
+    AWS_TAG_VALUE_MAX_LENGTH,
+    AWS_TAG_VALUE_MIN_LENGTH,
     AWSTagKey,
     AWSTagValue,
     EC2InstanceBootSpecific,
@@ -14,6 +18,10 @@
 __all__: tuple[str, ...] = (
     "AWSTagKey",
     "AWSTagValue",
+    "AWS_TAG_KEY_MIN_LENGTH",
+    "AWS_TAG_KEY_MAX_LENGTH",
+    "AWS_TAG_VALUE_MIN_LENGTH",
+    "AWS_TAG_VALUE_MAX_LENGTH",
     "EC2AccessError",
     "EC2InstanceBootSpecific",
     "EC2InstanceConfig",
diff --git a/packages/aws-library/src/aws_library/ec2/_errors.py b/packages/aws-library/src/aws_library/ec2/_errors.py
index c39047db00de..4fb0e611ed2b 100644
--- a/packages/aws-library/src/aws_library/ec2/_errors.py
+++ b/packages/aws-library/src/aws_library/ec2/_errors.py
@@ -1,12 +1,9 @@
 # pylint: disable=too-many-ancestors
-from typing import Any
-
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


 class EC2BaseError(OsparcErrorMixin, Exception):
-    def __init__(self, **ctx: Any) -> None:
-        super().__init__(**ctx)
+    pass


 class EC2RuntimeError(EC2BaseError, RuntimeError):
diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py
index fed1f3ea46a5..621adc0f4eed 100644
--- a/packages/aws-library/src/aws_library/ec2/_models.py
+++ b/packages/aws-library/src/aws_library/ec2/_models.py
@@ -2,18 +2,19 @@
 import re
 import tempfile
 from dataclasses import dataclass
-from typing import Any, ClassVar, TypeAlias
+from typing import Annotated, Final, TypeAlias

 import sh  # type: ignore[import-untyped]
 from models_library.docker import DockerGenericTag
 from pydantic import (
     BaseModel,
     ByteSize,
-    ConstrainedStr,
+    ConfigDict,
     Field,
     NonNegativeFloat,
     NonNegativeInt,
-    validator,
+    StringConstraints,
+    field_validator,
 )
 from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType

@@ -33,26 +34,26 @@ def __gt__(self, other: "Resources") -> bool:
         return self.cpus > other.cpus or self.ram > other.ram

     def __add__(self, other: "Resources") -> "Resources":
-        return Resources.construct(
+        return Resources.model_construct(
             **{
                 key: a + b
                 for (key, a), b in zip(
-                    self.dict().items(), other.dict().values(), strict=True
+                    self.model_dump().items(), other.model_dump().values(), strict=True
                 )
             }
         )

     def __sub__(self, other: "Resources") -> "Resources":
-        return Resources.construct(
+        return Resources.model_construct(
             **{
                 key: a - b
                 for (key, a), b in zip(
-                    self.dict().items(), other.dict().values(), strict=True
+                    self.model_dump().items(), other.model_dump().values(), strict=True
                 )
             }
         )

-    @validator("cpus", pre=True)
+    @field_validator("cpus", mode="before")
     @classmethod
     def _floor_cpus_to_0(cls, v: float) -> float:
         return max(v, 0)
@@ -67,19 +68,31 @@ class EC2InstanceType:
 InstancePrivateDNSName: TypeAlias = str


-class AWSTagKey(ConstrainedStr):
+AWS_TAG_KEY_MIN_LENGTH: Final[int] = 1
+AWS_TAG_KEY_MAX_LENGTH: Final[int] = 128
+AWSTagKey: TypeAlias = Annotated[
     # see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions
-    regex = re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$")
-    min_length = 1
-    max_length = 128
-
-
-class AWSTagValue(ConstrainedStr):
+    str,
+    StringConstraints(
+        min_length=AWS_TAG_KEY_MIN_LENGTH,
+        max_length=AWS_TAG_KEY_MAX_LENGTH,
+        pattern=re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$"),
+    ),
+]
+
+
+AWS_TAG_VALUE_MIN_LENGTH: Final[int] = 0
+AWS_TAG_VALUE_MAX_LENGTH: Final[int] = 256
+AWSTagValue: TypeAlias = Annotated[
     # see https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions
     # quotes []{} were added as they allow JSON-encoding; they seem to be accepted as values
-    regex = re.compile(r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$")
-    min_length = 0
-    max_length = 256
+    str,
+    StringConstraints(
+        min_length=AWS_TAG_VALUE_MIN_LENGTH,
+        max_length=AWS_TAG_VALUE_MAX_LENGTH,
+        pattern=r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$",
+    ),
+]


 EC2Tags: TypeAlias = dict[AWSTagKey, AWSTagValue]
@@ -148,8 +161,23 @@ class EC2InstanceBootSpecific(BaseModel):
         default=0, description="number of buffer EC2s to keep (defaults to 0)"
     )

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    @field_validator("custom_boot_scripts")
+    @classmethod
+    def validate_bash_calls(cls, v):
+        try:
+            with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file:
+                temp_file.writelines(v)
+                temp_file.flush()
+                # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes
+                sh.bash("-n", temp_file.name)
+        except sh.ErrorReturnCode as exc:
+            msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}"
+            raise ValueError(msg) from exc
+
+        return v
+
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     # just AMI
@@ -205,18 +233,4 @@ class Config:
                 },
             ]
         }
-
-    @validator("custom_boot_scripts")
-    @classmethod
-    def validate_bash_calls(cls, v):
-        try:
-            with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file:
-                temp_file.writelines(v)
-                temp_file.flush()
-                # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes
-                sh.bash("-n", temp_file.name)
-        except sh.ErrorReturnCode as exc:
-            msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}"
-            raise ValueError(msg) from exc
-
-        return v
+    )
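For reference, a short sketch of how the new `Annotated`-based tag types are consumed; with `ConstrainedStr` gone in pydantic v2, validation goes through a `TypeAdapter`. The tag key and value below are made up for illustration:

```python
from pydantic import TypeAdapter, ValidationError

from aws_library.ec2 import AWSTagKey, AWSTagValue

# valid examples: allowed characters per the patterns above
tag_key = TypeAdapter(AWSTagKey).validate_python("io.simcore.autoscaling")
tag_value = TypeAdapter(AWSTagValue).validate_python('{"buffer": true}')

# "_index" is explicitly rejected by the negative lookahead in the key pattern
try:
    TypeAdapter(AWSTagKey).validate_python("_index")
except ValidationError as err:
    print(err.error_count(), "validation error")
```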
diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py
index 4ddb2bfb9c22..acd9402fda33 100644
--- a/packages/aws-library/src/aws_library/s3/_client.py
+++ b/packages/aws-library/src/aws_library/s3/_client.py
@@ -13,9 +13,10 @@
 from boto3.s3.transfer import TransferConfig
 from botocore import exceptions as botocore_exc
 from botocore.client import Config
+from common_library.pydantic_networks_extension import AnyUrlLegacy
 from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart
 from models_library.basic_types import SHA256Str
-from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from servicelib.logging_utils import log_catch, log_context
 from servicelib.utils import limited_gather
 from settings_library.s3 import S3Settings
@@ -44,6 +45,9 @@
 _AWS_MAX_ITEMS_PER_PAGE: Final[int] = 1000


+ListAnyUrlTypeAdapter: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl])
+
+
 class UploadedBytesTransferredCallback(Protocol):
     def __call__(self, bytes_transferred: int, *, file_name: str) -> None: ...

@@ -251,7 +255,7 @@ async def create_single_presigned_download_link(
         bucket: S3BucketName,
         object_key: S3ObjectKey,
         expiration_secs: int,
-    ) -> AnyUrl:
+    ) -> str:
         # NOTE: ensure the bucket/object exists, this will raise if not
         await self._client.head_bucket(Bucket=bucket)
         await self._client.head_object(Bucket=bucket, Key=object_key)
@@ -260,13 +264,12 @@ async def create_single_presigned_download_link(
             Params={"Bucket": bucket, "Key": object_key},
             ExpiresIn=expiration_secs,
         )
-        url: AnyUrl = parse_obj_as(AnyUrl, generated_link)
-        return url
+        return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"

     @s3_exception_handler(_logger)
     async def create_single_presigned_upload_link(
         self, *, bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int
-    ) -> AnyUrl:
+    ) -> str:
         # NOTE: ensure the bucket/object exists, this will raise if not
         await self._client.head_bucket(Bucket=bucket)
         generated_link = await self._client.generate_presigned_url(
@@ -274,8 +277,7 @@ async def create_single_presigned_upload_link(
             Params={"Bucket": bucket, "Key": object_key},
             ExpiresIn=expiration_secs,
         )
-        url: AnyUrl = parse_obj_as(AnyUrl, generated_link)
-        return url
+        return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"

     @s3_exception_handler(_logger)
     async def create_multipart_upload_links(
@@ -298,8 +300,7 @@ async def create_multipart_upload_links(
         # compute the number of links, based on the announced file size
         num_upload_links, chunk_size = compute_num_file_chunks(file_size)
         # now create the links
-        upload_links = parse_obj_as(
-            list[AnyUrl],
+        upload_links = ListAnyUrlTypeAdapter.validate_python(
             await asyncio.gather(
                 *(
                     self._client.generate_presigned_url(
@@ -473,7 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool:

     @staticmethod
     def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl:
-        url: AnyUrl = parse_obj_as(
-            AnyUrl, f"s3://{bucket}/{urllib.parse.quote(object_key)}"
+        return TypeAdapter(AnyUrlLegacy).validate_python(
+            f"s3://{bucket}/{urllib.parse.quote(object_key)}"
         )
-        return url
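The switch from `AnyUrl` return types to `str` via `AnyUrlLegacy` works around pydantic v2's URL normalization, which appends a trailing slash to bare-host URLs. A minimal sketch, assuming the `AnyUrlLegacy` alias introduced later in this PR:

```python
from pydantic import TypeAdapter

from common_library.pydantic_networks_extension import AnyUrlLegacy

# pydantic v2 would normalize "https://example.com" to "https://example.com/";
# the Legacy alias strips the trailing slash so presigned links keep their v1 shape.
url = TypeAdapter(AnyUrlLegacy).validate_python("https://example.com")
assert f"{url}" == "https://example.com"
```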
diff --git a/packages/aws-library/src/aws_library/s3/_constants.py b/packages/aws-library/src/aws_library/s3/_constants.py
index 05f2b3dc6d64..a94cd555f432 100644
--- a/packages/aws-library/src/aws_library/s3/_constants.py
+++ b/packages/aws-library/src/aws_library/s3/_constants.py
@@ -1,10 +1,14 @@
 from typing import Final

-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter

 # NOTE: AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
-MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB")
-MULTIPART_COPY_THRESHOLD: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB")
+MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
+    ByteSize
+).validate_python("100MiB")
+MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
+    "100MiB"
+)

-PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5GiB")
-S3_MAX_FILE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5TiB")
+PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
+S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")
diff --git a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py
index f297b04b64d4..d14105dbd30f 100644
--- a/packages/aws-library/src/aws_library/s3/_errors.py
+++ b/packages/aws-library/src/aws_library/s3/_errors.py
@@ -1,7 +1,7 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


-class S3RuntimeError(PydanticErrorMixin, RuntimeError):
+class S3RuntimeError(OsparcErrorMixin, RuntimeError):
     msg_template: str = "S3 client unexpected error"


@@ -10,25 +10,25 @@ class S3NotConnectedError(S3RuntimeError):


 class S3AccessError(S3RuntimeError):
-    code = "s3_access.error"
+    code = "s3_access.error"  # type: ignore[assignment]
     msg_template: str = "Unexpected error while accessing S3 backend"


 class S3BucketInvalidError(S3AccessError):
-    code = "s3_bucket.invalid_error"
+    code = "s3_bucket.invalid_error"  # type: ignore[assignment]
     msg_template: str = "The bucket '{bucket}' is invalid"


 class S3KeyNotFoundError(S3AccessError):
-    code = "s3_key.not_found_error"
+    code = "s3_key.not_found_error"  # type: ignore[assignment]
     msg_template: str = "The file {key} in {bucket} was not found"


 class S3UploadNotFoundError(S3AccessError):
-    code = "s3_upload.not_found_error"
+    code = "s3_upload.not_found_error"  # type: ignore[assignment]
     msg_template: str = "The upload for {key} in {bucket} was not found"


 class S3DestinationNotEmptyError(S3AccessError):
-    code = "s3_destination.not_empty_error"
+    code = "s3_destination.not_empty_error"  # type: ignore[assignment]
     msg_template: str = "The destination {dst_prefix} is not empty"
diff --git a/packages/aws-library/src/aws_library/s3/_utils.py b/packages/aws-library/src/aws_library/s3/_utils.py
index 00a1bcc59bbd..96ad59f57d30 100644
--- a/packages/aws-library/src/aws_library/s3/_utils.py
+++ b/packages/aws-library/src/aws_library/s3/_utils.py
@@ -1,13 +1,13 @@
 from typing import Final

-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter

 _MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000

 # this is artificially defined; if possible we keep a maximum number of requests for parallel
 # uploading. If that is not possible then we create as many upload parts as the max part size allows
 _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [
-    parse_obj_as(ByteSize, x)
+    TypeAdapter(ByteSize).validate_python(x)
     for x in [
         "10Mib",
         "50Mib",
diff --git a/packages/aws-library/src/aws_library/ssm/_errors.py b/packages/aws-library/src/aws_library/ssm/_errors.py
index 32300d08d29f..5d3ea16b6c69 100644
--- a/packages/aws-library/src/aws_library/ssm/_errors.py
+++ b/packages/aws-library/src/aws_library/ssm/_errors.py
@@ -1,7 +1,7 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


-class SSMRuntimeError(PydanticErrorMixin, RuntimeError):
+class SSMRuntimeError(OsparcErrorMixin, RuntimeError):
     msg_template: str = "SSM client unexpected error"
diff --git a/packages/aws-library/tests/test_ec2_models.py b/packages/aws-library/tests/test_ec2_models.py
index f7c114932be8..ed232ad0043d 100644
--- a/packages/aws-library/tests/test_ec2_models.py
+++ b/packages/aws-library/tests/test_ec2_models.py
@@ -6,7 +6,7 @@
 import pytest
 from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2InstanceData, Resources
 from faker import Faker
-from pydantic import ByteSize, ValidationError, parse_obj_as
+from pydantic import ByteSize, TypeAdapter, ValidationError


 @pytest.mark.parametrize(
@@ -88,9 +88,9 @@ def test_resources_gt_operator(a: Resources, b: Resources, a_greater_than_b: boo
             Resources(cpus=1, ram=ByteSize(34)),
         ),
         (
-            Resources(cpus=0.1, ram=ByteSize(-1)),
+            Resources(cpus=0.1, ram=ByteSize(1)),
             Resources(cpus=1, ram=ByteSize(34)),
-            Resources(cpus=1.1, ram=ByteSize(33)),
+            Resources(cpus=1.1, ram=ByteSize(35)),
         ),
     ],
 )
@@ -108,14 +108,14 @@ def test_resources_create_as_empty():
     "a,b,result",
     [
         (
-            Resources(cpus=0, ram=ByteSize(0)),
-            Resources(cpus=1, ram=ByteSize(34)),
-            Resources.construct(cpus=-1, ram=ByteSize(-34)),
+            Resources(cpus=0, ram=ByteSize(34)),
+            Resources(cpus=1, ram=ByteSize(0)),
+            Resources.model_construct(cpus=-1, ram=ByteSize(34)),
         ),
         (
-            Resources(cpus=0.1, ram=ByteSize(-1)),
-            Resources(cpus=1, ram=ByteSize(34)),
-            Resources.construct(cpus=-0.9, ram=ByteSize(-35)),
+            Resources(cpus=0.1, ram=ByteSize(34)),
+            Resources(cpus=1, ram=ByteSize(1)),
+            Resources.model_construct(cpus=-0.9, ram=ByteSize(33)),
         ),
     ],
 )
@@ -129,10 +129,10 @@ def test_resources_sub(a: Resources, b: Resources, result: Resources):
 def test_aws_tag_key_invalid(ec2_tag_key: str):
     # for a key it raises
     with pytest.raises(ValidationError):
-        parse_obj_as(AWSTagKey, ec2_tag_key)
+        TypeAdapter(AWSTagKey).validate_python(ec2_tag_key)

     # for a value it does not
-    parse_obj_as(AWSTagValue, ec2_tag_key)
+    TypeAdapter(AWSTagValue).validate_python(ec2_tag_key)


 def test_ec2_instance_data_hashable(faker: Faker):
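Most of the test churn below is one mechanical substitution: pydantic v2 removed `parse_obj_as`, and `TypeAdapter(...).validate_python(...)` is its replacement. A representative before/after sketch:

```python
from pydantic import ByteSize, TypeAdapter

# pydantic v1 style (removed in v2):
#   size = parse_obj_as(ByteSize, "10MiB")
# pydantic v2 equivalent used throughout this PR:
size = TypeAdapter(ByteSize).validate_python("10MiB")
assert size == 10 * 1024 * 1024
```

Where the same adapter is needed repeatedly, caching it at module scope (as done with `ListAnyUrlTypeAdapter` in `_client.py`) avoids rebuilding the core schema on every call.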
diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py
index 93ee29fe5b02..d07075aed308 100644
--- a/packages/aws-library/tests/test_s3_client.py
+++ b/packages/aws-library/tests/test_s3_client.py
@@ -32,7 +32,7 @@
 from models_library.api_schemas_storage import S3BucketName, UploadedPart
 from models_library.basic_types import SHA256Str
 from moto.server import ThreadedMotoServer
-from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from pytest_benchmark.plugin import BenchmarkFixture
 from pytest_simcore.helpers.logging_tools import log_context
 from pytest_simcore.helpers.parametrizations import (
@@ -67,7 +67,9 @@ async def simcore_s3_api(
 @pytest.fixture
 def bucket_name(faker: Faker) -> S3BucketName:
     # NOTE: no faker here as we need some specific namings
-    return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower())
+    return TypeAdapter(S3BucketName).validate_python(
+        faker.pystr().replace("_", "-").lower()
+    )


 @pytest.fixture
@@ -89,7 +91,9 @@ async def with_s3_bucket(

 @pytest.fixture
 def non_existing_s3_bucket(faker: Faker) -> S3BucketName:
-    return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower())
+    return TypeAdapter(S3BucketName).validate_python(
+        faker.pystr().replace("_", "-").lower()
+    )


 @pytest.fixture
@@ -107,7 +111,7 @@ async def _(
             file,
             MultiPartUploadLinks(
                 upload_id="fake",
-                chunk_size=parse_obj_as(ByteSize, file.stat().st_size),
+                chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size),
                 urls=[presigned_url],
             ),
         )
@@ -131,7 +135,7 @@ async def with_uploaded_file_on_s3(
     s3_client: S3Client,
     with_s3_bucket: S3BucketName,
 ) -> AsyncIterator[UploadedFile]:
-    test_file = create_file_of_size(parse_obj_as(ByteSize, "10Kib"))
+    test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib"))
     await s3_client.upload_file(
         Filename=f"{test_file}",
         Bucket=with_s3_bucket,
@@ -200,7 +204,7 @@ async def _uploader(
             object_key=object_key,
             file_size=ByteSize(file.stat().st_size),
             expiration_secs=default_expiration_time_seconds,
-            sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()),
+            sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()),
         )
         assert upload_links
@@ -586,7 +590,7 @@ async def test_undelete_file(
     assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size

     # upload another file on top of the existing one
-    new_file = create_file_of_size(parse_obj_as(ByteSize, "5Kib"))
+    new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib"))
     await s3_client.upload_file(
         Filename=f"{new_file}",
         Bucket=with_s3_bucket,
@@ -688,7 +692,7 @@ async def test_create_single_presigned_download_link(
         object_key=with_uploaded_file_on_s3.s3_key,
         expiration_secs=default_expiration_time_seconds,
     )
-    assert isinstance(download_url, AnyUrl)
+    assert download_url

     dest_file = tmp_path / faker.file_name()
     async with ClientSession() as session:
@@ -738,10 +742,10 @@ async def test_create_single_presigned_upload_link(
     create_file_of_size: Callable[[ByteSize], Path],
     default_expiration_time_seconds: int,
     upload_to_presigned_link: Callable[
-        [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None]
+        [Path, str, S3BucketName, S3ObjectKey], Awaitable[None]
     ],
 ):
-    file = create_file_of_size(parse_obj_as(ByteSize, "1Mib"))
+    file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
     s3_object_key = file.name
     presigned_url = await simcore_s3_api.create_single_presigned_upload_link(
         bucket=with_s3_bucket,
@@ -769,7 +773,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais
     create_file_of_size: Callable[[ByteSize], Path],
     default_expiration_time_seconds: int,
 ):
-    file = create_file_of_size(parse_obj_as(ByteSize, "1Mib"))
+    file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
     s3_object_key = file.name
     with pytest.raises(S3BucketInvalidError):
         await simcore_s3_api.create_single_presigned_upload_link(
@@ -863,7 +867,7 @@ async def test_create_multipart_presigned_upload_link_invalid_raises(
             object_key=faker.pystr(),
             file_size=ByteSize(file.stat().st_size),
             expiration_secs=default_expiration_time_seconds,
-            sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()),
+            sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()),
         )

     # completing with invalid bucket
@@ -1076,7 +1080,7 @@ async def test_copy_file_invalid_raises(
     create_file_of_size: Callable[[ByteSize], Path],
     faker: Faker,
 ):
-    file = create_file_of_size(parse_obj_as(ByteSize, "1MiB"))
+    file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB"))
     uploaded_file = await upload_file(file)
     dst_object_key = faker.file_name()
     # NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError
@@ -1101,9 +1105,9 @@ async def test_copy_file_invalid_raises(
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
         )
     ],
     ids=byte_size_ids,
@@ -1127,9 +1131,9 @@ async def test_get_directory_metadata(
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
         )
     ],
     ids=byte_size_ids,
@@ -1159,9 +1163,9 @@ async def test_get_directory_metadata_raises(
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
        )
     ],
     ids=byte_size_ids,
@@ -1195,9 +1199,9 @@ async def test_delete_file_recursively(
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
         )
     ],
     ids=byte_size_ids,
@@ -1233,9 +1237,9 @@ async def test_delete_file_recursively_raises(
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
         )
     ],
     ids=byte_size_ids,
@@ -1298,7 +1302,7 @@ def test_compute_s3_url(
     bucket: S3BucketName, object_key: S3ObjectKey, expected_s3_url: AnyUrl
 ):
     assert (
-        SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key)
+        str(SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key))
         == expected_s3_url
     )
@@ -1333,14 +1337,14 @@ def run_async_test(*args, **kwargs) -> None:
     "directory_size, min_file_size, max_file_size",
     [
         (
-            parse_obj_as(ByteSize, "1Mib"),
-            parse_obj_as(ByteSize, "1B"),
-            parse_obj_as(ByteSize, "10Kib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
+            TypeAdapter(ByteSize).validate_python("1B"),
+            TypeAdapter(ByteSize).validate_python("10Kib"),
         ),
         (
-            parse_obj_as(ByteSize, "500Mib"),
-            parse_obj_as(ByteSize, "10Mib"),
-            parse_obj_as(ByteSize, "50Mib"),
+            TypeAdapter(ByteSize).validate_python("500Mib"),
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+            TypeAdapter(ByteSize).validate_python("50Mib"),
         ),
     ],
     ids=byte_size_ids,
diff --git a/packages/aws-library/tests/test_s3_utils.py b/packages/aws-library/tests/test_s3_utils.py
index 5354da8bc660..cfba16349433 100644
--- a/packages/aws-library/tests/test_s3_utils.py
+++ b/packages/aws-library/tests/test_s3_utils.py
@@ -10,23 +10,63 @@
     _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE,
     compute_num_file_chunks,
 )
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from pytest_simcore.helpers.parametrizations import byte_size_ids


 @pytest.mark.parametrize(
     "file_size, expected_num_chunks, expected_chunk_size",
     [
-        (parse_obj_as(ByteSize, "5Mib"), 1, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "10Mib"), 1, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "20Mib"), 2, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "50Mib"), 5, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "150Mib"), 15, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "550Mib"), 55, parse_obj_as(ByteSize, "10Mib")),
-        (parse_obj_as(ByteSize, "560Gib"), 5735, parse_obj_as(ByteSize, "100Mib")),
-        (parse_obj_as(ByteSize, "5Tib"), 8739, parse_obj_as(ByteSize, "600Mib")),
-        (parse_obj_as(ByteSize, "15Tib"), 7680, parse_obj_as(ByteSize, "2Gib")),
-        (parse_obj_as(ByteSize, "9431773844"), 900, parse_obj_as(ByteSize, "10Mib")),
+        (
+            TypeAdapter(ByteSize).validate_python("5Mib"),
+            1,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+            1,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("20Mib"),
+            2,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("50Mib"),
+            5,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("150Mib"),
+            15,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("550Mib"),
+            55,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("560Gib"),
+            5735,
+            TypeAdapter(ByteSize).validate_python("100Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("5Tib"),
+            8739,
+            TypeAdapter(ByteSize).validate_python("600Mib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("15Tib"),
+            7680,
+            TypeAdapter(ByteSize).validate_python("2Gib"),
+        ),
+        (
+            TypeAdapter(ByteSize).validate_python("9431773844"),
+            900,
+            TypeAdapter(ByteSize).validate_python("10Mib"),
+        ),
     ],
     ids=byte_size_ids,
 )
@@ -39,8 +79,7 @@ def test_compute_num_file_chunks(


 def test_enormous_file_size_raises_value_error():
-    enormous_file_size = parse_obj_as(
-        ByteSize,
+    enormous_file_size = TypeAdapter(ByteSize).validate_python(
         (
             max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE)
             * _MULTIPART_MAX_NUMBER_OF_PARTS
diff --git a/packages/common-library/.gitignore b/packages/common-library/.gitignore
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/common-library/Makefile b/packages/common-library/Makefile
new file mode 100644
index 000000000000..b554ec6f9c08
--- /dev/null
+++ b/packages/common-library/Makefile
@@ -0,0 +1,49 @@
+#
+# Targets for DEVELOPMENT of common Library
+#
+include ../../scripts/common.Makefile
+include ../../scripts/common-package.Makefile
+
+.PHONY: requirements
+requirements: ## compiles pip requirements (.in -> .txt)
+	@$(MAKE_C) requirements reqs
+
+
+.PHONY: install-dev install-prod install-ci
+install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode
+	# installing in $(subst install-,,$@) mode
+	@uv pip sync requirements/$(subst install-,,$@).txt
+
+
+.PHONY: tests tests-ci
+tests: ## runs unit tests
+	# running unit tests
+	@pytest \
+		--asyncio-mode=auto \
+		--color=yes \
+		--cov-config=../../.coveragerc \
+		--cov-report=term-missing \
+		--cov=common_library \
+		--durations=10 \
+		--exitfirst \
+		--failed-first \
+		--pdb \
+		-vv \
+		$(CURDIR)/tests
+
+tests-ci: ## runs unit tests [ci-mode]
+	# running unit tests
+	@pytest \
+		--asyncio-mode=auto \
+		--color=yes \
+		--cov-append \
+		--cov-config=../../.coveragerc \
+		--cov-report=term-missing \
+		--cov-report=xml \
+		--cov=common_library \
+		--durations=10 \
+		--log-date-format="%Y-%m-%d %H:%M:%S" \
+		--log-format="%(asctime)s %(levelname)s %(message)s" \
+		--verbose \
+		-m "not heavy_load" \
+		$(CURDIR)/tests
diff --git a/packages/common-library/README.md b/packages/common-library/README.md
new file mode 100644
index 000000000000..8e5c489787b0
--- /dev/null
+++ b/packages/common-library/README.md
@@ -0,0 +1,42 @@
+# simcore pydantic common library
+
+Contains the common classes, functions and, in general, utilities used across the simcore platform.
+
+## Installation
+
+```console
+make help
+make install-dev
+```
+
+## Test
+
+```console
+make help
+make tests
+```
+
+
+## Diagnostics
+
+How to run diagnostics on the service metadata published in a docker registry?
+
+1. Setup environment
+```bash
+make devenv
+source .venv/bin/activate
+
+cd packages/common-library
+make install-dev
+```
+2. Set ``REGISTRY_*`` env vars in ``.env`` (in the repository base folder)
+3. Download test data, run diagnostics, archive tests-data, and cleanup
+```bash
+export DEPLOY_NAME=my-deploy
+
+make pull_test_data >$DEPLOY_NAME-registry-diagnostics.log 2>&1
+pytest -vv -m diagnostics >>$DEPLOY_NAME-registry-diagnostics.log 2>&1
+zip -r $DEPLOY_NAME-registry-test-data.zip tests/data/.downloaded-ignore
+rm -r tests/data/.downloaded-ignore
+```
+4. Move all ``$DEPLOY_NAME-*`` files to an archive
diff --git a/packages/common-library/VERSION b/packages/common-library/VERSION
new file mode 100644
index 000000000000..6e8bf73aa550
--- /dev/null
+++ b/packages/common-library/VERSION
@@ -0,0 +1 @@
+0.1.0
diff --git a/packages/common-library/requirements/Makefile b/packages/common-library/requirements/Makefile
new file mode 100644
index 000000000000..3f25442b790e
--- /dev/null
+++ b/packages/common-library/requirements/Makefile
@@ -0,0 +1,6 @@
+#
+# Targets to pip-compile requirements
+#
+include ../../../requirements/base.Makefile
+
+# Add here any extra explicit dependency: e.g. _migration.txt: _base.txt
diff --git a/packages/common-library/requirements/_base.in b/packages/common-library/requirements/_base.in
new file mode 100644
index 000000000000..73cc02bceb78
--- /dev/null
+++ b/packages/common-library/requirements/_base.in
@@ -0,0 +1,6 @@
+#
+# Specifies third-party dependencies for 'common-library'
+#
+--constraint ../../../requirements/constraints.txt
+
+pydantic
diff --git a/packages/common-library/requirements/_base.txt b/packages/common-library/requirements/_base.txt
new file mode 100644
index 000000000000..32a8575abeaa
--- /dev/null
+++ b/packages/common-library/requirements/_base.txt
@@ -0,0 +1,12 @@
+annotated-types==0.7.0
+    # via pydantic
+pydantic==2.9.2
+    # via
+    #   -c requirements/../../../requirements/constraints.txt
+    #   -r requirements/_base.in
+pydantic-core==2.23.4
+    # via pydantic
+typing-extensions==4.12.2
+    # via
+    #   pydantic
+    #   pydantic-core
diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in
new file mode 100644
index 000000000000..1fe37ac01517
--- /dev/null
+++ b/packages/common-library/requirements/_test.in
@@ -0,0 +1,22 @@
+#
+# Specifies dependencies required to run 'common-library'
+#
+--constraint ../../../requirements/constraints.txt
+
+# Adds base AS CONSTRAINT specs, not requirement.
+# - Resulting _test.txt is a frozen list of EXTRA packages for testing, besides _base.txt
+#
+--constraint _base.txt
+
+coverage
+faker
+pydantic-settings
+pytest
+pytest-asyncio
+pytest-cov
+pytest-icdiff
+pytest-instafail
+pytest-mock
+pytest-runner
+pytest-sugar
+python-dotenv
diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt
new file mode 100644
index 000000000000..89b9a19eca60
--- /dev/null
+++ b/packages/common-library/requirements/_test.txt
@@ -0,0 +1,74 @@
+annotated-types==0.7.0
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
+coverage==7.6.1
+    # via
+    #   -r requirements/_test.in
+    #   pytest-cov
+faker==30.3.0
+    # via -r requirements/_test.in
+icdiff==2.0.7
+    # via pytest-icdiff
+iniconfig==2.0.0
+    # via pytest
+packaging==24.1
+    # via
+    #   pytest
+    #   pytest-sugar
+pluggy==1.5.0
+    # via pytest
+pprintpp==0.4.0
+    # via pytest-icdiff
+pydantic==2.9.2
+    # via
+    #   -c requirements/../../../requirements/constraints.txt
+    #   -c requirements/_base.txt
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via
+    #   -c requirements/_base.txt
+    #   pydantic
+pydantic-settings==2.5.2
+    # via -r requirements/_test.in
+pytest==8.3.3
+    # via
+    #   -r requirements/_test.in
+    #   pytest-asyncio
+    #   pytest-cov
+    #   pytest-icdiff
+    #   pytest-instafail
+    #   pytest-mock
+    #   pytest-sugar
+pytest-asyncio==0.23.8
+    # via
+    #   -c requirements/../../../requirements/constraints.txt
+    #   -r requirements/_test.in
+pytest-cov==5.0.0
+    # via -r requirements/_test.in
+pytest-icdiff==0.9
+    # via -r requirements/_test.in
+pytest-instafail==0.5.0
+    # via -r requirements/_test.in
+pytest-mock==3.14.0
+    # via -r requirements/_test.in
+pytest-runner==6.0.1
+    # via -r requirements/_test.in
+pytest-sugar==1.0.0
+    # via -r requirements/_test.in
+python-dateutil==2.9.0.post0
+    # via faker
+python-dotenv==1.0.1
+    # via
+    #   -r requirements/_test.in
+    #   pydantic-settings
+six==1.16.0
+    # via python-dateutil
+termcolor==2.5.0
+    # via pytest-sugar
+typing-extensions==4.12.2
+    # via
+    #   -c requirements/_base.txt
+    #   faker
+    #   pydantic
+    #   pydantic-core
diff --git a/packages/common-library/requirements/_tools.in b/packages/common-library/requirements/_tools.in
new file mode 100644
index 000000000000..1def82c12a30
--- /dev/null
+++ b/packages/common-library/requirements/_tools.in
@@ -0,0 +1,5 @@
+--constraint ../../../requirements/constraints.txt
+--constraint _base.txt
+--constraint _test.txt
+
+--requirement ../../../requirements/devenv.txt
diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt
new file mode 100644
index 000000000000..b5f85d4efccc
--- /dev/null
+++ b/packages/common-library/requirements/_tools.txt
@@ -0,0 +1,79 @@
+astroid==3.3.5
+    # via pylint
+black==24.10.0
+    # via -r requirements/../../../requirements/devenv.txt
+build==1.2.2.post1
+    # via pip-tools
+bump2version==1.0.1
+    # via -r requirements/../../../requirements/devenv.txt
+cfgv==3.4.0
+    # via pre-commit
+click==8.1.7
+    # via
+    #   black
+    #   pip-tools
+dill==0.3.9
+    # via pylint
+distlib==0.3.8
+    # via virtualenv
+filelock==3.16.1
+    # via virtualenv
+identify==2.6.1
+    # via pre-commit
+isort==5.13.2
+    # via
+    #   -r requirements/../../../requirements/devenv.txt
+    #   pylint
+mccabe==0.7.0
+    # via pylint
+mypy==1.11.2
+    # via -r requirements/../../../requirements/devenv.txt
+mypy-extensions==1.0.0
+    # via
+    #   black
+    #   mypy
+nodeenv==1.9.1
+    # via pre-commit
+packaging==24.1
+    # via
+    #   -c requirements/_test.txt
+    #   black
+    #   build
+pathspec==0.12.1
+    # via black
+pip==24.2
+    # via pip-tools
+pip-tools==7.4.1
+    # via -r requirements/../../../requirements/devenv.txt
+platformdirs==4.3.6
+    # via
+    #   black
+    #   pylint
+    #   virtualenv
+pre-commit==4.0.0
+    # via -r requirements/../../../requirements/devenv.txt
+pylint==3.3.1
+    # via -r requirements/../../../requirements/devenv.txt
+pyproject-hooks==1.2.0
+    # via
+    #   build
+    #   pip-tools
+pyyaml==6.0.2
+    # via
+    #   -c requirements/../../../requirements/constraints.txt
+    #   pre-commit
+ruff==0.6.9
+    # via -r requirements/../../../requirements/devenv.txt
+setuptools==75.1.0
+    # via pip-tools
+tomlkit==0.13.2
+    # via pylint
+typing-extensions==4.12.2
+    # via
+    #   -c requirements/_base.txt
+    #   -c requirements/_test.txt
+    #   mypy
+virtualenv==20.26.6
+    # via pre-commit
+wheel==0.44.0
+    # via pip-tools
diff --git a/packages/common-library/requirements/ci.txt b/packages/common-library/requirements/ci.txt
new file mode 100644
index 000000000000..ed9eb3028e89
--- /dev/null
+++ b/packages/common-library/requirements/ci.txt
@@ -0,0 +1,17 @@
+# Shortcut to install all packages for the continuous integration (CI) of 'common-library'
+#
+# - As ci.txt but w/ tests
+#
+# Usage:
+#   pip install -r requirements/ci.txt
+#
+
+# installs base + tests requirements
+--requirement _base.txt
+--requirement _test.txt
+
+# installs this repo's packages
+pytest-simcore @ ../pytest-simcore
+
+# current module
+simcore-common-library @ .
diff --git a/packages/common-library/requirements/dev.txt b/packages/common-library/requirements/dev.txt
new file mode 100644
index 000000000000..02718f95c3ad
--- /dev/null
+++ b/packages/common-library/requirements/dev.txt
@@ -0,0 +1,18 @@
+# Shortcut to install all packages needed to develop 'common-library'
+#
+# - As ci.txt but with current and repo packages in develop (edit) mode
+#
+# Usage:
+#   pip install -r requirements/dev.txt
+#
+
+# installs base + tests requirements
+--requirement _base.txt
+--requirement _test.txt
+--requirement _tools.txt
+
+# installs this repo's packages
+--editable ../pytest-simcore/
+
+# current module
+--editable .
diff --git a/packages/common-library/setup.cfg b/packages/common-library/setup.cfg
new file mode 100644
index 000000000000..b33be52008af
--- /dev/null
+++ b/packages/common-library/setup.cfg
@@ -0,0 +1,24 @@
+[bumpversion]
+current_version = 0.2.0
+commit = True
+message = packages/common-library version: {current_version} → {new_version}
+tag = False
+commit_args = --no-verify
+
+[bumpversion:file:VERSION]
+
+[bdist_wheel]
+universal = 1
+
+[aliases]
+test = pytest
+
+[tool:pytest]
+asyncio_mode = auto
+markers =
+    diagnostics: "can be used to run diagnostics against deployed data (e.g. database, registry etc)"
+    testit: "marks test to run during development"
+
+[mypy]
+plugins =
+    pydantic.mypy
diff --git a/packages/common-library/setup.py b/packages/common-library/setup.py
new file mode 100644
index 000000000000..4e381f5bbc2d
--- /dev/null
+++ b/packages/common-library/setup.py
@@ -0,0 +1,60 @@
+import re
+import sys
+from pathlib import Path
+
+from setuptools import find_packages, setup
+
+
+def read_reqs(reqs_path: Path) -> set[str]:
+    return {
+        r
+        for r in re.findall(
+            r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)",
+            reqs_path.read_text(),
+            re.MULTILINE,
+        )
+        if isinstance(r, str)
+    }
+
+
+CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+INSTALL_REQUIREMENTS = tuple(
+    read_reqs(CURRENT_DIR / "requirements" / "_base.in")
+)  # WEAK requirements
+
+TEST_REQUIREMENTS = tuple(
+    read_reqs(CURRENT_DIR / "requirements" / "_test.txt")
+)  # STRICT requirements
+
+
+SETUP = {
+    "name": "simcore-common-library",
+    "version": Path(CURRENT_DIR / "VERSION").read_text().strip(),
+    "author": "Giancarlo Romeo (giancarloromeo)",
+    "description": "Core service library for simcore pydantic common",
+    "python_requires": "~=3.11",
+    "classifiers": [
+        "Development Status :: 2 - Pre-Alpha",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: MIT License",
+        "Natural Language :: English",
+        "Programming Language :: Python :: 3.11",
+    ],
+    "long_description": Path(CURRENT_DIR / "README.md").read_text(),
+    "license": "MIT license",
+    "install_requires": INSTALL_REQUIREMENTS,
+    "packages": find_packages(where="src"),
+    "package_data": {"": ["py.typed"]},
+    "package_dir": {"": "src"},
+    "include_package_data": True,
+    "test_suite": "tests",
+    "tests_require": TEST_REQUIREMENTS,
+    "extras_require": {"test": TEST_REQUIREMENTS},
+    "zip_safe": False,
+}
+
+
+if __name__ == "__main__":
+    setup(**SETUP)
diff --git a/packages/common-library/src/common_library/__init__.py b/packages/common-library/src/common_library/__init__.py
new file mode 100644
index 000000000000..dc0c65ff7214
--- /dev/null
+++ b/packages/common-library/src/common_library/__init__.py
@@ -0,0 +1,12 @@
+""" osparc's service common library
+
+"""
+
+#
+# NOTE:
+# - "examples" = [ ...] keyword and NOT "example". See https://json-schema.org/understanding-json-schema/reference/generic.html#annotations
+#
+
+from importlib.metadata import version
+
+__version__: str = version("simcore-common-library")
diff --git a/packages/common-library/src/common_library/error_codes.py b/packages/common-library/src/common_library/error_codes.py
new file mode 100644
index 000000000000..13b3b1566daa
--- /dev/null
+++ b/packages/common-library/src/common_library/error_codes.py
@@ -0,0 +1,31 @@
+""" osparc ERROR CODES (OEC)
+  Unique identifier of an exception instance
+  Intended to report unexpected errors to users
+
+  Unexpected exceptions can be traced by matching the
+  logged error code with the one appended to the user-friendly message
+
+SEE test_error_codes for some use cases
+"""
+
+import re
+from typing import TYPE_CHECKING, Annotated
+
+from pydantic import StringConstraints, TypeAdapter
+
+_LABEL = "OEC:{}"
+_PATTERN = r"OEC:\d+"
+
+if TYPE_CHECKING:
+    ErrorCodeStr = str
+else:
+    ErrorCodeStr = Annotated[
+        str, StringConstraints(strip_whitespace=True, pattern=_PATTERN)
+    ]
+
+
+def create_error_code(exception: BaseException) -> ErrorCodeStr:
+    return TypeAdapter(ErrorCodeStr).validate_python(_LABEL.format(id(exception)))
+
+
+def parse_error_code(obj) -> set[ErrorCodeStr]:
+    return set(re.findall(_PATTERN, f"{obj}"))
diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/common-library/src/common_library/errors_classes.py
similarity index 74%
rename from packages/models-library/src/models_library/errors_classes.py
rename to packages/common-library/src/common_library/errors_classes.py
index 921db49df3c0..83e40b2a2b0e 100644
--- a/packages/models-library/src/models_library/errors_classes.py
+++ b/packages/common-library/src/common_library/errors_classes.py
@@ -1,3 +1,5 @@
+from typing import Any
+
 from pydantic.errors import PydanticErrorMixin

 from .error_codes import create_error_code
@@ -9,12 +11,22 @@ def __missing__(self, key):


 class OsparcErrorMixin(PydanticErrorMixin):
-    def __new__(cls, *args, **kwargs):
+    code: str  # type: ignore[assignment]
+    msg_template: str
+
+    def __new__(cls, *_args, **_kwargs):
         if not hasattr(cls, "code"):
             cls.code = cls._get_full_class_name()
-        return super().__new__(cls, *args, **kwargs)
+        return super().__new__(cls)
+
+    def __init__(self, **ctx: Any) -> None:
+        self.__dict__ = ctx
+        super().__init__(message=self._build_message(), code=self.code)  # type: ignore[arg-type]

     def __str__(self) -> str:
+        return self._build_message()
+
+    def _build_message(self) -> str:
         # NOTE: safe. Does not raise KeyError
         return self.msg_template.format_map(_DefaultDict(**self.__dict__))
diff --git a/packages/common-library/src/common_library/py.typed b/packages/common-library/src/common_library/py.typed
new file mode 100644
index 000000000000..e69de29bb2d1
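A short sketch of how the relocated `OsparcErrorMixin` and `error_codes` work together; `ProjectNotFoundError` below is a hypothetical example class, not part of this PR:

```python
from common_library.error_codes import create_error_code
from common_library.errors_classes import OsparcErrorMixin


class ProjectNotFoundError(OsparcErrorMixin, RuntimeError):
    # placeholders in msg_template are filled from the kwargs passed at raise-time
    msg_template = "Project {project_id} was not found"


try:
    raise ProjectNotFoundError(project_id="abc123")
except ProjectNotFoundError as err:
    assert str(err) == "Project abc123 was not found"
    print(err.error_context())  # now also includes "code" and "message", see tests below
    print(create_error_code(err))  # e.g. "OEC:140234567890", derived from id(err)
```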
diff --git a/packages/common-library/src/common_library/pydantic_basic_types.py b/packages/common-library/src/common_library/pydantic_basic_types.py
new file mode 100644
index 000000000000..452c118dae95
--- /dev/null
+++ b/packages/common-library/src/common_library/pydantic_basic_types.py
@@ -0,0 +1,79 @@
+from re import Pattern
+from typing import Annotated, Final, TypeAlias
+
+from pydantic import Field
+from pydantic_core import core_schema
+
+# https://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Registered_ports
+RegisteredPortInt: TypeAlias = Annotated[int, Field(gt=1024, lt=65535)]
+
+# non-empty bounded string used as identifier
+# e.g. "123" or "name_123" or "fa327c73-52d8-462a-9267-84eeaf0f90e3" but NOT ""
+_ELLIPSIS_CHAR: Final[str] = "..."
+
+
+class ConstrainedStr(str):  # noqa: SLOT000
+    pattern: str | Pattern[str] | None = None
+    min_length: int | None = None
+    max_length: int | None = None
+    strip_whitespace: bool = False
+    curtail_length: int | None = None
+
+    @classmethod
+    def _validate(cls, __input_value: str) -> str:
+        if cls.curtail_length and len(__input_value) > cls.curtail_length:
+            __input_value = __input_value[: cls.curtail_length]
+        return cls(__input_value)
+
+    @classmethod
+    def __get_pydantic_core_schema__(cls, _source_type, _handler):
+        return core_schema.no_info_after_validator_function(
+            cls._validate,
+            core_schema.str_schema(
+                pattern=cls.pattern,
+                min_length=cls.min_length,
+                max_length=cls.max_length,
+                strip_whitespace=cls.strip_whitespace,
+            ),
+        )
+
+
+class IDStr(ConstrainedStr):
+    strip_whitespace = True
+    min_length = 1
+    max_length = 100
+
+    @staticmethod
+    def concatenate(*args: "IDStr", link_char: str = " ") -> "IDStr":
+        result = link_char.join(args).strip()
+        assert IDStr.min_length  # nosec
+        assert IDStr.max_length  # nosec
+        if len(result) > IDStr.max_length:
+            if IDStr.max_length > len(_ELLIPSIS_CHAR):
+                result = (
+                    result[: IDStr.max_length - len(_ELLIPSIS_CHAR)].rstrip()
+                    + _ELLIPSIS_CHAR
+                )
+            else:
+                result = _ELLIPSIS_CHAR[0] * IDStr.max_length
+        if len(result) < IDStr.min_length:
+            msg = f"IDStr.concatenate: result is too short: {result}"
+            raise ValueError(msg)
+        return IDStr(result)
+
+
+class ShortTruncatedStr(ConstrainedStr):
+    # NOTE: Use to input e.g. titles or display names
+    # A truncated string:
+    #   - Strips whitespaces and truncates strings that exceed the specified characters limit (curtail_length).
+    #   - Ensures that the **input** data length to the API is controlled and prevents exceeding large inputs silently, i.e. without raising errors.
+    # SEE https://github.com/ITISFoundation/osparc-simcore/pull/5989#discussion_r1650506583
+    strip_whitespace = True
+    curtail_length = 600
+
+
+class LongTruncatedStr(ConstrainedStr):
+    # NOTE: Use to input e.g. descriptions or summaries
+    # Analogous to ShortTruncatedStr
+    strip_whitespace = True
+    curtail_length = 65536  # same as github description
diff --git a/packages/common-library/src/common_library/pydantic_fields_extension.py b/packages/common-library/src/common_library/pydantic_fields_extension.py
new file mode 100644
index 000000000000..f2f6d59a5f45
--- /dev/null
+++ b/packages/common-library/src/common_library/pydantic_fields_extension.py
@@ -0,0 +1,22 @@
+from types import UnionType
+from typing import Any, Literal, get_args, get_origin
+
+from pydantic.fields import FieldInfo
+
+
+def get_type(info: FieldInfo) -> Any:
+    field_type = info.annotation
+    if args := get_args(info.annotation):
+        field_type = next(a for a in args if a != type(None))
+    return field_type
+
+
+def is_literal(info: FieldInfo) -> bool:
+    return get_origin(info.annotation) is Literal
+
+
+def is_nullable(info: FieldInfo) -> bool:
+    origin = get_origin(info.annotation)  # X | None or Optional[X] will return Union
+    if origin is UnionType:
+        return any(x in get_args(info.annotation) for x in (type(None), Any))
+    return False
diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py
new file mode 100644
index 000000000000..79c5da906b1a
--- /dev/null
+++ b/packages/common-library/src/common_library/pydantic_networks_extension.py
@@ -0,0 +1,25 @@
+from typing import Annotated, TypeAlias
+
+from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, HttpUrl
+from pydantic_core import Url
+
+
+def _strip_last_slash(url: Url) -> str:
+    return f"{url}".rstrip("/")
+
+
+AnyUrlLegacy: TypeAlias = Annotated[
+    AnyUrl,
+    AfterValidator(_strip_last_slash),
+]
+
+AnyHttpUrlLegacy: TypeAlias = Annotated[
+    AnyHttpUrl,
+    AfterValidator(_strip_last_slash),
+]
+
+
+HttpUrlLegacy: TypeAlias = Annotated[
+    HttpUrl,
+    AfterValidator(_strip_last_slash),
+]
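A sketch of the runtime behavior the re-implemented `ConstrainedStr` subclasses above are meant to preserve from pydantic v1 (lengths as per the class attributes shown; the inputs are made up):

```python
from pydantic import TypeAdapter

from common_library.pydantic_basic_types import IDStr, ShortTruncatedStr

# IDStr strips whitespace and enforces a 1..100 character length
assert TypeAdapter(IDStr).validate_python("  node_1  ") == "node_1"

# ShortTruncatedStr silently curtails oversized input instead of raising
truncated = TypeAdapter(ShortTruncatedStr).validate_python("x" * 1000)
assert len(truncated) == 600  # curtail_length
```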
" + "For details: https://docs.pydantic.dev/1.10/usage/types/#datetime-types.", + DeprecationWarning, + stacklevel=8, + ) + + return converted_value + except ValueError: + # returns format like "1:00:00" + return v + return v + + return field_validator(field, mode="before")(_numeric_string_as_timedelta) diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py new file mode 100644 index 000000000000..964dfc01ef83 --- /dev/null +++ b/packages/common-library/src/common_library/serialization.py @@ -0,0 +1,42 @@ +from datetime import timedelta +from typing import Any + +from pydantic import BaseModel, SecretStr +from pydantic_core import Url + +from .pydantic_fields_extension import get_type + + +def model_dump_with_secrets( + settings_obj: BaseModel, *, show_secrets: bool, **pydantic_export_options +) -> dict[str, Any]: + data = settings_obj.model_dump(**pydantic_export_options) + + for field_name in settings_obj.model_fields: + if field_name not in data: + continue + + field_data = data[field_name] + + if isinstance(field_data, timedelta): + data[field_name] = field_data.total_seconds() + + elif isinstance(field_data, SecretStr): + if show_secrets: + data[field_name] = field_data.get_secret_value() + else: + data[field_name] = str(field_data) + + elif isinstance(field_data, Url): + data[field_name] = str(field_data) + + elif isinstance(field_data, dict): + field_type = get_type(settings_obj.model_fields[field_name]) + if issubclass(field_type, BaseModel): + data[field_name] = model_dump_with_secrets( + field_type.model_validate(field_data), + show_secrets=show_secrets, + **pydantic_export_options, + ) + + return data diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py new file mode 100644 index 000000000000..46f09f86b465 --- /dev/null +++ b/packages/common-library/tests/conftest.py @@ -0,0 +1,33 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-import + +import sys +from pathlib import Path + +import common_library +import pytest + +pytest_plugins = [ + "pytest_simcore.pydantic_models", + "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", + "pytest_simcore.schemas", +] + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +@pytest.fixture(scope="session") +def package_dir(): + pdir = Path(common_library.__file__).resolve().parent + assert pdir.exists() + return pdir + + +@pytest.fixture(scope="session") +def project_slug_dir() -> Path: + folder = CURRENT_DIR.parent + assert folder.exists() + assert any(folder.glob("src/common_library")) + return folder diff --git a/packages/models-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py similarity index 85% rename from packages/models-library/tests/test_errors_classes.py rename to packages/common-library/tests/test_errors_classes.py index ab45d6e48d86..3be2532f1ab5 100644 --- a/packages/models-library/tests/test_errors_classes.py +++ b/packages/common-library/tests/test_errors_classes.py @@ -9,8 +9,7 @@ from typing import Any import pytest -from models_library.errors_classes import OsparcErrorMixin -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin def test_get_full_class_name(): @@ -39,8 +38,7 @@ class B12(B1, ValueError): def test_error_codes_and_msg_template(): class MyBaseError(OsparcErrorMixin, 
diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py new file mode 100644 index 000000000000..46f09f86b465 --- /dev/null +++ b/packages/common-library/tests/conftest.py @@ -0,0 +1,33 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-import + +import sys +from pathlib import Path + +import common_library +import pytest + +pytest_plugins = [ + "pytest_simcore.pydantic_models", + "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", + "pytest_simcore.schemas", +] + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +@pytest.fixture(scope="session") +def package_dir(): + pdir = Path(common_library.__file__).resolve().parent + assert pdir.exists() + return pdir + + +@pytest.fixture(scope="session") +def project_slug_dir() -> Path: + folder = CURRENT_DIR.parent + assert folder.exists() + assert any(folder.glob("src/common_library")) + return folder diff --git a/packages/models-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py similarity index 85% rename from packages/models-library/tests/test_errors_classes.py rename to packages/common-library/tests/test_errors_classes.py index ab45d6e48d86..3be2532f1ab5 100644 --- a/packages/models-library/tests/test_errors_classes.py +++ b/packages/common-library/tests/test_errors_classes.py @@ -9,8 +9,7 @@ from typing import Any import pytest -from models_library.errors_classes import OsparcErrorMixin -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin def test_get_full_class_name(): @@ -39,8 +38,7 @@ class B12(B1, ValueError): def test_error_codes_and_msg_template(): class MyBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) # Do not forget this for base exceptions! + pass class MyValueError(MyBaseError, ValueError): msg_template = "Wrong value {value}" @@ -138,16 +136,10 @@ class MyError(OsparcErrorMixin, ValueError): def test_missing_keys_in_msg_template_does_not_raise(): - class MyErrorBefore(PydanticErrorMixin, ValueError): - msg_template = "{value} and {missing}" - - with pytest.raises(KeyError, match="missing"): - str(MyErrorBefore(value=42)) - - class MyErrorAfter(OsparcErrorMixin, ValueError): + class MyError(OsparcErrorMixin, ValueError): msg_template = "{value} and {missing}" - assert str(MyErrorAfter(value=42)) == "42 and 'missing=?'" + assert str(MyError(value=42)) == "42 and 'missing=?'" def test_exception_context(): @@ -155,7 +147,17 @@ class MyError(OsparcErrorMixin, ValueError): msg_template = "{value} and {missing}" exc = MyError(value=42, missing="foo", extra="bar") - assert exc.error_context() == {"value": 42, "missing": "foo", "extra": "bar"} + assert exc.error_context() == { + "code": "ValueError.MyError", + "message": "42 and foo", + "value": 42, + "missing": "foo", + "extra": "bar", + } exc = MyError(value=42) - assert exc.error_context() == {"value": 42} + assert exc.error_context() == { + "code": "ValueError.MyError", + "message": "42 and 'missing=?'", + "value": 42, + }
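The assertions above pin down the OsparcErrorMixin contract; condensed into a sketch (the error class name is made up):

from common_library.errors_classes import OsparcErrorMixin

class QuotaExceededError(OsparcErrorMixin, ValueError):  # hypothetical error type
    msg_template = "user {user_id} exceeded the quota"

exc = QuotaExceededError(user_id=7, hint="upgrade the plan")  # extra kwargs are kept
assert str(exc) == "user 7 exceeded the quota"
ctx = exc.error_context()  # code + rendered message + all keyword arguments
assert ctx["code"] == "ValueError.QuotaExceededError"
assert ctx["hint"] == "upgrade the plan"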
+ c: str = "bla" + d: bool | None = None + e: Literal["bla"] + + +@pytest.mark.parametrize( + "fn,expected,name", + [ + ( + get_type, + int, + "a", + ), + ( + get_type, + float, + "b", + ), + ( + get_type, + str, + "c", + ), + (get_type, bool, "d"), + ( + is_literal, + False, + "a", + ), + ( + is_literal, + False, + "b", + ), + ( + is_literal, + False, + "c", + ), + (is_literal, False, "d"), + (is_literal, True, "e"), + ( + is_nullable, + False, + "a", + ), + ( + is_nullable, + True, + "b", + ), + ( + is_nullable, + False, + "c", + ), + (is_nullable, True, "d"), + (is_nullable, False, "e"), + ], +) +def test_field_fn(fn: Callable[[Any], Any], expected: Any, name: str): + assert expected == fn(MyModel.model_fields[name]) diff --git a/packages/common-library/tests/test_pydantic_networks_extension.py b/packages/common-library/tests/test_pydantic_networks_extension.py new file mode 100644 index 000000000000..6ab50a42a2b5 --- /dev/null +++ b/packages/common-library/tests/test_pydantic_networks_extension.py @@ -0,0 +1,39 @@ +import pytest +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy +from pydantic import AnyHttpUrl, BaseModel, TypeAdapter, ValidationError +from pydantic_core import Url + + +class A(BaseModel): + url: AnyHttpUrlLegacy + + +def test_any_http_url(): + url = TypeAdapter(AnyHttpUrl).validate_python( + "http://backgroud.testserver.io", + ) + + assert isinstance(url, Url) + assert ( + f"{url}" == "http://backgroud.testserver.io/" + ) # trailing slash added (in Pydantic v2) + + +def test_any_http_url_legacy(): + url = TypeAdapter(AnyHttpUrlLegacy).validate_python( + "http://backgroud.testserver.io", + ) + + assert isinstance(url, str) + assert url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_valid_any_http_url_legacy_field(): + a = A(url="http://backgroud.testserver.io") # type: ignore + + assert a.url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_not_valid_any_http_url_legacy_field(): + with pytest.raises(ValidationError): + A(url="htttttp://backgroud.testserver.io") # type: ignore diff --git a/packages/common-library/tests/test_pydantic_validators.py b/packages/common-library/tests/test_pydantic_validators.py new file mode 100644 index 000000000000..da1ccf95adbf --- /dev/null +++ b/packages/common-library/tests/test_pydantic_validators.py @@ -0,0 +1,40 @@ +from datetime import timedelta + +import pytest +from common_library.pydantic_validators import validate_numeric_string_as_timedelta +from faker import Faker +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict + + +def test_validate_timedelta_in_legacy_mode( + monkeypatch: pytest.MonkeyPatch, faker: Faker +): + class Settings(BaseSettings): + APP_NAME: str + REQUEST_TIMEOUT: timedelta = Field(default=timedelta(seconds=40)) + + _validate_request_timeout = validate_numeric_string_as_timedelta( + "REQUEST_TIMEOUT" + ) + + model_config = SettingsConfigDict() + + app_name = faker.pystr() + env_vars: dict[str, str | bool] = {"APP_NAME": app_name} + + # without timedelta + setenvs_from_dict(monkeypatch, env_vars) + settings = Settings() + print(settings.model_dump()) + assert app_name == settings.APP_NAME + assert timedelta(seconds=40) == settings.REQUEST_TIMEOUT + + # with timedelta in seconds + env_vars["REQUEST_TIMEOUT"] = "5555" + setenvs_from_dict(monkeypatch, env_vars) + settings = Settings() + print(settings.model_dump()) + 
assert app_name == settings.APP_NAME + assert timedelta(seconds=5555) == settings.REQUEST_TIMEOUT diff --git a/packages/common-library/tests/test_serialization.py b/packages/common-library/tests/test_serialization.py new file mode 100644 index 000000000000..d897ff5ec5d6 --- /dev/null +++ b/packages/common-library/tests/test_serialization.py @@ -0,0 +1,25 @@ +import pytest +from common_library.serialization import model_dump_with_secrets +from pydantic import BaseModel, SecretStr + + +class Credentials(BaseModel): + USERNAME: str | None = None + PASSWORD: SecretStr | None = None + + +@pytest.mark.parametrize( + "expected,show_secrets", + [ + ( + {"USERNAME": "DeepThought", "PASSWORD": "42"}, + True, + ), + ( + {"USERNAME": "DeepThought", "PASSWORD": "**********"}, + False, # hide secrets + ), + ], +) +def test_model_dump_with_secrets(expected: dict, show_secrets: bool): + assert expected == model_dump_with_secrets(Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")), show_secrets=show_secrets) diff --git a/packages/dask-task-models-library/requirements/_base.in b/packages/dask-task-models-library/requirements/_base.in index 3cdef671c4b6..f25da08947b4 100644 --- a/packages/dask-task-models-library/requirements/_base.in +++ b/packages/dask-task-models-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'dask-task-models-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index ff32942482a6..eb111f70fb14 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in attrs==24.2.0 @@ -65,7 +67,7 @@ partd==1.4.2 # via dask psutil==6.0.0 # via distributed -pydantic==1.10.18 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -73,10 +75,22 @@ pydantic==1.10.18 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -118,6 +132,7 @@ types-python-dateutil==2.9.0.20240906 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.3 # via diff --git a/packages/dask-task-models-library/requirements/ci.txt b/packages/dask-task-models-library/requirements/ci.txt index 
562a0c1c6422..d7fc2c347fa2 100644 --- a/packages/dask-task-models-library/requirements/ci.txt +++ b/packages/dask-task-models-library/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages pytest-simcore @ ../pytest-simcore +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ diff --git a/packages/dask-task-models-library/requirements/dev.txt b/packages/dask-task-models-library/requirements/dev.txt index 0edd20961ac9..a9d9555b2e8f 100644 --- a/packages/dask-task-models-library/requirements/dev.txt +++ b/packages/dask-task-models-library/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../pytest-simcore/ +--editable ../common-library/ --editable ../models-library/ --editable ../settings-library/ diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py index 4e9d36df3fbb..b4fa976b6659 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, SecretStr +from pydantic import BaseModel, ConfigDict, SecretStr class DockerBasicAuth(BaseModel): @@ -6,9 +6,9 @@ class DockerBasicAuth(BaseModel): username: str password: SecretStr - class Config: - extra = Extra.forbid - schema_extra = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "server_address": "docker.io", @@ -16,4 +16,5 @@ class Config: "password": "123456", } ] - } + }, + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py index f4060531f7fc..f0a6813ba15f 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py @@ -1,20 +1,20 @@ """ Dask task exceptions """ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class TaskValueError(PydanticErrorMixin, ValueError): - code = "task.value_error" +class TaskValueError(OsparcErrorMixin, ValueError): + code = "task.value_error" # type: ignore[assignment] -class TaskCancelledError(PydanticErrorMixin, RuntimeError): - code = "task.cancelled_error" +class TaskCancelledError(OsparcErrorMixin, RuntimeError): + code = "task.cancelled_error" # type: ignore[assignment] msg_template = "The task was cancelled" -class ServiceRuntimeError(PydanticErrorMixin, RuntimeError): - code = "service.runtime_error" +class ServiceRuntimeError(OsparcErrorMixin, RuntimeError): + code = "service.runtime_error" # type: ignore[assignment] msg_template = ( "The service {service_key}:{service_version}" " running in container {container_id} failed with code" diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py index 1455c00cbff1..a27bb027e948 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py @@ -1,10 +1,10 @@ import logging 
from abc import ABC, abstractmethod -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias import dask.typing from distributed.worker import get_worker -from pydantic import BaseModel, Extra, validator +from pydantic import BaseModel, ConfigDict, field_validator from .protocol import TaskOwner @@ -19,8 +19,7 @@ class BaseTaskEvent(BaseModel, ABC): def topic_name() -> str: raise NotImplementedError - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") def _dask_key_to_dask_task_id(key: dask.typing.Key) -> str: @@ -51,8 +50,8 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -78,8 +77,9 @@ class Config(BaseTaskEvent.Config): }, ] } + ) - @validator("progress", always=True) + @field_validator("progress") @classmethod def ensure_between_0_1(cls, v): if 0 <= v <= 1: @@ -112,8 +112,8 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -129,3 +129,4 @@ class Config(BaseTaskEvent.Config): }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 887397d42278..0bb951307233 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -1,7 +1,7 @@ import json from contextlib import suppress from pathlib import Path -from typing import Any, ClassVar, TypeAlias, Union +from typing import Any, TypeAlias, Union from models_library.basic_regex import MIME_TYPE_RE from models_library.generics import DictModel @@ -9,7 +9,7 @@ from pydantic import ( AnyUrl, BaseModel, - Extra, + ConfigDict, Field, StrictBool, StrictFloat, @@ -23,9 +23,9 @@ class PortSchema(BaseModel): required: bool - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "required": True, @@ -34,15 +34,16 @@ class Config: "required": False, }, ] - } + }, + ) class FilePortSchema(PortSchema): mapping: str | None = None url: AnyUrl - class Config(PortSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "mapping": "some_filename.txt", @@ -55,6 +56,7 @@ class Config(PortSchema.Config): }, ] } + ) class FileUrl(BaseModel): @@ -64,12 +66,12 @@ class FileUrl(BaseModel): description="Local file relpath name (if given), otherwise it takes the url filename", ) file_mime_type: str | None = Field( - default=None, description="the file MIME type", regex=MIME_TYPE_RE + default=None, description="the file MIME type", pattern=MIME_TYPE_RE ) - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ 
"examples": [ {"url": "https://some_file_url", "file_mime_type": "application/json"}, { @@ -78,7 +80,8 @@ class Config: "file_mime_type": "application/json", }, ] - } + }, + ) PortValue: TypeAlias = Union[ @@ -94,8 +97,8 @@ class Config: class TaskInputData(DictModel[ServicePortKey, PortValue]): - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_input": False, @@ -106,6 +109,7 @@ class Config: }, ] } + ) PortSchemaValue: TypeAlias = Union[PortSchema, FilePortSchema] @@ -118,8 +122,8 @@ class TaskOutputDataSchema(DictModel[ServicePortKey, PortSchemaValue]): # does not work well in that case. For that reason, the schema is # sent as a json-schema instead of with a dynamically-created model class # - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": {"required": False}, @@ -138,6 +142,7 @@ class Config: }, ] } + ) class TaskOutputData(DictModel[ServicePortKey, PortValue]): @@ -170,10 +175,10 @@ def from_task_output( msg = f"Could not locate '{output_key}' in {output_data_file}" raise ValueError(msg) - return cls.parse_obj(data) + return cls.model_validate(data) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": False, @@ -184,3 +189,4 @@ class Config: }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py index 00f89d96d94d..fd6acf554e02 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Protocol, TypeAlias +from typing import Any, Protocol, TypeAlias from models_library.basic_types import EnvVarKey from models_library.docker import DockerLabelKey @@ -6,7 +6,7 @@ from models_library.projects_nodes_io import NodeID from models_library.services_resources import BootMode from models_library.users import UserID -from pydantic import AnyUrl, BaseModel, root_validator +from pydantic import AnyUrl, BaseModel, ConfigDict, model_validator from settings_library.s3 import S3Settings from .docker import DockerBasicAuth @@ -32,7 +32,7 @@ class TaskOwner(BaseModel): def has_parent(self) -> bool: return bool(self.parent_node_id and self.parent_project_id) - @root_validator + @model_validator(mode="before") @classmethod def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: parent_project_id = values.get("parent_project_id") @@ -44,8 +44,8 @@ def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: raise ValueError(msg) return values - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "user_id": 32, @@ -63,6 +63,7 @@ class Config: }, ] } + ) class ContainerTaskParameters(BaseModel): @@ -76,24 +77,23 @@ class ContainerTaskParameters(BaseModel): boot_mode: BootMode task_owner: TaskOwner - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "image": "ubuntu", "tag": "latest", - "input_data": TaskInputData.Config.schema_extra["examples"][0], - "output_data_keys": TaskOutputDataSchema.Config.schema_extra[ - "examples" - 
][0], + "input_data": TaskInputData.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] + "output_data_keys": TaskOutputDataSchema.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] "command": ["sleep 10", "echo hello"], "envs": {"MYENV": "is an env"}, "labels": {"io.simcore.thelabel": "is amazing"}, "boot_mode": BootMode.CPU.value, - "task_owner": TaskOwner.Config.schema_extra["examples"][0], + "task_owner": TaskOwner.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] }, ] } + ) class ContainerRemoteFct(Protocol): diff --git a/packages/dask-task-models-library/tests/container_tasks/test_docker.py b/packages/dask-task-models-library/tests/container_tasks/test_docker.py index 307fe175547b..4eb5bc749808 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_docker.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_docker.py @@ -4,7 +4,7 @@ @pytest.mark.parametrize("model_cls", [(DockerBasicAuth)]) def test_docker_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_events.py b/packages/dask-task-models-library/tests/container_tasks/test_events.py index 16a308e11e0f..1aa4139720d6 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_events.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_events.py @@ -26,7 +26,7 @@ def test_task_event_abstract(): @pytest.mark.parametrize("model_cls", [TaskProgressEvent, TaskLogEvent]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -51,7 +51,7 @@ def mocked_dask_worker_job_id(mocker: MockerFixture, job_id: str) -> str: return job_id -@pytest.fixture(params=TaskOwner.Config.schema_extra["examples"]) +@pytest.fixture(params=TaskOwner.model_config["json_schema_extra"]["examples"]) def task_owner(request: pytest.FixtureRequest) -> TaskOwner: return TaskOwner(**request.param) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_io.py b/packages/dask-task-models-library/tests/container_tasks/test_io.py index 14527d92391e..db6357d930cd 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_io.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_io.py @@ -30,7 +30,7 @@ def test_io_models_examples(model_cls, model_cls_examples): for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) assert model_instance, f"Failed with {name}" print(name, ":", model_instance) @@ -69,9 +69,11 @@ def _create_fake_outputs( def test_create_task_output_from_task_with_optional_fields_as_required( tmp_path: Path, optional_fields_set: bool, faker: Faker ): - for schema_example in TaskOutputDataSchema.Config.schema_extra["examples"]: + for schema_example in TaskOutputDataSchema.model_config["json_schema_extra"][ + "examples" + ]: - task_output_schema = TaskOutputDataSchema.parse_obj(schema_example) + task_output_schema = TaskOutputDataSchema.model_validate(schema_example) outputs_file_name = _create_fake_outputs( 
task_output_schema, tmp_path, optional_fields_set, faker ) @@ -92,7 +94,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required( def test_create_task_output_from_task_throws_when_there_are_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "required_file_output": { "required": True, @@ -113,7 +115,7 @@ def test_create_task_output_from_task_throws_when_there_are_missing_files( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "optional_file_output": { "required": False, @@ -134,7 +136,7 @@ def test_create_task_output_from_task_does_not_throw_when_there_are_optional_mis def test_create_task_output_from_task_throws_when_there_are_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": True, @@ -153,7 +155,7 @@ def test_create_task_output_from_task_throws_when_there_are_entries( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": False, @@ -182,6 +184,6 @@ def test_objects_are_compatible_with_dask_requirements(model_cls, model_cls_exam for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) reloaded_instance = loads(dumps(model_instance)) assert reloaded_instance == model_instance diff --git a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py index d17202adabda..3c70924a0437 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py @@ -9,7 +9,7 @@ @pytest.mark.parametrize("model_cls", [TaskOwner, ContainerTaskParameters]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -19,7 +19,9 @@ def test_events_models_examples(model_cls): def test_task_owner_parent_valid(faker: Faker): - invalid_task_owner_example = TaskOwner.Config.schema_extra["examples"][0] + invalid_task_owner_example = TaskOwner.model_config["json_schema_extra"][ + "examples" + ][0] invalid_task_owner_example["parent_project_id"] = faker.uuid4() assert invalid_task_owner_example["parent_node_id"] is None with pytest.raises(ValidationError, match=r".+ are None or both are set!"): diff --git a/packages/models-library/Makefile b/packages/models-library/Makefile index b41bafd2f2c7..8a18102f51a5 100644 --- a/packages/models-library/Makefile +++ b/packages/models-library/Makefile @@ -61,7 +61,7 @@ DOCKER_API_VERSION ?= 1.41 docker_rest_api.py: ## auto-generates pydantic models for Docker REST API models # auto-generates $@ from $< @$(SCRIPTS_DIR)/openapi-pydantic-models-generator.bash \ - --url https://docs.docker.com/engine/api/v$(DOCKER_API_VERSION).yaml \ + 
--url https://docs.docker.com/reference/api/engine/version/v$(DOCKER_API_VERSION).yaml \ --output $@ # formats diff --git a/packages/models-library/requirements/_base.in b/packages/models-library/requirements/_base.in index 01da93156ecb..b33d20bdd6b0 100644 --- a/packages/models-library/requirements/_base.in +++ b/packages/models-library/requirements/_base.in @@ -2,8 +2,11 @@ # Specifies third-party dependencies for 'models-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in arrow jsonschema orjson pydantic[email] +pydantic-settings +pydantic-extra-types diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index d21b94e9cb78..e80a9bc5fd9b 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/_base.in attrs==24.2.0 @@ -18,12 +20,22 @@ orjson==3.10.7 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/_base.in +pydantic-settings==2.4.0 + # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings referencing==0.35.1 # via # jsonschema @@ -37,4 +49,6 @@ six==1.16.0 types-python-dateutil==2.9.0.20240906 # via arrow typing-extensions==4.12.2 - # via pydantic + # via + # pydantic + # pydantic-core diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt index b0e97313b9f2..95f4d1cfad93 100644 --- a/packages/models-library/requirements/_test.txt +++ b/packages/models-library/requirements/_test.txt @@ -66,7 +66,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/models-library/requirements/ci.txt b/packages/models-library/requirements/ci.txt index 255e69e47100..fa3c1d99410a 100644 --- a/packages/models-library/requirements/ci.txt +++ b/packages/models-library/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-postgres-database[migration] @ ../postgres-database/ pytest-simcore @ ../pytest-simcore diff --git a/packages/models-library/requirements/dev.txt b/packages/models-library/requirements/dev.txt index 901530f36447..e8372a6f3f61 100644 --- a/packages/models-library/requirements/dev.txt +++ b/packages/models-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../postgres-database/[migration] --editable ../pytest-simcore/ diff --git a/packages/models-library/scripts/validate-pg-projects.py b/packages/models-library/scripts/validate-pg-projects.py old mode 100755 new mode 100644 index e74b438698d8..648b68468763 --- a/packages/models-library/scripts/validate-pg-projects.py +++ b/packages/models-library/scripts/validate-pg-projects.py @@ -4,14 +4,10 @@ import typer from models_library.projects 
import ProjectAtDB -from pydantic import Json, ValidationError, validator -from pydantic.main import Extra +from pydantic import ConfigDict, Json, ValidationError, field_validator class ProjectFromCsv(ProjectAtDB): - class Config(ProjectAtDB.Config): - extra = Extra.forbid - # TODO: missing in ProjectAtDB access_rights: Json @@ -22,9 +18,11 @@ class Config(ProjectAtDB.Config): hidden: bool + model_config = ConfigDict(extra="forbid") + # NOTE: validators introduced to parse CSV - @validator("published", "hidden", pre=True, check_fields=False) + @field_validator("published", "hidden", mode="before", check_fields=False) @classmethod def empty_str_as_false(cls, v): # See booleans for >v1.0 https://pydantic-docs.helpmanual.io/usage/types/#booleans @@ -32,7 +30,7 @@ def empty_str_as_false(cls, v): return False return v - @validator("workbench", pre=True, check_fields=False) + @field_validator("workbench", mode="before", check_fields=False) @classmethod def jsonstr_to_dict(cls, v): if isinstance(v, str): @@ -61,12 +59,12 @@ def validate_csv_exported_pg_project( pid = row.get("uuid", index + 1) try: - model = ProjectFromCsv.parse_obj(row) + model = ProjectFromCsv.model_validate(row) if verbose > 1: typer.secho(f"{pid} OK", fg=typer.colors.GREEN) if verbose > 2: - typer.echo(model.json(indent=2)) + typer.echo(model.model_dump_json(indent=2)) except ValidationError as err: failed.append(pid) typer.secho( diff --git a/packages/models-library/src/models_library/access_rights.py b/packages/models-library/src/models_library/access_rights.py index b1218b858a10..a6cea15a946c 100644 --- a/packages/models-library/src/models_library/access_rights.py +++ b/packages/models-library/src/models_library/access_rights.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field class AccessRights(BaseModel): @@ -6,5 +6,4 @@ class AccessRights(BaseModel): write: bool = Field(..., description="has write access") delete: bool = Field(..., description="has deletion rights") - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/aiodocker_api.py b/packages/models-library/src/models_library/aiodocker_api.py index 757036f31e89..865570fbe36d 100644 --- a/packages/models-library/src/models_library/aiodocker_api.py +++ b/packages/models-library/src/models_library/aiodocker_api.py @@ -1,4 +1,4 @@ -from pydantic import Field, validator +from pydantic import ConfigDict, Field, field_validator from .generated_models.docker_rest_api import ( ContainerSpec, @@ -11,12 +11,12 @@ class AioDockerContainerSpec(ContainerSpec): - Env: dict[str, str | None] | None = Field( # type: ignore + Env: dict[str, str | None] | None = Field( default=None, description="aiodocker expects here a dictionary and re-convert it back internally`.\n", ) - @validator("Env", pre=True) + @field_validator("Env", mode="before") @classmethod def convert_list_to_dict(cls, v): if v is not None and isinstance(v, list): @@ -37,8 +37,7 @@ class AioDockerResources1(Resources1): None, description="Define resources reservation.", alias="Reservations" ) - class Config(Resources1.Config): # type: ignore - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) class AioDockerTaskSpec(TaskSpec): @@ -55,6 +54,4 @@ class AioDockerTaskSpec(TaskSpec): class AioDockerServiceSpec(ServiceSpec): TaskTemplate: AioDockerTaskSpec | None = None - class Config(ServiceSpec.Config): # type: ignore - alias_generator = 
camel_to_snake - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True, alias_generator=camel_to_snake) diff --git a/packages/models-library/src/models_library/api_schemas__common/health.py b/packages/models-library/src/models_library/api_schemas__common/health.py index 1f578888b189..827ec533418c 100644 --- a/packages/models-library/src/models_library/api_schemas__common/health.py +++ b/packages/models-library/src/models_library/api_schemas__common/health.py @@ -1,14 +1,12 @@ -from typing import Any, ClassVar - -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class HealthCheckGet(BaseModel): timestamp: str - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "timestamp": "simcore_service_directorv2.api.routes.health@2023-07-03T12:59:12.024551+00:00" } } + ) diff --git a/packages/models-library/src/models_library/api_schemas__common/meta.py b/packages/models-library/src/models_library/api_schemas__common/meta.py index 8cd2db53ae61..514abdc7d6df 100644 --- a/packages/models-library/src/models_library/api_schemas__common/meta.py +++ b/packages/models-library/src/models_library/api_schemas__common/meta.py @@ -1,6 +1,4 @@ -from typing import Any, ClassVar - -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from ..basic_types import VersionStr @@ -12,11 +10,12 @@ class BaseMeta(BaseModel): default=None, description="Maps every route's path tag with a released version" ) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "name": "simcore_service_foo", "version": "2.4.45", "released": {"v1": "1.3.4", "v2": "2.4.45"}, } } + ) diff --git a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py index d828fc6507d5..999cb2f192cb 100644 --- a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py +++ b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field, SecretStr +from pydantic import BaseModel, ConfigDict, Field, SecretStr class ApiKey(BaseModel): @@ -15,5 +15,4 @@ class ApiKeyInDB(BaseModel): user_id: int product_name: str - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py index 84d761729a4e..2e8c8f75a240 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "catalog") +CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "catalog" +) diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py index ab0c98c4dc54..db386a8714fc 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services.py +++ 
b/packages/models-library/src/models_library/api_schemas_catalog/services.py @@ -1,8 +1,8 @@ from datetime import datetime -from typing import Any, ClassVar, TypeAlias +from typing import Any, TypeAlias from models_library.rpc_pagination import PageRpc -from pydantic import BaseModel, Extra, Field, HttpUrl, NonNegativeInt +from pydantic import BaseModel, ConfigDict, Field, HttpUrl, NonNegativeInt from ..boot_options import BootOptions from ..emails import LowerCaseEmailStr @@ -23,23 +23,23 @@ class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights): - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { # ServiceAccessRights "accessRights": { 1: { "execute_access": False, "write_access": False, - }, + }, # type: ignore[dict-item] 2: { "execute_access": True, "write_access": True, - }, + }, # type: ignore[dict-item] 44: { "execute_access": False, "write_access": False, - }, + }, # type: ignore[dict-item] }, # ServiceMetaData = ServiceCommonData + "name": "My Human Readable Service Name", @@ -72,6 +72,7 @@ class Config: }, } } + ) _EXAMPLE_FILEPICKER: dict[str, Any] = { @@ -206,12 +207,11 @@ class ServiceGet( ): # pylint: disable=too-many-ancestors owner: LowerCaseEmailStr | None - class Config: - allow_population_by_field_name = True - extra = Extra.ignore - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER] - } + model_config = ConfigDict( + extra="ignore", + populate_by_name=True, + json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]}, + ) class ServiceGetV2(BaseModel): @@ -229,7 +229,7 @@ class ServiceGetV2(BaseModel): service_type: ServiceType = Field(default=..., alias="type") contact: LowerCaseEmailStr | None - authors: list[Author] = Field(..., min_items=1) + authors: list[Author] = Field(..., min_length=1) owner: LowerCaseEmailStr | None inputs: ServiceInputsDict @@ -249,11 +249,11 @@ class ServiceGetV2(BaseModel): " It includes current release.", ) - class Config: - extra = Extra.forbid - alias_generator = snake_to_camel - allow_population_by_field_name = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + populate_by_name=True, + alias_generator=snake_to_camel, + json_schema_extra={ "examples": [ { **_EXAMPLE_SLEEPER, # v2.2.1 (latest) @@ -304,7 +304,8 @@ class Config: ], }, ] - } + }, + ) PageRpcServicesGetV2: TypeAlias = PageRpc[ @@ -330,12 +331,13 @@ class ServiceUpdateV2(BaseModel): access_rights: dict[GroupID, ServiceGroupAccessRightsV2] | None = None - class Config: - extra = Extra.forbid - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + extra="forbid", + populate_by_name=True, + alias_generator=snake_to_camel, + ) -assert set(ServiceUpdateV2.__fields__.keys()) - set( # nosec - ServiceGetV2.__fields__.keys() +assert set(ServiceUpdateV2.model_fields.keys()) - set( # nosec + ServiceGetV2.model_fields.keys() ) == {"deprecated"} diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py b/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py index ada65d69e282..8393594b0c85 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar, Literal +from typing import Any, Literal -from pydantic import BaseModel, 
Field +from pydantic import BaseModel, ConfigDict, Field from ..basic_regex import PUBLIC_VARIABLE_NAME_RE from ..services import ServiceInput, ServiceOutput @@ -17,7 +17,7 @@ class ServicePortGet(BaseModel): key: str = Field( ..., description="port identifier name", - regex=PUBLIC_VARIABLE_NAME_RE, + pattern=PUBLIC_VARIABLE_NAME_RE, title="Key name", ) kind: PortKindStr @@ -26,9 +26,8 @@ class ServicePortGet(BaseModel): None, description="jsonschema for the port's value. SEE https://json-schema.org/understanding-json-schema/", ) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "key": "input_1", "kind": "input", @@ -41,6 +40,7 @@ class Config: }, } } + ) @classmethod def from_service_io( diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py index aaa2b0489ae0..331ef23f83e5 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py @@ -13,9 +13,6 @@ class ServiceSpecifications(BaseModel): description="schedule-time specifications specifications for the service (follows Docker Service creation API (specifically only the Resources part), see https://docs.docker.com/engine/api/v1.41/#tag/Service/operation/ServiceCreate", ) - class Config: - pass - class ServiceSpecificationsGet(ServiceSpecifications): ... diff --git a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py index b6570d01c890..79be28f20210 100644 --- a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "clusters-keeper" -) +CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("clusters-keeper") diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index ca06a39b1291..1c9892a72012 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -1,13 +1,14 @@ -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias from pydantic import ( AnyHttpUrl, BaseModel, + ConfigDict, Field, HttpUrl, NonNegativeFloat, - root_validator, - validator, + field_validator, + model_validator, ) from pydantic.networks import AnyUrl from pydantic.types import ByteSize, PositiveFloat @@ -44,7 +45,7 @@ class WorkerMetrics(BaseModel): class UsedResources(DictModel[str, NonNegativeFloat]): - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def ensure_negative_value_is_zero(cls, values): # dasks adds/remove resource values and sometimes @@ -72,7 +73,7 @@ class Scheduler(BaseModel): status: str = Field(..., description="The running status of the scheduler") workers: WorkersDict | None = Field(default_factory=dict) - 
@validator("workers", pre=True, always=True) + @field_validator("workers", mode="before") @classmethod def ensure_workers_is_empty_dict(cls, v): if v is None: @@ -95,10 +96,9 @@ class ClusterGet(Cluster): alias="accessRights", default_factory=dict ) - class Config(Cluster.Config): - allow_population_by_field_name = True + model_config = ConfigDict(populate_by_name=True) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def ensure_access_rights_converted(cls, values): if "access_rights" in values: @@ -118,21 +118,8 @@ class ClusterCreate(BaseCluster): alias="accessRights", default_factory=dict ) - @validator("thumbnail", always=True, pre=True) - @classmethod - def set_default_thumbnail_if_empty(cls, v, values): - if v is None: - cluster_type = values["type"] - default_thumbnails = { - ClusterTypeInModel.AWS.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png", - ClusterTypeInModel.ON_PREMISE.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Crystal_Clear_app_network_local.png/120px-Crystal_Clear_app_network_local.png", - ClusterTypeInModel.ON_DEMAND.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png", - } - return default_thumbnails[cluster_type] - return v - - class Config(BaseCluster.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "name": "My awesome cluster", @@ -156,13 +143,27 @@ class Config(BaseCluster.Config): "password": "somepassword", }, "accessRights": { - 154: CLUSTER_ADMIN_RIGHTS, - 12: CLUSTER_MANAGER_RIGHTS, - 7899: CLUSTER_USER_RIGHTS, + 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item] + 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item] + 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item] }, }, ] } + ) + + @field_validator("thumbnail", mode="before") + @classmethod + def set_default_thumbnail_if_empty(cls, v, values): + if v is None: + cluster_type = values["type"] + default_thumbnails = { + ClusterTypeInModel.AWS.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png", + ClusterTypeInModel.ON_PREMISE.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Crystal_Clear_app_network_local.png/120px-Crystal_Clear_app_network_local.png", + ClusterTypeInModel.ON_DEMAND.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png", + } + return default_thumbnails[cluster_type] + return v class ClusterPatch(BaseCluster): @@ -177,8 +178,8 @@ class ClusterPatch(BaseCluster): alias="accessRights" ) - class Config(BaseCluster.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "name": "Changing the name of my cluster", @@ -188,13 +189,14 @@ class Config(BaseCluster.Config): }, { "accessRights": { - 154: CLUSTER_ADMIN_RIGHTS, - 12: CLUSTER_MANAGER_RIGHTS, - 7899: CLUSTER_USER_RIGHTS, + 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item] + 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item] + 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item] }, }, ] } + ) class ClusterPing(BaseModel): diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py index 
cb8dab74d65a..e383d45f20ed 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py @@ -1,7 +1,7 @@ from typing import Any, TypeAlias from models_library.basic_types import IDStr -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, validator +from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, field_validator from ..clusters import ClusterID from ..projects import ProjectID @@ -43,13 +43,14 @@ class ComputationCreate(BaseModel): use_on_demand_clusters: bool = Field( default=False, description="if True, a cluster will be created as necessary (wallet_id cannot be None, and cluster_id must be None)", + validate_default=True, ) wallet_info: WalletInfo | None = Field( default=None, description="contains information about the wallet used to bill the running service", ) - @validator("product_name", always=True) + @field_validator("product_name") @classmethod def ensure_product_name_defined_if_computation_starts(cls, v, values): if "start_pipeline" in values and values["start_pipeline"] and v is None: @@ -57,7 +58,7 @@ def ensure_product_name_defined_if_computation_starts(cls, v, values): raise ValueError(msg) return v - @validator("use_on_demand_clusters", always=True) + @field_validator("use_on_demand_clusters") @classmethod def ensure_expected_options(cls, v, values): if v is True and ("cluster_id" in values and values["cluster_id"] is not None): diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py index 3515c38a5d7c..151611271a43 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias -from pydantic import BaseModel, ByteSize, Field +from pydantic import BaseModel, ByteSize, ConfigDict, Field from ..resource_tracker import HardwareInfo, PricingInfo from ..services import ServicePortKey @@ -30,10 +30,9 @@ def from_transferred_bytes( ) -> "RetrieveDataOutEnveloped": return cls(data=RetrieveDataOut(size_bytes=ByteSize(transferred_bytes))) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [{"data": {"size_bytes": 42}}] - } + model_config = ConfigDict( + json_schema_extra={"examples": [{"data": {"size_bytes": 42}}]} + ) class DynamicServiceCreate(ServiceDetails): @@ -55,9 +54,8 @@ class DynamicServiceCreate(ServiceDetails): default=None, description="contains harware information (ex. 
aws_ec2_instances)", ) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "key": "simcore/services/dynamic/3dviewer", "version": "2.4.5", @@ -67,14 +65,13 @@ class Config: "basepath": "/x/75c7f3f4-18f9-4678-8610-54a2ade78eaa", "product_name": "osparc", "can_save": True, - "service_resources": ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0], - "wallet_info": WalletInfo.Config.schema_extra["examples"][0], - "pricing_info": PricingInfo.Config.schema_extra["examples"][0], - "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], + "service_resources": ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "wallet_info": WalletInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "pricing_info": PricingInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "hardware_info": HardwareInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] } } + ) DynamicServiceGet: TypeAlias = RunningDynamicServiceDetails diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py index 281589614ab3..d103a3ea8c5d 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py @@ -1,8 +1,7 @@ from functools import cached_property from pathlib import Path -from typing import Any, ClassVar -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from ..basic_types import PortInt from ..projects import ProjectID @@ -34,15 +33,14 @@ class CommonServiceDetails(BaseModel): class ServiceDetails(CommonServiceDetails): - basepath: Path = Field( + basepath: Path | None = Field( default=None, description="predefined path where the dynamic service should be served. 
If empty, the service shall use the root endpoint.", alias="service_basepath", ) - - class Config: - allow_population_by_field_name = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ "example": { "key": "simcore/services/dynamic/3dviewer", "version": "2.4.5", @@ -51,7 +49,8 @@ class Config: "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", "basepath": "/x/75c7f3f4-18f9-4678-8610-54a2ade78eaa", } - } + }, + ) class RunningDynamicServiceDetails(ServiceDetails): @@ -69,7 +68,7 @@ class RunningDynamicServiceDetails(ServiceDetails): internal_port: PortInt = Field( ..., description="the service swarm internal port", alias="service_port" ) - published_port: PortInt = Field( + published_port: PortInt | None = Field( default=None, description="the service swarm published port if any", deprecated=True, @@ -89,13 +88,9 @@ class RunningDynamicServiceDetails(ServiceDetails): alias="service_message", ) - @cached_property - def legacy_service_url(self) -> str: - return f"http://{self.host}:{self.internal_port}{self.basepath}" # NOSONAR - - class Config(ServiceDetails.Config): - keep_untouched = (cached_property,) - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + ignored_types=(cached_property,), + json_schema_extra={ "examples": [ { "boot_type": "V0", @@ -125,4 +120,9 @@ class Config(ServiceDetails.Config): "node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa", }, ] - } + }, + ) + + @cached_property + def legacy_service_url(self) -> str: + return f"http://{self.host}:{self.internal_port}{self.basepath}" # NOSONAR diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/services.py b/packages/models-library/src/models_library/api_schemas_directorv2/services.py index 2bd0084b7b32..c797c687fd1f 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/services.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/services.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar, Final +from typing import Final -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from pydantic.types import ByteSize, NonNegativeInt from ..service_settings_labels import ContainerSpec @@ -23,6 +23,7 @@ class NodeRequirements(BaseModel): None, description="defines the required (maximum) GPU for running the services", alias="GPU", + validate_default=True, ) ram: ByteSize = Field( ..., @@ -33,17 +34,18 @@ class NodeRequirements(BaseModel): default=None, description="defines the required (maximum) amount of VRAM for running the services", alias="VRAM", + validate_default=True, ) - @validator("vram", "gpu", always=True, pre=True) + @field_validator("vram", "gpu", mode="before") @classmethod def check_0_is_none(cls, v): if v == 0: v = None return v - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"CPU": 1.0, "RAM": 4194304}, {"CPU": 1.0, "GPU": 1, "RAM": 4194304}, @@ -53,6 +55,7 @@ class Config: }, ] } + ) class ServiceExtras(BaseModel): @@ -60,11 +63,13 @@ class ServiceExtras(BaseModel): service_build_details: ServiceBuildDetails | None = None container_spec: ContainerSpec | None = None - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"node_requirements": node_example} - for node_example in NodeRequirements.Config.schema_extra["examples"] + for 
node_example in NodeRequirements.model_config["json_schema_extra"][ + "examples" + ] # type: ignore[index,union-attr] ] + [ { @@ -75,7 +80,9 @@ class Config: "vcs_url": "git@github.com:ITISFoundation/osparc-simcore.git", }, } - for node_example in NodeRequirements.Config.schema_extra["examples"] + for node_example in NodeRequirements.model_config["json_schema_extra"][ + "examples" + ] # type: ignore[index,dict-item, union-attr] ] + [ { @@ -87,9 +94,12 @@ class Config: }, "container_spec": {"Command": ["run", "subcommand"]}, } - for node_example in NodeRequirements.Config.schema_extra["examples"] + for node_example in NodeRequirements.model_config["json_schema_extra"][ + "examples" + ] # type: ignore[index,union-attr] ] } + ) CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME: Final[NonNegativeInt] = 89 diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py index 5631d38e5f97..70a4f1247bad 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "dynamic-scheduler" -) +DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("dynamic-scheduler") diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py index 48ef3c484455..47c4fc69a184 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py @@ -1,5 +1,3 @@ -from typing import Any, ClassVar - from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -7,7 +5,7 @@ from models_library.services_resources import ServiceResourcesDictHelpers from models_library.users import UserID from models_library.wallets import WalletInfo -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class DynamicServiceStart(DynamicServiceCreate): @@ -15,8 +13,8 @@ class DynamicServiceStart(DynamicServiceCreate): request_scheme: str simcore_user_agent: str - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "product_name": "osparc", "can_save": True, @@ -28,14 +26,13 @@ class Config: "request_dns": "some.local", "request_scheme": "http", "simcore_user_agent": "", - "service_resources": ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0], - "wallet_info": WalletInfo.Config.schema_extra["examples"][0], - "pricing_info": PricingInfo.Config.schema_extra["examples"][0], - "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], + "service_resources": ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "wallet_info": WalletInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "pricing_info": 
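# Illustrative sketch: every `parse_obj_as` replacement in this patch follows one
# mechanical rule — `parse_obj_as(T, value)` becomes `TypeAdapter(T).validate_python(value)`.
# The simplified constraint below stands in for the real RPCNamespace type.
from typing import Annotated
from pydantic import StringConstraints, TypeAdapter

RPCNamespaceDemo = Annotated[str, StringConstraints(pattern=r"^[a-z0-9-]+$")]

namespace = TypeAdapter(RPCNamespaceDemo).validate_python("dynamic-scheduler")
assert namespace == "dynamic-scheduler"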
PricingInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "hardware_info": HardwareInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] } } + ) class DynamicServiceStop(BaseModel): @@ -45,8 +42,8 @@ class DynamicServiceStop(BaseModel): simcore_user_agent: str save_state: bool - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "user_id": 234, "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", @@ -55,3 +52,4 @@ class Config: "save_state": True, } } + ) diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py index 26fddb502e34..2e14ed62c162 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py @@ -1,18 +1,18 @@ -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias -from pydantic import BaseModel, NonNegativeFloat +from pydantic import BaseModel, ConfigDict, NonNegativeFloat class ActivityInfo(BaseModel): seconds_inactive: NonNegativeFloat - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"seconds_inactive": 0}, {"seconds_inactive": 100}, ] } + ) ActivityInfoOrNone: TypeAlias = ActivityInfo | None diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py index 986b0c872159..b7d315a8aa69 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py @@ -57,8 +57,8 @@ class DiskUsage(BaseModel): total: ByteSize = Field(description="total space = free + used") used_percent: NonNegativeFloat = Field( - gte=0.00, - lte=100.00, + ge=0.00, + le=100.00, description="Percent of used space relative to the total space", ) diff --git a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py index 50793febaf99..f47a9a3f8d36 100644 --- a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "efs-guardian" -) +EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("efs-guardian") diff --git a/packages/models-library/src/models_library/api_schemas_invitations/invitations.py b/packages/models-library/src/models_library/api_schemas_invitations/invitations.py index e25aee7c804d..8c5fd85d2e89 100644 --- a/packages/models-library/src/models_library/api_schemas_invitations/invitations.py +++ b/packages/models-library/src/models_library/api_schemas_invitations/invitations.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar +from typing import Any -from pydantic import BaseModel, Field, HttpUrl +from pydantic import BaseModel, ConfigDict, Field, HttpUrl from ..invitations import InvitationContent, 
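# Illustrative sketch (demo model assumed): the DiskUsage change above is a real
# fix, not a rename — `gte`/`lte` were never Field constraint keywords, so the
# bound was not enforced in v1; `ge`/`le` are, and v2 rejects out-of-range values.
from pydantic import BaseModel, Field, ValidationError

class DiskUsageDemo(BaseModel):
    used_percent: float = Field(ge=0.0, le=100.0)

DiskUsageDemo(used_percent=55.5)  # ok
try:
    DiskUsageDemo(used_percent=101.0)
except ValidationError:
    pass  # the bound is now actually enforced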
InvitationInputs from ..products import ProductName @@ -13,8 +13,7 @@ class ApiInvitationInputs(InvitationInputs): - class Config: - schema_extra: ClassVar[dict[str, Any]] = {"example": _INPUTS_EXAMPLE} + model_config = ConfigDict(json_schema_extra={"example": _INPUTS_EXAMPLE}) class ApiInvitationContent(InvitationContent): @@ -23,26 +22,27 @@ class ApiInvitationContent(InvitationContent): ..., description="This invitations can only be used for this product." ) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { **_INPUTS_EXAMPLE, "product": "osparc", "created": "2023-01-11 13:11:47.293595", } } + ) class ApiInvitationContentAndLink(ApiInvitationContent): invitation_url: HttpUrl = Field(..., description="Invitation link") - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { - **ApiInvitationContent.Config.schema_extra["example"], + **ApiInvitationContent.model_config["json_schema_extra"]["example"], # type: ignore[index,dict-item] "invitation_url": "https://foo.com/#/registration?invitation=1234", } } + ) class ApiEncryptedInvitation(BaseModel): diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py index 1995d8c38495..a3bb93813dc0 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py @@ -1,7 +1,7 @@ import logging -from typing import TypeAlias +from typing import Annotated, TypeAlias -from pydantic import BaseModel, ConstrainedFloat, Field, validate_arguments, validator +from pydantic import BaseModel, Field, field_validator, validate_call _logger = logging.getLogger(__name__) @@ -9,10 +9,7 @@ ProgressMessage: TypeAlias = str - -class ProgressPercent(ConstrainedFloat): - ge = 0.0 - le = 1.0 +ProgressPercent: TypeAlias = Annotated[float, Field(ge=0.0, le=1.0)] class TaskProgress(BaseModel): @@ -25,7 +22,7 @@ class TaskProgress(BaseModel): message: ProgressMessage = Field(default="") percent: ProgressPercent = Field(default=0.0) - @validate_arguments + @validate_call def update( self, *, @@ -47,7 +44,7 @@ def update( def create(cls, task_id: TaskId | None = None) -> "TaskProgress": return cls(task_id=task_id) - @validator("percent") + @field_validator("percent") @classmethod def round_value_to_3_digit(cls, v): return round(v, 3) diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py index dd0e0c0a72be..b5a8d8443b93 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py @@ -2,7 +2,7 @@ from datetime import datetime from typing import Any -from pydantic import BaseModel, validator +from pydantic import BaseModel, field_validator from .base import TaskId, TaskProgress @@ -25,7 +25,7 @@ class TaskGet(BaseModel): result_href: str abort_href: str - @validator("task_name") + @field_validator("task_name") @classmethod def unquote_str(cls, v) -> str: return urllib.parse.unquote(v) diff --git a/packages/models-library/src/models_library/api_schemas_payments/__init__.py b/packages/models-library/src/models_library/api_schemas_payments/__init__.py index 
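# Illustrative sketch: two renames meet in the long-running-tasks models —
# ConstrainedFloat subclasses become Annotated aliases, and
# @validate_arguments is now @validate_call.
from typing import Annotated
from pydantic import Field, validate_call

ProgressPercentDemo = Annotated[float, Field(ge=0.0, le=1.0)]

@validate_call
def update(*, percent: ProgressPercentDemo) -> float:
    return round(percent, 3)

assert update(percent=0.12345) == 0.123  # percent=1.5 would raise ValidationError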
30d68367deda..73928d6ccd7d 100644 --- a/packages/models-library/src/models_library/api_schemas_payments/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_payments/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "payments") +PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "payments" +) diff --git a/packages/models-library/src/models_library/api_schemas_payments/errors.py b/packages/models-library/src/models_library/api_schemas_payments/errors.py index eaeba92aab1e..362482772f76 100644 --- a/packages/models-library/src/models_library/api_schemas_payments/errors.py +++ b/packages/models-library/src/models_library/api_schemas_payments/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class _BaseRpcApiError(PydanticErrorMixin, ValueError): +class _BaseRpcApiError(OsparcErrorMixin, ValueError): @classmethod def get_full_class_name(cls) -> str: # Can be used as unique code identifier diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py index 295897e5b1d0..d32b474edf69 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "resource-usage-tracker" -) +RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("resource-usage-tracker") diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py index edb308ff39ad..db235ce8094e 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py @@ -1,7 +1,8 @@ from datetime import datetime from decimal import Decimal +from typing import Annotated -from pydantic import BaseModel, validator +from pydantic import BaseModel, BeforeValidator, PlainSerializer from ..products import ProductName from ..resource_tracker import CreditTransactionId @@ -11,12 +12,11 @@ class WalletTotalCredits(BaseModel): wallet_id: WalletID - available_osparc_credits: Decimal - - @validator("available_osparc_credits", always=True) - @classmethod - def ensure_rounded(cls, v): - return round(v, 2) + available_osparc_credits: Annotated[ + Decimal, + BeforeValidator(lambda x: round(x, 2)), + PlainSerializer(float, return_type=float, when_used="json"), + ] class CreditTransactionCreateBody(BaseModel): diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py index 9c3ca4ba2b1a..294ea360e584 100644 --- 
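# Illustrative sketch (demo names assumed): the WalletTotalCredits rewrite moves
# validator logic into the type itself — BeforeValidator rounds the input and
# PlainSerializer emits a float only when serializing to JSON.
from decimal import Decimal
from typing import Annotated
from pydantic import BaseModel, BeforeValidator, PlainSerializer

CreditsDemo = Annotated[
    Decimal,
    BeforeValidator(lambda x: round(x, 2)),
    PlainSerializer(float, return_type=float, when_used="json"),
]

class WalletDemo(BaseModel):
    available_osparc_credits: CreditsDemo

wallet = WalletDemo(available_osparc_credits=Decimal("1.2345"))
assert wallet.available_osparc_credits == Decimal("1.23")
assert wallet.model_dump_json() == '{"available_osparc_credits":1.23}'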
a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py @@ -1,8 +1,8 @@ from datetime import datetime from decimal import Decimal -from typing import Any, ClassVar +from typing import Annotated -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict, PlainSerializer from ..resource_tracker import ( HardwareInfo, @@ -19,26 +19,31 @@ class PricingUnitGet(BaseModel): pricing_unit_id: PricingUnitId unit_name: str unit_extra_info: UnitExtraInfo - current_cost_per_unit: Decimal + current_cost_per_unit: Annotated[ + Decimal, PlainSerializer(float, return_type=float, when_used="json") + ] current_cost_per_unit_id: PricingUnitCostId default: bool specific_info: HardwareInfo - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_unit_id": 1, "unit_name": "SMALL", - "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0], + "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "current_cost_per_unit": 5.7, "current_cost_per_unit_id": 1, "default": True, "specific_info": hw_config_example, } - for hw_config_example in HardwareInfo.Config.schema_extra["examples"] + for hw_config_example in HardwareInfo.model_config["json_schema_extra"][ + "examples" + ] # type: ignore[index,union-attr] ] } + ) class PricingPlanGet(BaseModel): @@ -51,8 +56,8 @@ class PricingPlanGet(BaseModel): pricing_units: list[PricingUnitGet] | None is_active: bool - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_plan_id": 1, @@ -64,11 +69,14 @@ class Config: "pricing_units": [pricing_unit_get_example], "is_active": True, } - for pricing_unit_get_example in PricingUnitGet.Config.schema_extra[ + for pricing_unit_get_example in PricingUnitGet.model_config[ + "json_schema_extra" + ][ "examples" - ] + ] # type: ignore[index,union-attr] ] } + ) class PricingPlanToServiceGet(BaseModel): @@ -77,8 +85,8 @@ class PricingPlanToServiceGet(BaseModel): service_version: ServiceVersion created: datetime - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_plan_id": 1, @@ -88,3 +96,4 @@ class Config: } ] } + ) diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py index 29e341456bce..bd0185a9173a 100644 --- a/packages/models-library/src/models_library/api_schemas_storage.py +++ b/packages/models-library/src/models_library/api_schemas_storage.py @@ -6,22 +6,21 @@ IMPORTANT: DO NOT COUPLE these schemas until storage is refactored """ -import re from datetime import datetime from enum import Enum -from re import Pattern -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, Any, TypeAlias from uuid import UUID from pydantic import ( BaseModel, ByteSize, - ConstrainedStr, - Extra, + ConfigDict, Field, PositiveInt, - root_validator, - validator, + RootModel, + StringConstraints, + field_validator, + model_validator, ) from pydantic.networks import AnyUrl @@ -38,13 +37,11 @@ ETag: TypeAlias = str +S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)] -class S3BucketName(ConstrainedStr): - regex: Pattern[str] | None = 
re.compile(S3_BUCKET_NAME_RE) - - -class DatCoreDatasetName(ConstrainedStr): - regex: Pattern[str] | None = re.compile(DATCORE_DATASET_NAME_RE) +DatCoreDatasetName: TypeAlias = Annotated[ + str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE) +] # / @@ -60,14 +57,15 @@ class FileLocation(BaseModel): name: LocationName id: LocationID - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ {"name": "simcore.s3", "id": 0}, {"name": "datcore", "id": 1}, ] - } + }, + ) FileLocationArray: TypeAlias = ListModel[FileLocation] @@ -77,11 +75,10 @@ class Config: class DatasetMetaDataGet(BaseModel): dataset_id: UUID | DatCoreDatasetName display_name: str - - class Config: - extra = Extra.forbid - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + from_attributes=True, + json_schema_extra={ "examples": [ # simcore dataset { @@ -106,7 +103,8 @@ class Config: "display_name": "YetAnotherTest", }, ] - } + }, + ) # /locations/{location_id}/files/metadata: @@ -150,17 +148,10 @@ class FileMetaDataGet(BaseModel): description="SHA256 message digest of the file content. Main purpose: cheap lookup.", ) - @validator("location_id", pre=True) - @classmethod - def ensure_location_is_integer(cls, v): - if v is not None: - return int(v) - return v - - class Config: - extra = Extra.forbid - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + from_attributes=True, + json_schema_extra={ "examples": [ # typical S3 entry { @@ -234,11 +225,19 @@ class Config: "project_name": None, }, ] - } + }, + ) + + @field_validator("location_id", mode="before") + @classmethod + def ensure_location_is_integer(cls, v): + if v is not None: + return int(v) + return v -class FileMetaDataArray(BaseModel): - __root__: list[FileMetaDataGet] = [] +class FileMetaDataArray(RootModel[list[FileMetaDataGet]]): + root: list[FileMetaDataGet] = Field(default_factory=list) # /locations/{location_id}/files/{file_id} @@ -279,7 +278,7 @@ class UploadedPart(BaseModel): class FileUploadCompletionBody(BaseModel): parts: list[UploadedPart] - @validator("parts") + @field_validator("parts") @classmethod def ensure_sorted(cls, value: list[UploadedPart]) -> list[UploadedPart]: return sorted(value, key=lambda uploaded_part: uploaded_part.number) @@ -312,7 +311,7 @@ class FoldersBody(BaseModel): destination: dict[str, Any] = Field(default_factory=dict) nodes_map: dict[NodeID, NodeID] = Field(default_factory=dict) - @root_validator() + @model_validator(mode="after") @classmethod def ensure_consistent_entries(cls, values): source_node_keys = (NodeID(n) for n in values["source"].get("workbench", {})) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py index f30e0f0790d4..c95f68ab78c6 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "webserver") +WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("webserver") diff --git 
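# Illustrative sketch (simplified pattern and demo names assumed): two storage
# patterns in one place — ConstrainedStr subclasses collapse into
# Annotated[str, StringConstraints(...)], and `__root__` models become RootModel.
from typing import Annotated
from pydantic import BaseModel, Field, RootModel, StringConstraints, TypeAdapter

S3BucketNameDemo = Annotated[
    str, StringConstraints(pattern=r"^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$")
]
TypeAdapter(S3BucketNameDemo).validate_python("my-bucket-01")  # ok

class FileMetaDemo(BaseModel):
    file_id: str

class FileMetaArrayDemo(RootModel[list[FileMetaDemo]]):
    root: list[FileMetaDemo] = Field(default_factory=list)

array = FileMetaArrayDemo.model_validate([{"file_id": "api/abc/readme.md"}])
assert array.root[0].file_id == "api/abc/readme.md"
assert FileMetaArrayDemo().root == []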
a/packages/models-library/src/models_library/api_schemas_webserver/_base.py b/packages/models-library/src/models_library/api_schemas_webserver/_base.py index 9856a4743e9a..718984116c7e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/_base.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/_base.py @@ -5,38 +5,36 @@ from typing import Any -from pydantic import BaseModel, Extra +from pydantic import BaseModel, ConfigDict from ..utils.change_case import snake_to_camel class EmptyModel(BaseModel): - # Used to represent body={} - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class InputSchemaWithoutCamelCase(BaseModel): - # Added to tmp keep backwards compatibility - # until all bodies are updated - # - class Config: - allow_population_by_field_name = False - extra = Extra.ignore # Non-strict inputs policy: Used to prune extra field - allow_mutations = False + model_config = ConfigDict( + populate_by_name=False, + extra="ignore", # Non-strict inputs policy: Used to prune extra field + frozen=True, + ) class InputSchema(BaseModel): - class Config(InputSchemaWithoutCamelCase.Config): - alias_generator = snake_to_camel + model_config = ConfigDict( + **InputSchemaWithoutCamelCase.model_config, alias_generator=snake_to_camel + ) class OutputSchema(BaseModel): - class Config: - allow_population_by_field_name = True - extra = Extra.ignore # Used to prune extra fields from internal data - allow_mutations = False - alias_generator = snake_to_camel + model_config = ConfigDict( + alias_generator=snake_to_camel, + populate_by_name=True, + extra="ignore", # Used to prune extra fields from internal data + frozen=True, + ) def data( self, @@ -47,7 +45,7 @@ def data( **kwargs ) -> dict[str, Any]: """Helper function to get envelope's data as a dict""" - return self.dict( + return self.model_dump( by_alias=True, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, @@ -64,7 +62,7 @@ def data_json( **kwargs ) -> str: """Helper function to get envelope's data as a json str""" - return self.json( + return self.model_dump_json( by_alias=True, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, diff --git a/packages/models-library/src/models_library/api_schemas_webserver/auth.py b/packages/models-library/src/models_library/api_schemas_webserver/auth.py index d9f2754171d0..b0b11661cb36 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/auth.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/auth.py @@ -1,7 +1,7 @@ from datetime import timedelta -from typing import Any, ClassVar +from typing import Any -from pydantic import BaseModel, Field, SecretStr +from pydantic import BaseModel, ConfigDict, Field, SecretStr from ..emails import LowerCaseEmailStr from ._base import InputSchema @@ -11,11 +11,13 @@ class AccountRequestInfo(InputSchema): form: dict[str, Any] captcha: str - class Config: + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, # NOTE: this is just informative. 
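# Illustrative sketch: _base.py shows the two idioms that replace Config-class
# inheritance — ConfigDict is a plain mapping, so configs merge by `**` unpacking,
# and .dict()/.json() become model_dump()/model_dump_json().
from pydantic import BaseModel, ConfigDict

_common = ConfigDict(populate_by_name=True, extra="ignore", frozen=True)

class OutputDemo(BaseModel):
    model_config = ConfigDict(**_common, alias_generator=str.upper)
    some_field: str = "value"

assert OutputDemo().model_dump(by_alias=True) == {"SOME_FIELD": "value"}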
The format of the form is defined # currently in the front-end and it might change # SEE image in https://github.com/ITISFoundation/osparc-simcore/pull/5378 - schema_extra: ClassVar[dict[str, Any]] = { + json_schema_extra={ "example": { "form": { "firstName": "James", @@ -35,9 +37,8 @@ class Config: }, "captcha": "A12B34", } - } - anystr_strip_whitespace = True - max_anystr_length = 200 + }, + ) class UnregisterCheck(InputSchema): @@ -57,8 +58,8 @@ class ApiKeyCreate(BaseModel): description="Time delta from creation time to expiration. If None, then it does not expire.", ) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "display_name": "test-api-forever", @@ -73,6 +74,7 @@ class Config: }, ] } + ) class ApiKeyGet(BaseModel): @@ -80,9 +82,10 @@ class ApiKeyGet(BaseModel): api_key: str api_secret: str - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"display_name": "myapi", "api_key": "key", "api_secret": "secret"}, ] } + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py index 172575a8f929..09bfa36499ad 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py @@ -1,13 +1,12 @@ -from typing import Any, ClassVar, TypeAlias +from typing import Any, TypeAlias -from pydantic import Extra, Field +from pydantic import ConfigDict, Field from pydantic.main import BaseModel from ..api_schemas_catalog import services as api_schemas_catalog_services from ..services_io import ServiceInput, ServiceOutput from ..services_types import ServicePortKey from ..utils.change_case import snake_to_camel -from ..utils.json_serialization import json_dumps, json_loads from ._base import InputSchema, OutputSchema ServiceInputKey: TypeAlias = ServicePortKey @@ -24,12 +23,9 @@ class _BaseCommonApiExtension(BaseModel): description="Short name for the unit for display (html-compatible), if available", ) - class Config: - alias_generator = snake_to_camel - allow_population_by_field_name = True - extra = Extra.forbid - json_dumps = json_dumps - json_loads = json_loads + model_config = ConfigDict( + alias_generator=snake_to_camel, populate_by_name=True, extra="forbid" + ) class ServiceInputGet(ServiceInput, _BaseCommonApiExtension): @@ -39,8 +35,8 @@ class ServiceInputGet(ServiceInput, _BaseCommonApiExtension): ..., description="Unique name identifier for this input" ) - class Config(_BaseCommonApiExtension.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "displayOrder": 2, "label": "Sleep Time", @@ -70,6 +66,7 @@ class Config(_BaseCommonApiExtension.Config): } ], } + ) class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension): @@ -79,8 +76,8 @@ class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension): ..., description="Unique name identifier for this input" ) - class Config(_BaseCommonApiExtension.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "displayOrder": 2, "label": "Time Slept", @@ -92,6 +89,7 @@ class Config(_BaseCommonApiExtension.Config): "keyId": "output_2", } } + ) ServiceInputsGetDict: TypeAlias = dict[ServicePortKey, ServiceInputGet] @@ -99,7 +97,7 @@ class 
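# Illustrative sketch: the v1 string knobs survive under new ConfigDict names —
# anystr_strip_whitespace -> str_strip_whitespace, max_anystr_length -> str_max_length.
from pydantic import BaseModel, ConfigDict

class AccountFormDemo(BaseModel):
    model_config = ConfigDict(str_strip_whitespace=True, str_max_length=200)
    captcha: str

assert AccountFormDemo(captcha="  A12B34  ").captcha == "A12B34"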
Config(_BaseCommonApiExtension.Config): _EXAMPLE_FILEPICKER: dict[str, Any] = { - **api_schemas_catalog_services.ServiceGet.Config.schema_extra["examples"][1], + **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][1], # type: ignore [index,dict-item] "inputs": {}, "outputs": { "outFile": { @@ -114,7 +112,7 @@ class Config(_BaseCommonApiExtension.Config): } _EXAMPLE_SLEEPER: dict[str, Any] = { - **api_schemas_catalog_services.ServiceGet.Config.schema_extra["examples"][0], + **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][0], # type: ignore[index,dict-item] "inputs": { "input_1": { "displayOrder": 1, @@ -224,15 +222,14 @@ class ServiceGet(api_schemas_catalog_services.ServiceGet): ..., description="outputs with extended information" ) - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER] - } + model_config = ConfigDict( + **OutputSchema.model_config, + json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]}, + ) class ServiceResourcesGet(api_schemas_catalog_services.ServiceResourcesGet): - class Config(OutputSchema.Config): - ... + model_config = OutputSchema.model_config class CatalogServiceGet(api_schemas_catalog_services.ServiceGetV2): @@ -246,23 +243,26 @@ class CatalogServiceGet(api_schemas_catalog_services.ServiceGetV2): ..., description="outputs with extended information" ) - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + **OutputSchema.model_config, + json_schema_extra={ "example": { - **api_schemas_catalog_services.ServiceGetV2.Config.schema_extra[ - "examples" - ][0], + **api_schemas_catalog_services.ServiceGetV2.model_config["json_schema_extra"]["examples"][0], # type: ignore [index,dict-item] "inputs": { f"input{i}": example for i, example in enumerate( - ServiceInputGet.Config.schema_extra["examples"] + ServiceInputGet.model_config["json_schema_extra"]["examples"] # type: ignore[index,arg-type] ) }, - "outputs": {"outFile": ServiceOutputGet.Config.schema_extra["example"]}, + "outputs": { + "outFile": ServiceOutputGet.model_config["json_schema_extra"][ + "example" + ] # type: ignore[index] + }, } - } + }, + ) class CatalogServiceUpdate(api_schemas_catalog_services.ServiceUpdateV2): - class Config(InputSchema.Config): - ... + model_config = InputSchema.model_config diff --git a/packages/models-library/src/models_library/api_schemas_webserver/clusters.py b/packages/models-library/src/models_library/api_schemas_webserver/clusters.py index b1b897462389..109e0618b98d 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/clusters.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra +from pydantic import BaseModel, ConfigDict from ..api_schemas_directorv2 import clusters as directorv2_clusters from ..clusters import ClusterID @@ -7,32 +7,27 @@ class ClusterPathParams(BaseModel): cluster_id: ClusterID - - class Config: - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + populate_by_name=True, + extra="forbid", + ) class ClusterGet(directorv2_clusters.ClusterGet): - class Config(OutputSchema.Config): - ... + model_config = OutputSchema.model_config class ClusterCreate(directorv2_clusters.ClusterCreate): - class Config(InputSchema.Config): - ... 
+ model_config = InputSchema.model_config class ClusterPatch(directorv2_clusters.ClusterPatch): - class Config(InputSchema.Config): - ... + model_config = InputSchema.model_config class ClusterPing(directorv2_clusters.ClusterPing): - class Config(InputSchema.Config): - ... + model_config = InputSchema.model_config class ClusterDetails(directorv2_clusters.ClusterDetails): - class Config(OutputSchema.Config): - ... + model_config = OutputSchema.model_config diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders.py b/packages/models-library/src/models_library/api_schemas_webserver/folders.py index e971b1f8c73c..48a2ae605e41 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/folders.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/folders.py @@ -6,7 +6,7 @@ from models_library.projects_access import AccessRights from models_library.users import GroupID from models_library.utils.common_validators import null_or_none_str_to_none_validator -from pydantic import Extra, PositiveInt, validator +from pydantic import ConfigDict, PositiveInt, field_validator from ._base import InputSchema, OutputSchema @@ -33,11 +33,10 @@ class CreateFolderBodyParams(InputSchema): description: str parent_folder_id: FolderID | None = None - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") - _null_or_none_str_to_none_validator = validator( - "parent_folder_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = field_validator( + "parent_folder_id", mode="before" )(null_or_none_str_to_none_validator) @@ -45,5 +44,4 @@ class PutFolderBodyParams(InputSchema): name: IDStr description: str - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py index e2d4918c4355..29fed6baced1 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py @@ -7,7 +7,7 @@ from models_library.users import GroupID from models_library.utils.common_validators import null_or_none_str_to_none_validator from models_library.workspaces import WorkspaceID -from pydantic import Extra, PositiveInt, validator +from pydantic import ConfigDict, PositiveInt, field_validator from ._base import InputSchema, OutputSchema @@ -32,26 +32,22 @@ class CreateFolderBodyParams(InputSchema): name: IDStr parent_folder_id: FolderID | None = None workspace_id: WorkspaceID | None = None + model_config = ConfigDict(extra="forbid") - class Config: - extra = Extra.forbid - - _null_or_none_str_to_none_validator = validator( - "parent_folder_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = field_validator( + "parent_folder_id", mode="before" )(null_or_none_str_to_none_validator) - _null_or_none_str_to_none_validator2 = validator( - "workspace_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator2 = field_validator( + "workspace_id", mode="before" )(null_or_none_str_to_none_validator) class PutFolderBodyParams(InputSchema): name: IDStr parent_folder_id: FolderID | None + model_config = ConfigDict(extra="forbid") - class Config: - extra = Extra.forbid - - _null_or_none_str_to_none_validator = validator( - "parent_folder_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = 
field_validator( + "parent_folder_id", mode="before" )(null_or_none_str_to_none_validator) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/groups.py b/packages/models-library/src/models_library/api_schemas_webserver/groups.py index e0b6d3fbb37c..46e9da3dc525 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/groups.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/groups.py @@ -1,7 +1,14 @@ from contextlib import suppress -from typing import Any, ClassVar -from pydantic import AnyUrl, BaseModel, Field, ValidationError, parse_obj_as, validator +from pydantic import ( + AnyUrl, + BaseModel, + ConfigDict, + Field, + TypeAdapter, + ValidationError, + field_validator, +) from ..emails import LowerCaseEmailStr @@ -18,15 +25,15 @@ class GroupAccessRights(BaseModel): read: bool write: bool delete: bool - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"read": True, "write": False, "delete": False}, {"read": True, "write": True, "delete": False}, {"read": True, "write": True, "delete": True}, ] } + ) class UsersGroup(BaseModel): @@ -43,17 +50,8 @@ class UsersGroup(BaseModel): alias="inclusionRules", ) - @validator("thumbnail", pre=True) - @classmethod - def sanitize_legacy_data(cls, v): - if v: - # Enforces null if thumbnail is not valid URL or empty - with suppress(ValidationError): - return parse_obj_as(AnyUrl, v) - return None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "gid": "27", @@ -84,6 +82,16 @@ class Config: }, ] } + ) + + @field_validator("thumbnail", mode="before") + @classmethod + def _sanitize_legacy_data(cls, v): + if v: + # Enforces null if thumbnail is not valid URL or empty + with suppress(ValidationError): + return TypeAdapter(AnyUrl).validate_python(v) + return None class AllUsersGroups(BaseModel): @@ -92,8 +100,8 @@ class AllUsersGroups(BaseModel): all: UsersGroup | None = None product: UsersGroup | None = None - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "me": { "gid": "27", @@ -131,6 +139,7 @@ class Config: }, } } + ) class GroupUserGet(BaseModel): @@ -142,8 +151,8 @@ class GroupUserGet(BaseModel): gid: str | None = Field(None, description="the user primary gid") access_rights: GroupAccessRights = Field(..., alias="accessRights") - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "id": "1", "login": "mr.smith@matrix.com", @@ -158,3 +167,4 @@ class Config: }, } } + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/product.py b/packages/models-library/src/models_library/api_schemas_webserver/product.py index da0db6032022..f967e15d548e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/product.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/product.py @@ -1,7 +1,7 @@ from datetime import datetime -from typing import Any, ClassVar +from typing import Annotated, TypeAlias -from pydantic import ConstrainedInt, Field, HttpUrl, NonNegativeInt, PositiveInt +from pydantic import ConfigDict, Field, HttpUrl, NonNegativeInt, PositiveInt from ..basic_types import IDStr, NonNegativeDecimal from ..emails import LowerCaseEmailStr @@ -22,8 +22,8 @@ class GetCreditPrice(OutputSchema): "Can be None if this product's price 
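# Illustrative sketch (demo model assumed): the UsersGroup thumbnail sanitizer
# shows TypeAdapter doing one-off validation inside a before-validator;
# invalid legacy values degrade to None instead of failing the whole model.
from contextlib import suppress
from pydantic import AnyUrl, BaseModel, TypeAdapter, ValidationError, field_validator

class UsersGroupDemo(BaseModel):
    thumbnail: AnyUrl | None = None

    @field_validator("thumbnail", mode="before")
    @classmethod
    def _sanitize_legacy_data(cls, v):
        if v:
            with suppress(ValidationError):
                return TypeAdapter(AnyUrl).validate_python(v)
        return None

assert UsersGroupDemo(thumbnail="not a url").thumbnail is None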
is UNDEFINED", ) - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "productName": "osparc", @@ -37,6 +37,7 @@ class Config(OutputSchema.Config): }, ] } + ) class GetProductTemplate(OutputSchema): @@ -75,9 +76,7 @@ class GetProduct(OutputSchema): ) -class ExtraCreditsUsdRangeInt(ConstrainedInt): - ge = 0 - lt = 500 +ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)] class GenerateInvitation(InputSchema): @@ -95,8 +94,8 @@ class InvitationGenerated(OutputSchema): created: datetime invitation_link: HttpUrl - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "productName": "osparc", @@ -117,3 +116,4 @@ class Config(OutputSchema.Config): }, ] } + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 2d8cd69ab93a..d02f37b0aac6 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -9,14 +9,10 @@ from models_library.folders import FolderID from models_library.workspaces import WorkspaceID -from pydantic import Field, validator +from pydantic import Field, HttpUrl, field_validator from ..api_schemas_long_running_tasks.tasks import TaskGet -from ..basic_types import ( - HttpUrlWithCustomMinLength, - LongTruncatedStr, - ShortTruncatedStr, -) +from ..basic_types import LongTruncatedStr, ShortTruncatedStr from ..emails import LowerCaseEmailStr from ..projects import ClassifierID, DateTimeStr, NodesDict, ProjectID from ..projects_access import AccessRights, GroupIDStr @@ -36,7 +32,7 @@ class ProjectCreateNew(InputSchema): uuid: ProjectID | None = None # NOTE: suggested uuid! but could be different! 
name: str description: str | None - thumbnail: HttpUrlWithCustomMinLength | None + thumbnail: HttpUrl | None workbench: NodesDict access_rights: dict[GroupIDStr, AccessRights] tags: list[int] = Field(default_factory=list) @@ -45,23 +41,23 @@ class ProjectCreateNew(InputSchema): workspace_id: WorkspaceID | None = None folder_id: FolderID | None = None - _empty_is_none = validator( - "uuid", "thumbnail", "description", allow_reuse=True, pre=True - )(empty_str_to_none_pre_validator) + _empty_is_none = field_validator("uuid", "thumbnail", "description", mode="before")( + empty_str_to_none_pre_validator + ) - _null_or_none_to_none = validator( - "workspace_id", "folder_id", allow_reuse=True, pre=True - )(null_or_none_str_to_none_validator) + _null_or_none_to_none = field_validator("workspace_id", "folder_id", mode="before")( + null_or_none_str_to_none_validator + ) # NOTE: based on OVERRIDABLE_DOCUMENT_KEYS class ProjectCopyOverride(InputSchema): name: str description: str | None - thumbnail: HttpUrlWithCustomMinLength | None + thumbnail: HttpUrl | None prj_owner: LowerCaseEmailStr - _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True)( + _empty_is_none = field_validator("thumbnail", mode="before")( empty_str_to_none_pre_validator ) @@ -70,7 +66,7 @@ class ProjectGet(OutputSchema): uuid: ProjectID name: str description: str - thumbnail: HttpUrlWithCustomMinLength | Literal[""] + thumbnail: HttpUrl | Literal[""] creation_date: DateTimeStr last_change_date: DateTimeStr workbench: NodesDict @@ -78,15 +74,15 @@ class ProjectGet(OutputSchema): access_rights: dict[GroupIDStr, AccessRights] tags: list[int] classifiers: list[ClassifierID] = [] - state: ProjectState | None - ui: EmptyModel | StudyUI | None + state: ProjectState | None = None + ui: EmptyModel | StudyUI | None = None quality: dict[str, Any] = {} - dev: dict | None + dev: dict | None = None permalink: ProjectPermalink = FieldNotRequired() workspace_id: WorkspaceID | None folder_id: FolderID | None - _empty_description = validator("description", allow_reuse=True, pre=True)( + _empty_description = field_validator("description", mode="before")( none_to_empty_str_pre_validator ) @@ -102,7 +98,7 @@ class ProjectReplace(InputSchema): uuid: ProjectID name: ShortTruncatedStr description: LongTruncatedStr - thumbnail: HttpUrlWithCustomMinLength | None + thumbnail: HttpUrl | None creation_date: DateTimeStr last_change_date: DateTimeStr workbench: NodesDict @@ -116,7 +112,7 @@ class ProjectReplace(InputSchema): default_factory=dict, ) - _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True)( + _empty_is_none = field_validator("thumbnail", mode="before")( empty_str_to_none_pre_validator ) @@ -124,7 +120,7 @@ class ProjectReplace(InputSchema): class ProjectUpdate(InputSchema): name: ShortTruncatedStr = FieldNotRequired() description: LongTruncatedStr = FieldNotRequired() - thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired() + thumbnail: HttpUrl = FieldNotRequired() workbench: NodesDict = FieldNotRequired() access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired() tags: list[int] = FieldNotRequired() @@ -136,7 +132,7 @@ class ProjectUpdate(InputSchema): class ProjectPatch(InputSchema): name: ShortTruncatedStr = FieldNotRequired() description: LongTruncatedStr = FieldNotRequired() - thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired() + thumbnail: HttpUrl = FieldNotRequired() access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired() classifiers: list[ClassifierID] = FieldNotRequired() dev: dict | 
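# Illustrative sketch (demo model assumed): the `= None` additions in ProjectGet
# are required, not cosmetic — in v2 an `X | None` annotation alone still makes
# the field required.
from pydantic import BaseModel

class ProjectGetDemo(BaseModel):
    state: str | None = None  # optional only because the default is explicit
    dev: dict | None = None

ProjectGetDemo()  # validates; without the explicit defaults this would raise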
None = FieldNotRequired() diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index 0c2bdd07c7fd..0fddd5267b37 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -1,7 +1,7 @@ # mypy: disable-error-code=truthy-function -from typing import Any, ClassVar, Literal, TypeAlias +from typing import Any, Literal, TypeAlias -from pydantic import Field +from pydantic import ConfigDict, Field from ..api_schemas_directorv2.dynamic_services import RetrieveDataOut from ..basic_types import PortInt @@ -62,13 +62,13 @@ class NodeGet(OutputSchema): service_key: ServiceKey = Field( ..., description="distinctive name for the node based on the docker registry path", - example=[ + examples=[ "simcore/services/comp/itis/sleeper", "simcore/services/dynamic/3dviewer", ], ) service_version: ServiceVersion = Field( - ..., description="semantic version number", example=["1.0.0", "0.0.1"] + ..., description="semantic version number", examples=["1.0.0", "0.0.1"] ) service_host: str = Field( ..., @@ -90,9 +90,8 @@ class NodeGet(OutputSchema): description="the service message", ) user_id: str = Field(..., description="the user that started the service") - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ # computational { @@ -106,7 +105,7 @@ class Config: "service_basepath": "/x/E1O2E-LAH", "service_state": "pending", "service_message": "no suitable node (insufficient resources on 1 node)", - "user_id": 123, + "user_id": "123", }, # dynamic { @@ -120,10 +119,11 @@ class Config: "service_basepath": "/x/E1O2E-LAH", "service_state": "pending", "service_message": "no suitable node (insufficient resources on 1 node)", - "user_id": 123, + "user_id": "123", }, ] } + ) class NodeGetIdle(OutputSchema): @@ -134,30 +134,32 @@ class NodeGetIdle(OutputSchema): def from_node_id(cls, node_id: NodeID) -> "NodeGetIdle": return cls(service_state="idle", service_uuid=node_id) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "service_state": "idle", } } + ) class NodeGetUnknown(OutputSchema): service_state: Literal["unknown"] service_uuid: NodeID - @classmethod - def from_node_id(cls, node_id: NodeID) -> "NodeGetUnknown": - return cls(service_state="unknown", service_uuid=node_id) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "service_state": "unknown", } } + ) + + @classmethod + def from_node_id(cls, node_id: NodeID) -> "NodeGetUnknown": + return cls(service_state="unknown", service_uuid=node_id) class NodeOutputs(InputSchemaWithoutCamelCase): @@ -169,5 +171,4 @@ class NodeRetrieve(InputSchemaWithoutCamelCase): class NodeRetrieved(RetrieveDataOut): - class Config(OutputSchema.Config): - ... 
+ model_config = OutputSchema.model_config diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py index df38c862900c..6582542525b7 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py @@ -15,19 +15,16 @@ class _ProjectIOBase(BaseModel): class ProjectInputUpdate(_ProjectIOBase): - class Config(InputSchemaWithoutCamelCase): - ... + model_config = InputSchemaWithoutCamelCase.model_config class ProjectInputGet(OutputSchema, _ProjectIOBase): label: str - class Config(InputSchemaWithoutCamelCase): - ... + model_config = InputSchemaWithoutCamelCase.model_config class ProjectOutputGet(_ProjectIOBase): label: str - class Config(OutputSchema): - ... + model_config = OutputSchema.model_config diff --git a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py index fa150f9ffc6b..8242105f55a9 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py @@ -1,7 +1,7 @@ from datetime import datetime from decimal import Decimal -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from ..projects import ProjectID from ..projects_nodes_io import NodeID @@ -95,9 +95,10 @@ class CreatePricingPlanBodyParams(InputSchema): classification: PricingPlanClassification pricing_plan_key: str - class Config: - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, + ) class UpdatePricingPlanBodyParams(InputSchema): @@ -105,9 +106,10 @@ class UpdatePricingPlanBodyParams(InputSchema): description: str is_active: bool - class Config: - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, + ) class CreatePricingUnitBodyParams(InputSchema): @@ -118,9 +120,10 @@ class CreatePricingUnitBodyParams(InputSchema): cost_per_unit: Decimal comment: str - class Config: - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, + ) class UpdatePricingUnitBodyParams(InputSchema): @@ -130,15 +133,17 @@ class UpdatePricingUnitBodyParams(InputSchema): specific_info: SpecificInfo pricing_unit_cost_update: PricingUnitCostUpdate | None - class Config: - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, + ) class ConnectServiceToPricingPlanBodyParams(InputSchema): service_key: ServiceKey service_version: ServiceVersion - class Config: - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace=True, + str_max_length=200, + ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py index af0aa61ac809..f9ebbd9fb2d2 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py @@ -1,8 +1,8 @@ from datetime import datetime from decimal import 
Decimal -from typing import Any, ClassVar, Literal, TypeAlias +from typing import Literal, TypeAlias -from pydantic import Field, HttpUrl, validator +from pydantic import ConfigDict, Field, HttpUrl, field_validator from ..basic_types import AmountDecimal, IDStr, NonNegativeDecimal from ..users import GroupID @@ -91,8 +91,8 @@ class PaymentMethodInitiated(OutputSchema): ..., description="Link to external site that holds the payment submission form" ) - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "wallet_id": 1, @@ -101,6 +101,7 @@ class Config(OutputSchema.Config): } ] } + ) class PaymentMethodTransaction(OutputSchema): @@ -109,8 +110,8 @@ class PaymentMethodTransaction(OutputSchema): payment_method_id: PaymentMethodID state: Literal["PENDING", "SUCCESS", "FAILED", "CANCELED"] - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "walletId": 1, @@ -119,6 +120,7 @@ class Config(OutputSchema.Config): } ] } + ) class PaymentMethodGet(OutputSchema): @@ -135,8 +137,8 @@ class PaymentMethodGet(OutputSchema): description="If true, this payment-method is used for auto-recharge", ) - class Config(OutputSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "idr": "pm_1234567890", @@ -157,6 +159,7 @@ class Config(OutputSchema.Config): }, ], } + ) # @@ -194,7 +197,7 @@ class ReplaceWalletAutoRecharge(InputSchema): top_up_amount_in_usd: NonNegativeDecimal monthly_limit_in_usd: NonNegativeDecimal | None - @validator("monthly_limit_in_usd") + @field_validator("monthly_limit_in_usd") @classmethod def _monthly_limit_greater_than_top_up(cls, v, values): top_up = values["top_up_amount_in_usd"] diff --git a/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py b/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py index 0ba98ab4ec31..32f17200ee4c 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py @@ -4,7 +4,7 @@ from models_library.basic_types import IDStr from models_library.users import GroupID from models_library.workspaces import WorkspaceID -from pydantic import Extra, PositiveInt +from pydantic import ConfigDict, PositiveInt from ..access_rights import AccessRights from ._base import InputSchema, OutputSchema @@ -31,8 +31,7 @@ class CreateWorkspaceBodyParams(InputSchema): description: str | None = None thumbnail: str | None = None - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class PutWorkspaceBodyParams(InputSchema): @@ -40,5 +39,4 @@ class PutWorkspaceBodyParams(InputSchema): description: str | None = None thumbnail: str | None = None - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index eb202fa188b8..b65c0fd1fe19 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -46,10 +46,12 @@ # Storage basic file ID SIMCORE_S3_FILE_ID_RE = rf"^(api|({UUID_RE_BASE}))\/({UUID_RE_BASE})\/(.+)$" -SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)/$" 
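# Illustrative sketch (demo model assumed): a caveat around the wallets validator
# above — in v2 the second argument of a field validator is a ValidationInfo
# object, so v1-style `values["..."]` subscripting no longer works; read
# previously-validated fields from info.data instead.
from pydantic import BaseModel, ValidationInfo, field_validator

class AutoRechargeDemo(BaseModel):
    top_up_amount_in_usd: int
    monthly_limit_in_usd: int | None = None

    @field_validator("monthly_limit_in_usd")
    @classmethod
    def _limit_covers_top_up(cls, v, info: ValidationInfo):
        if v is not None and v < info.data["top_up_amount_in_usd"]:
            msg = "monthly limit must be at least the top-up amount"
            raise ValueError(msg)
        return v

AutoRechargeDemo(top_up_amount_in_usd=10, monthly_limit_in_usd=20)  # ok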
+SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)\/$" # S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html] -S3_BUCKET_NAME_RE = r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$" +S3_BUCKET_NAME_RE = re.compile( + r"^(?!xn--)[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$(? str: + if cls.curtail_length and len(__input_value) > cls.curtail_length: + __input_value = __input_value[: cls.curtail_length] + return cls(__input_value) + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + return core_schema.no_info_after_validator_function( + cls._validate, + core_schema.str_schema( + pattern=cls.pattern, + min_length=cls.min_length, + max_length=cls.max_length, + strip_whitespace=cls.strip_whitespace, + ), + ) + + class IDStr(ConstrainedStr): strip_whitespace = True min_length = 1 @@ -185,5 +182,4 @@ class BuildTargetEnum(StrEnum): DEVELOPMENT = "development" -class KeyIDStr(ConstrainedStr): - regex = re.compile(PROPERTY_KEY_RE) +KeyIDStr = Annotated[str, StringConstraints(pattern=PROPERTY_KEY_RE)] diff --git a/packages/models-library/src/models_library/boot_options.py b/packages/models-library/src/models_library/boot_options.py index ec1aabd546b7..52756bf10971 100644 --- a/packages/models-library/src/models_library/boot_options.py +++ b/packages/models-library/src/models_library/boot_options.py @@ -1,6 +1,4 @@ -from typing import Any, ClassVar - -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, ValidationInfo, field_validator from typing_extensions import TypedDict from .basic_types import EnvVarKey @@ -17,17 +15,17 @@ class BootOption(BaseModel): default: str items: dict[str, BootChoice] - @validator("items") + @field_validator("items") @classmethod - def ensure_default_included(cls, v, values): - default = values["default"] + def ensure_default_included(cls, v, info: ValidationInfo): + default = info.data["default"] if default not in v: msg = f"Expected default={default} to be present a key of items={v}" raise ValueError(msg) return v - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "label": "Boot mode", @@ -61,6 +59,7 @@ class Config: }, ] } + ) BootOptions = dict[EnvVarKey, BootOption] diff --git a/packages/models-library/src/models_library/callbacks_mapping.py b/packages/models-library/src/models_library/callbacks_mapping.py index 9e4e88214cef..498766ed7502 100644 --- a/packages/models-library/src/models_library/callbacks_mapping.py +++ b/packages/models-library/src/models_library/callbacks_mapping.py @@ -1,7 +1,7 @@ from collections.abc import Sequence -from typing import Any, ClassVar, Final +from typing import Final -from pydantic import BaseModel, Extra, Field, NonNegativeFloat, validator +from pydantic import BaseModel, ConfigDict, Field, NonNegativeFloat, field_validator INACTIVITY_TIMEOUT_CAP: Final[NonNegativeFloat] = 5 TIMEOUT_MIN: Final[NonNegativeFloat] = 1 @@ -15,15 +15,15 @@ class UserServiceCommand(BaseModel): timeout: NonNegativeFloat = Field( ..., description="after this interval the command will be timed-out" ) - - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ {"service": "rt-web", "command": "ls", "timeout": 1}, {"service": "s4l-core", "command": ["ls", "-lah"], "timeout": 1}, ] - } + }, + ) class CallbacksMapping(BaseModel): @@ -47,24 +47,9 @@ 
class CallbacksMapping(BaseModel): ), ) - @validator("inactivity") - @classmethod - def ensure_inactivity_timeout_is_capped( - cls, v: UserServiceCommand - ) -> UserServiceCommand: - if v is not None and ( - v.timeout < TIMEOUT_MIN or v.timeout > INACTIVITY_TIMEOUT_CAP - ): - msg = ( - f"Constraint not respected for inactivity timeout={v.timeout}: " - f"interval=({TIMEOUT_MIN}, {INACTIVITY_TIMEOUT_CAP})" - ) - raise ValueError(msg) - return v - - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { # empty validates @@ -73,21 +58,37 @@ class Config: "metrics": None, "before_shutdown": [], }, - {"metrics": UserServiceCommand.Config.schema_extra["examples"][0]}, + {"metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0]}, # type: ignore [index] { - "metrics": UserServiceCommand.Config.schema_extra["examples"][0], + "metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "before_shutdown": [ - UserServiceCommand.Config.schema_extra["examples"][0], - UserServiceCommand.Config.schema_extra["examples"][1], + UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + UserServiceCommand.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] ], }, { - "metrics": UserServiceCommand.Config.schema_extra["examples"][0], + "metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "before_shutdown": [ - UserServiceCommand.Config.schema_extra["examples"][0], - UserServiceCommand.Config.schema_extra["examples"][1], + UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + UserServiceCommand.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] ], - "inactivity": UserServiceCommand.Config.schema_extra["examples"][0], + "inactivity": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] }, ] - } + }, + ) + + @field_validator("inactivity") + @classmethod + def ensure_inactivity_timeout_is_capped( + cls, v: UserServiceCommand + ) -> UserServiceCommand: + if v is not None and ( + v.timeout < TIMEOUT_MIN or v.timeout > INACTIVITY_TIMEOUT_CAP + ): + msg = ( + f"Constraint not respected for inactivity timeout={v.timeout}: " + f"interval=({TIMEOUT_MIN}, {INACTIVITY_TIMEOUT_CAP})" + ) + raise ValueError(msg) + return v diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 1856dc5c287d..c98ea29757ae 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -1,16 +1,16 @@ from enum import auto from pathlib import Path -from typing import Any, ClassVar, Final, Literal, TypeAlias +from typing import Final, Literal, TypeAlias from pydantic import ( AnyUrl, BaseModel, - Extra, + ConfigDict, Field, HttpUrl, SecretStr, - root_validator, - validator, + field_validator, + model_validator, ) from pydantic.types import NonNegativeInt @@ -32,8 +32,7 @@ class ClusterAccessRights(BaseModel): write: bool = Field(..., description="allows to modify the cluster") delete: bool = Field(..., description="allows to delete a cluster") - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") CLUSTER_ADMIN_RIGHTS = ClusterAccessRights(read=True, write=True, delete=True) @@ -45,9 +44,7 @@ class 
Config: class BaseAuthentication(BaseModel): type: str - class Config: - frozen = True - extra = Extra.forbid + model_config = ConfigDict(frozen=True, extra="forbid") class SimpleAuthentication(BaseAuthentication): @@ -55,8 +52,8 @@ class SimpleAuthentication(BaseAuthentication): username: str password: SecretStr - class Config(BaseAuthentication.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "type": "simple", @@ -65,39 +62,40 @@ class Config(BaseAuthentication.Config): }, ] } + ) class KerberosAuthentication(BaseAuthentication): type: Literal["kerberos"] = "kerberos" - # NOTE: the entries here still need to be defined - class Config(BaseAuthentication.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "type": "kerberos", }, ] } + ) class JupyterHubTokenAuthentication(BaseAuthentication): type: Literal["jupyterhub"] = "jupyterhub" api_token: str - class Config(BaseAuthentication.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ {"type": "jupyterhub", "api_token": "some_jupyterhub_token"}, ] } + ) class NoAuthentication(BaseAuthentication): type: Literal["none"] = "none" - class Config(BaseAuthentication.Config): - schema_extra: ClassVar[dict[str, Any]] = {"examples": [{"type": "none"}]} + model_config = ConfigDict(json_schema_extra={"examples": [{"type": "none"}]}) class TLSAuthentication(BaseAuthentication): @@ -106,8 +104,8 @@ class TLSAuthentication(BaseAuthentication): tls_client_cert: Path tls_client_key: Path - class Config(BaseAuthentication.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "type": "tls", @@ -117,6 +115,7 @@ class Config(BaseAuthentication.Config): }, ] } + ) InternalClusterAuthentication: TypeAlias = NoAuthentication | TLSAuthentication @@ -137,6 +136,7 @@ class BaseCluster(BaseModel): default=None, description="url to the image describing this cluster", examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], + validate_default=True, ) endpoint: AnyUrl authentication: ClusterAuthentication = Field( @@ -144,13 +144,11 @@ class BaseCluster(BaseModel): ) access_rights: dict[GroupID, ClusterAccessRights] = Field(default_factory=dict) - _from_equivalent_enums = validator("type", allow_reuse=True, pre=True)( + _from_equivalent_enums = field_validator("type", mode="before")( create_enums_pre_validator(ClusterTypeInModel) ) - class Config: - extra = Extra.forbid - use_enum_values = True + model_config = ConfigDict(extra="forbid", use_enum_values=True) ClusterID: TypeAlias = NonNegativeInt @@ -160,8 +158,8 @@ class Config: class Cluster(BaseCluster): id: ClusterID = Field(..., description="The cluster ID") - class Config(BaseCluster.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "id": DEFAULT_CLUSTER_ID, @@ -196,9 +194,9 @@ class Config(BaseCluster.Config): "endpoint": "https://registry.osparc-development.fake.dev", "authentication": {"type": "kerberos"}, "access_rights": { - 154: CLUSTER_ADMIN_RIGHTS, - 12: CLUSTER_MANAGER_RIGHTS, - 7899: CLUSTER_USER_RIGHTS, + 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item] + 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item] + 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item] }, }, { @@ -213,15 +211,16 @@ class Config(BaseCluster.Config): "api_token": 
"some_fake_token", }, "access_rights": { - 154: CLUSTER_ADMIN_RIGHTS, - 12: CLUSTER_MANAGER_RIGHTS, - 7899: CLUSTER_USER_RIGHTS, + 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item] + 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item] + 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item] }, }, ] } + ) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def check_owner_has_access_rights(cls, values): is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID) diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index 732dfc08197a..b16c4ae13ccc 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -1,18 +1,20 @@ import contextlib import re -from typing import Any, ClassVar, Final +from typing import Annotated, Any, Final, TypeAlias from pydantic import ( BaseModel, ByteSize, - ConstrainedStr, + ConfigDict, Field, + StringConstraints, + TypeAdapter, ValidationError, - parse_obj_as, - root_validator, + model_validator, ) from .basic_regex import DOCKER_GENERIC_TAG_KEY_RE, DOCKER_LABEL_KEY_REGEX +from .basic_types import ConstrainedStr from .generated_models.docker_rest_api import Task from .products import ProductName from .projects import ProjectID @@ -23,16 +25,17 @@ class DockerLabelKey(ConstrainedStr): # NOTE: https://docs.docker.com/config/labels-custom-metadata/#key-format-recommendations # good practice: use reverse DNS notation - regex: re.Pattern[str] | None = DOCKER_LABEL_KEY_REGEX + pattern = DOCKER_LABEL_KEY_REGEX @classmethod def from_key(cls, key: str) -> "DockerLabelKey": return cls(key.lower().replace("_", "-")) -class DockerGenericTag(ConstrainedStr): - # NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description - regex: re.Pattern[str] | None = DOCKER_GENERIC_TAG_KEY_RE +# NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description +DockerGenericTag: TypeAlias = Annotated[ + str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) +] class DockerPlacementConstraint(ConstrainedStr): @@ -60,7 +63,7 @@ class DockerPlacementConstraint(ConstrainedStr): DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[ DockerLabelKey -] = parse_obj_as(DockerLabelKey, "ec2-instance-type") +] = TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type") def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey: @@ -99,7 +102,7 @@ class StandardSimcoreDockerLabels(BaseModel): ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit" ) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]: # NOTE: this is necessary for dy-sidecar and legacy service until they are adjusted @@ -122,7 +125,7 @@ def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]: def _convert_nano_cpus_to_cpus(nano_cpu: str) -> str: with contextlib.suppress(ValidationError): - return f"{parse_obj_as(float, nano_cpu) / (1.0*10**9):.2f}" + return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0*10**9):.2f}" return _UNDEFINED_LABEL_VALUE_INT mapped_values.setdefault( @@ -138,19 +141,19 @@ def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]: """returns a dictionary of strings as required by docker""" return { to_simcore_runtime_docker_label_key(k): f"{v}" - for k, v in sorted(self.dict().items()) + for k, v in sorted(self.model_dump().items()) } @classmethod def 
from_docker_task(cls, docker_task: Task) -> "StandardSimcoreDockerLabels": - assert docker_task.Spec # nosec - assert docker_task.Spec.ContainerSpec # nosec - task_labels = docker_task.Spec.ContainerSpec.Labels or {} - return cls.parse_obj(task_labels) - - class Config: - allow_population_by_field_name = True - schema_extra: ClassVar[dict[str, Any]] = { + assert docker_task.spec # nosec + assert docker_task.spec.container_spec # nosec + task_labels = docker_task.spec.container_spec.labels or {} + return cls.model_validate(task_labels) + + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ "examples": [ # legacy service labels { @@ -219,4 +222,5 @@ class Config: "io.simcore.runtime.user-id": "5", }, ] - } + }, + ) diff --git a/packages/models-library/src/models_library/emails.py b/packages/models-library/src/models_library/emails.py index 80996eed76fa..72835f4c754a 100644 --- a/packages/models-library/src/models_library/emails.py +++ b/packages/models-library/src/models_library/emails.py @@ -1,7 +1,5 @@ -from pydantic import EmailStr +from typing import Annotated, TypeAlias +from pydantic import AfterValidator, EmailStr -class LowerCaseEmailStr(EmailStr): - @classmethod - def validate(cls, value: str) -> str: - return super().validate(value).lower() +LowerCaseEmailStr: TypeAlias = Annotated[str, EmailStr, AfterValidator(str.lower)] diff --git a/packages/models-library/src/models_library/error_codes.py b/packages/models-library/src/models_library/error_codes.py index 2803e3627ab1..06cd14ac8bc5 100644 --- a/packages/models-library/src/models_library/error_codes.py +++ b/packages/models-library/src/models_library/error_codes.py @@ -9,10 +9,9 @@ import re -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Annotated -from pydantic.tools import parse_obj_as -from pydantic.types import constr +from pydantic import StringConstraints, TypeAdapter _LABEL = "OEC:{}" _PATTERN = r"OEC:\d+" @@ -20,11 +19,13 @@ if TYPE_CHECKING: ErrorCodeStr = str else: - ErrorCodeStr = constr(strip_whitespace=True, regex=_PATTERN) + ErrorCodeStr = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=_PATTERN) + ] def create_error_code(exception: BaseException) -> ErrorCodeStr: - return parse_obj_as(ErrorCodeStr, _LABEL.format(id(exception))) + return TypeAdapter(ErrorCodeStr).validate_python(_LABEL.format(id(exception))) def parse_error_code(obj) -> set[ErrorCodeStr]: diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py index 73262e1e647c..829e3865c514 100644 --- a/packages/models-library/src/models_library/folders.py +++ b/packages/models-library/src/models_library/folders.py @@ -3,7 +3,7 @@ from models_library.users import GroupID, UserID from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, Field, PositiveInt +from pydantic import BaseModel, ConfigDict, Field, PositiveInt FolderID: TypeAlias = PositiveInt @@ -32,5 +32,4 @@ class FolderDB(BaseModel): user_id: UserID | None workspace_id: WorkspaceID | None - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/packages/models-library/src/models_library/function_services_catalog/_settings.py b/packages/models-library/src/models_library/function_services_catalog/_settings.py index 3ca4260d8ceb..05812b81879a 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_settings.py +++ 
b/packages/models-library/src/models_library/function_services_catalog/_settings.py @@ -1,7 +1,7 @@ import json import os -from pydantic import BaseSettings +from pydantic_settings import BaseSettings # Expects env var: FUNCTION_SERVICES_AUTHORS='{"OM":{"name": ...}, "EN":{...} }' try: diff --git a/packages/models-library/src/models_library/function_services_catalog/_utils.py b/packages/models-library/src/models_library/function_services_catalog/_utils.py index 4cd1275b5e0b..a58a524d0940 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_utils.py +++ b/packages/models-library/src/models_library/function_services_catalog/_utils.py @@ -14,10 +14,10 @@ "email": "unknown@osparc.io", "affiliation": "unknown", } -EN = Author.parse_obj(AUTHORS.get("EN", _DEFAULT)) -OM = Author.parse_obj(AUTHORS.get("OM", _DEFAULT)) -PC = Author.parse_obj(AUTHORS.get("PC", _DEFAULT)) -WVG = Author.parse_obj(AUTHORS.get("WVG", _DEFAULT)) +EN = Author.model_validate(AUTHORS.get("EN", _DEFAULT)) +OM = Author.model_validate(AUTHORS.get("OM", _DEFAULT)) +PC = Author.model_validate(AUTHORS.get("PC", _DEFAULT)) +WVG = Author.model_validate(AUTHORS.get("WVG", _DEFAULT)) def create_fake_thumbnail_url(label: str) -> str: diff --git a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py index 298ac02c82bd..44bd30e08991 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py @@ -15,7 +15,7 @@ # If this assumption cannot be guaranteed anymore the test must be updated. # -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/demo-units", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py index 0e0554842fb9..2245a8ba3ff8 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py @@ -8,7 +8,7 @@ from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices -META: Final = ServiceMetaDataPublished.parse_obj( +META: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/file-picker", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py index 662cbf327cfe..d59e37735e8c 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py @@ -12,7 +12,7 @@ def create_metadata( ) -> ServiceMetaDataPublished: prefix = prefix or type_name LABEL = f"{type_name.capitalize()} iterator" - return ServiceMetaDataPublished.parse_obj( + return ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/{prefix}-range", diff --git 
a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py index f01993898852..a2be976c6516 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py @@ -13,7 +13,7 @@ LIST_NUMBERS_SCHEMA: dict[str, Any] = schema_of(list[float], title="list[number]") -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/sensitivity", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py index bfde87e52c32..40adb28f3429 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py @@ -7,7 +7,7 @@ # NOTE: DO not mistake with simcore/services/frontend/nodes-group/macros/ # which needs to be redefined. # -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/nodes-group", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py index e0e25b6ee110..d62a4a88dfb6 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py @@ -12,7 +12,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: This is a parametrized node (or param-node in short) """ - meta = ServiceMetaDataPublished.parse_obj( + meta = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/{type_name}", @@ -45,7 +45,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata(type_name="boolean") META_INT: Final = _create_metadata(type_name="integer") META_STR: Final = _create_metadata(type_name="string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/array", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/probes.py b/packages/models-library/src/models_library/function_services_catalog/services/probes.py index e736efb2fb14..4c710a90ade2 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/probes.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/probes.py @@ -7,7 +7,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: - obj: ServiceMetaDataPublished = ServiceMetaDataPublished.parse_obj( + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": 
f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/{type_name}", @@ -38,7 +38,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata("boolean") META_INT: Final = _create_metadata("integer") META_STR: Final = _create_metadata("string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/array", @@ -67,7 +67,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: } ) -META_FILE: Final = ServiceMetaDataPublished.parse_obj( +META_FILE: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/file", diff --git a/packages/models-library/src/models_library/generated_models/docker_rest_api.py b/packages/models-library/src/models_library/generated_models/docker_rest_api.py index 835141ea037e..9083641e0207 100644 --- a/packages/models-library/src/models_library/generated_models/docker_rest_api.py +++ b/packages/models-library/src/models_library/generated_models/docker_rest_api.py @@ -1,18 +1,17 @@ # generated by datamodel-codegen: -# filename: https://docs.docker.com/engine/api/v1.41.yaml -# timestamp: 2022-11-28T14:56:37+00:00 +# filename: https://docs.docker.com/reference/api/engine/version/v1.41.yaml +# timestamp: 2024-10-15T11:03:37+00:00 from __future__ import annotations -from datetime import datetime from enum import Enum from typing import Any -from pydantic import BaseModel, Extra, Field +from pydantic import AwareDatetime, BaseModel, Field, RootModel -class Model(BaseModel): - __root__: Any +class Model(RootModel[Any]): + root: Any class Type(str, Enum): @@ -26,17 +25,23 @@ class Port(BaseModel): An open port on a container """ - IP: str | None = Field( - None, description="Host IP address that the container's port is mapped to" + ip: str | None = Field( + default=None, + alias="IP", + description="Host IP address that the container's port is mapped to", ) - PrivatePort: int = Field(..., description="Port on the container") - PublicPort: int | None = Field(None, description="Port exposed on the host") - Type: Type + private_port: int = Field( + ..., alias="PrivatePort", description="Port on the container" + ) + public_port: int | None = Field( + default=None, alias="PublicPort", description="Port exposed on the host" + ) + type: Type = Field(..., alias="Type") class Type1(str, Enum): """ - The mount type: + The mount type: - `bind` a mount of a file or directory from the host into the container. - `volume` a docker volume with the given `Name`. @@ -53,50 +58,58 @@ class Type1(str, Enum): class MountPoint(BaseModel): """ - MountPoint represents a mount point configuration inside the container. + MountPoint represents a mount point configuration inside the container. This is used for reporting the mountpoints in use by a container. 
""" - Type: Type1 | None = Field( - None, + type: Type1 | None = Field( + default=None, + alias="Type", description="The mount type:\n\n- `bind` a mount of a file or directory from the host into the container.\n- `volume` a docker volume with the given `Name`.\n- `tmpfs` a `tmpfs`.\n- `npipe` a named pipe from the host into the container.\n", - example="volume", + examples=["volume"], ) - Name: str | None = Field( - None, + name: str | None = Field( + default=None, + alias="Name", description="Name is the name reference to the underlying data defined by `Source`\ne.g., the volume name.\n", - example="myvolume", + examples=["myvolume"], ) - Source: str | None = Field( - None, + source: str | None = Field( + default=None, + alias="Source", description="Source location of the mount.\n\nFor volumes, this contains the storage location of the volume (within\n`/var/lib/docker/volumes/`). For bind-mounts, and `npipe`, this contains\nthe source (host) part of the bind-mount. For `tmpfs` mount points, this\nfield is empty.\n", - example="/var/lib/docker/volumes/myvolume/_data", + examples=["/var/lib/docker/volumes/myvolume/_data"], ) - Destination: str | None = Field( - None, + destination: str | None = Field( + default=None, + alias="Destination", description="Destination is the path relative to the container root (`/`) where\nthe `Source` is mounted inside the container.\n", - example="/usr/share/nginx/html/", + examples=["/usr/share/nginx/html/"], ) - Driver: str | None = Field( - None, + driver: str | None = Field( + default=None, + alias="Driver", description="Driver is the volume driver used to create the volume (if it is a volume).\n", - example="local", + examples=["local"], ) - Mode: str | None = Field( - None, + mode: str | None = Field( + default=None, + alias="Mode", description='Mode is a comma separated list of options supplied by the user when\ncreating the bind/volume mount.\n\nThe default is platform-specific (`"z"` on Linux, empty on Windows).\n', - example="z", + examples=["z"], ) - RW: bool | None = Field( - None, + rw: bool | None = Field( + default=None, + alias="RW", description="Whether the mount is mounted writable (read-write).\n", - example=True, + examples=[True], ) - Propagation: str | None = Field( - None, + propagation: str | None = Field( + default=None, + alias="Propagation", description="Propagation describes how mounts are propagated from the host into the\nmount point, and vice-versa. Refer to the [Linux kernel documentation](https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt)\nfor details. 
This field is not used on Windows.\n", - example="", + examples=[""], ) @@ -105,9 +118,9 @@ class DeviceMapping(BaseModel): A device mapping between the host and container """ - PathOnHost: str | None = None - PathInContainer: str | None = None - CgroupPermissions: str | None = None + path_on_host: str | None = Field(default=None, alias="PathOnHost") + path_in_container: str | None = Field(default=None, alias="PathInContainer") + cgroup_permissions: str | None = Field(default=None, alias="CgroupPermissions") class DeviceRequest(BaseModel): @@ -115,25 +128,46 @@ class DeviceRequest(BaseModel): A request for devices to be sent to device drivers """ - Driver: str | None = Field(None, example="nvidia") - Count: int | None = Field(None, example=-1) - DeviceIDs: list[str] | None = Field( - None, example=["0", "1", "GPU-fef8089b-4820-abfc-e83e-94318197576e"] + driver: str | None = Field(default=None, alias="Driver", examples=["nvidia"]) + count: int | None = Field(default=None, alias="Count", examples=[-1]) + device_i_ds: list[str] | None = Field( + default=None, + alias="DeviceIDs", + examples=[["0", "1", "GPU-fef8089b-4820-abfc-e83e-94318197576e"]], ) - Capabilities: list[list[str]] | None = Field( - None, + capabilities: list[list[str]] | None = Field( + default=None, + alias="Capabilities", description="A list of capabilities; an OR list of AND lists of capabilities.\n", - example=[["gpu", "nvidia", "compute"]], + examples=[[["gpu", "nvidia", "compute"]]], ) - Options: dict[str, str] | None = Field( - None, + options: dict[str, str] | None = Field( + default=None, + alias="Options", description="Driver-specific options, specified as a key/value pairs. These options\nare passed directly to the driver.\n", ) class ThrottleDevice(BaseModel): - Path: str | None = Field(None, description="Device path") - Rate: int | None = Field(None, description="Rate", ge=0) + path: str | None = Field(default=None, alias="Path", description="Device path") + rate: int | None = Field(default=None, alias="Rate", description="Rate", ge=0) + + +class Type2(str, Enum): + """ + The mount type. Available types: + + - `bind` Mounts a file or directory from the host into the container. Must exist prior to creating the container. + - `volume` Creates a volume with the given name and options (or uses a pre-existing volume with the same name and options). These are **not** removed when the container is removed. + - `tmpfs` Create a tmpfs with the given options. The mount source cannot be specified for tmpfs. + - `npipe` Mounts a named pipe from the host into the container. Must exist prior to creating the container. + + """ + + bind = "bind" + volume = "volume" + tmpfs = "tmpfs" + npipe = "npipe" class Propagation(str, Enum): @@ -154,12 +188,13 @@ class BindOptions(BaseModel): Optional configuration for the `bind` type. """ - Propagation: Propagation | None = Field( - None, + propagation: Propagation | None = Field( + default=None, + alias="Propagation", description="A propagation mode with the value `[r]private`, `[r]shared`, or `[r]slave`.", ) - NonRecursive: bool | None = Field( - False, description="Disable recursive bind mount." + non_recursive: bool | None = Field( + default=False, alias="NonRecursive", description="Disable recursive bind mount." ) @@ -168,11 +203,15 @@ class DriverConfig(BaseModel): Map of driver specific options """ - Name: str | None = Field( - None, description="Name of the driver to use to create the volume." 
+ name: str | None = Field( + default=None, + alias="Name", + description="Name of the driver to use to create the volume.", ) - Options: dict[str, str] | None = Field( - None, description="key/value map of driver specific options." + options: dict[str, str] | None = Field( + default=None, + alias="Options", + description="key/value map of driver specific options.", ) @@ -181,14 +220,16 @@ class VolumeOptions(BaseModel): Optional configuration for the `volume` type. """ - NoCopy: bool | None = Field( - False, description="Populate volume with data from the target." + no_copy: bool | None = Field( + default=False, + alias="NoCopy", + description="Populate volume with data from the target.", ) - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value metadata." + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." ) - DriverConfig: DriverConfig | None = Field( - None, description="Map of driver specific options" + driver_config: DriverConfig | None = Field( + default=None, alias="DriverConfig", description="Map of driver specific options" ) @@ -197,44 +238,62 @@ class TmpfsOptions(BaseModel): Optional configuration for the `tmpfs` type. """ - SizeBytes: int | None = Field( - None, description="The size for the tmpfs mount in bytes." + size_bytes: int | None = Field( + default=None, + alias="SizeBytes", + description="The size for the tmpfs mount in bytes.", ) - Mode: int | None = Field( - None, description="The permission mode for the tmpfs mount in an integer." + mode: int | None = Field( + default=None, + alias="Mode", + description="The permission mode for the tmpfs mount in an integer.", ) class Mount(BaseModel): - Target: str | None = Field(None, description="Container path.") - Source: str | None = Field( - None, description="Mount source (e.g. a volume name, a host path)." + target: str | None = Field( + default=None, alias="Target", description="Container path." ) - Type: Type1 | None = Field( - None, + source: str | None = Field( + default=None, + alias="Source", + description="Mount source (e.g. a volume name, a host path).", + ) + type: Type2 | None = Field( + default=None, + alias="Type", description="The mount type. Available types:\n\n- `bind` Mounts a file or directory from the host into the container. Must exist prior to creating the container.\n- `volume` Creates a volume with the given name and options (or uses a pre-existing volume with the same name and options). These are **not** removed when the container is removed.\n- `tmpfs` Create a tmpfs with the given options. The mount source cannot be specified for tmpfs.\n- `npipe` Mounts a named pipe from the host into the container. Must exist prior to creating the container.\n", ) - ReadOnly: bool | None = Field( - None, description="Whether the mount should be read-only." + read_only: bool | None = Field( + default=None, + alias="ReadOnly", + description="Whether the mount should be read-only.", ) - Consistency: str | None = Field( - None, + consistency: str | None = Field( + default=None, + alias="Consistency", description="The consistency requirement for the mount: `default`, `consistent`, `cached`, or `delegated`.", ) - BindOptions: BindOptions | None = Field( - None, description="Optional configuration for the `bind` type." 
+ bind_options: BindOptions | None = Field( + default=None, + alias="BindOptions", + description="Optional configuration for the `bind` type.", ) - VolumeOptions: VolumeOptions | None = Field( - None, description="Optional configuration for the `volume` type." + volume_options: VolumeOptions | None = Field( + default=None, + alias="VolumeOptions", + description="Optional configuration for the `volume` type.", ) - TmpfsOptions: TmpfsOptions | None = Field( - None, description="Optional configuration for the `tmpfs` type." + tmpfs_options: TmpfsOptions | None = Field( + default=None, + alias="TmpfsOptions", + description="Optional configuration for the `tmpfs` type.", ) class Name(str, Enum): """ - - Empty string means not to restart + - Empty string means not to restart - `no` Do not automatically restart - `always` Always restart - `unless-stopped` Restart always except when the user has manually stopped the container @@ -242,7 +301,7 @@ class Name(str, Enum): """ - _ = "" + field_ = "" no = "no" always = "always" unless_stopped = "unless-stopped" @@ -251,7 +310,7 @@ class Name(str, Enum): class RestartPolicy(BaseModel): """ - The behavior to apply when the container exits. The default is not to + The behavior to apply when the container exits. The default is not to restart. An ever increasing delay (double the previous delay, starting at 100ms) is @@ -259,25 +318,27 @@ class RestartPolicy(BaseModel): """ - Name: Name | None = Field( - None, + name: Name | None = Field( + default=None, + alias="Name", description="- Empty string means not to restart\n- `no` Do not automatically restart\n- `always` Always restart\n- `unless-stopped` Restart always except when the user has manually stopped the container\n- `on-failure` Restart only when the container exit code is non-zero\n", ) - MaximumRetryCount: int | None = Field( - None, + maximum_retry_count: int | None = Field( + default=None, + alias="MaximumRetryCount", description="If `on-failure` is used, the number of times to retry before giving up.\n", ) class BlkioWeightDeviceItem(BaseModel): - Path: str | None = None - Weight: int | None = Field(None, ge=0) + path: str | None = Field(default=None, alias="Path") + weight: int | None = Field(default=None, alias="Weight", ge=0) class Ulimit(BaseModel): - Name: str | None = Field(None, description="Name of ulimit") - Soft: int | None = Field(None, description="Soft limit") - Hard: int | None = Field(None, description="Hard limit") + name: str | None = Field(default=None, alias="Name", description="Name of ulimit") + soft: int | None = Field(default=None, alias="Soft", description="Soft limit") + hard: int | None = Field(default=None, alias="Hard", description="Hard limit") class Resources(BaseModel): @@ -285,124 +346,168 @@ class Resources(BaseModel): A container's resources (cgroups config, ulimits, etc) """ - CpuShares: int | None = Field( - None, + cpu_shares: int | None = Field( + default=None, + alias="CpuShares", description="An integer value representing this container's relative CPU weight\nversus other containers.\n", ) - Memory: int | None = Field(0, description="Memory limit in bytes.") - CgroupParent: str | None = Field( - None, + memory: int | None = Field( + default=0, alias="Memory", description="Memory limit in bytes." + ) + cgroup_parent: str | None = Field( + default=None, + alias="CgroupParent", description="Path to `cgroups` under which the container's `cgroup` is created. 
If\nthe path is not absolute, the path is considered to be relative to the\n`cgroups` path of the init process. Cgroups are created if they do not\nalready exist.\n", ) - BlkioWeight: int | None = Field( - None, description="Block IO weight (relative weight).", ge=0, le=1000 + blkio_weight: int | None = Field( + default=None, + alias="BlkioWeight", + description="Block IO weight (relative weight).", + ge=0, + le=1000, ) - BlkioWeightDevice: list[BlkioWeightDeviceItem] | None = Field( - None, + blkio_weight_device: list[BlkioWeightDeviceItem] | None = Field( + default=None, + alias="BlkioWeightDevice", description='Block IO weight (relative device weight) in the form:\n\n```\n[{"Path": "device_path", "Weight": weight}]\n```\n', ) - BlkioDeviceReadBps: list[ThrottleDevice] | None = Field( - None, + blkio_device_read_bps: list[ThrottleDevice] | None = Field( + default=None, + alias="BlkioDeviceReadBps", description='Limit read rate (bytes per second) from a device, in the form:\n\n```\n[{"Path": "device_path", "Rate": rate}]\n```\n', ) - BlkioDeviceWriteBps: list[ThrottleDevice] | None = Field( - None, + blkio_device_write_bps: list[ThrottleDevice] | None = Field( + default=None, + alias="BlkioDeviceWriteBps", description='Limit write rate (bytes per second) to a device, in the form:\n\n```\n[{"Path": "device_path", "Rate": rate}]\n```\n', ) - BlkioDeviceReadIOps: list[ThrottleDevice] | None = Field( - None, + blkio_device_read_i_ops: list[ThrottleDevice] | None = Field( + default=None, + alias="BlkioDeviceReadIOps", description='Limit read rate (IO per second) from a device, in the form:\n\n```\n[{"Path": "device_path", "Rate": rate}]\n```\n', ) - BlkioDeviceWriteIOps: list[ThrottleDevice] | None = Field( - None, + blkio_device_write_i_ops: list[ThrottleDevice] | None = Field( + default=None, + alias="BlkioDeviceWriteIOps", description='Limit write rate (IO per second) to a device, in the form:\n\n```\n[{"Path": "device_path", "Rate": rate}]\n```\n', ) - CpuPeriod: int | None = Field( - None, description="The length of a CPU period in microseconds." + cpu_period: int | None = Field( + default=None, + alias="CpuPeriod", + description="The length of a CPU period in microseconds.", ) - CpuQuota: int | None = Field( - None, + cpu_quota: int | None = Field( + default=None, + alias="CpuQuota", description="Microseconds of CPU time that the container can get in a CPU period.\n", ) - CpuRealtimePeriod: int | None = Field( - None, + cpu_realtime_period: int | None = Field( + default=None, + alias="CpuRealtimePeriod", description="The length of a CPU real-time period in microseconds. Set to 0 to\nallocate no time allocated to real-time tasks.\n", ) - CpuRealtimeRuntime: int | None = Field( - None, + cpu_realtime_runtime: int | None = Field( + default=None, + alias="CpuRealtimeRuntime", description="The length of a CPU real-time runtime in microseconds. Set to 0 to\nallocate no time allocated to real-time tasks.\n", ) - CpusetCpus: str | None = Field( - None, + cpuset_cpus: str | None = Field( + default=None, + alias="CpusetCpus", description="CPUs in which to allow execution (e.g., `0-3`, `0,1`).\n", - example="0-3", + examples=["0-3"], ) - CpusetMems: str | None = Field( - None, + cpuset_mems: str | None = Field( + default=None, + alias="CpusetMems", description="Memory nodes (MEMs) in which to allow execution (0-3, 0,1). Only\neffective on NUMA systems.\n", ) - Devices: list[DeviceMapping] | None = Field( - None, description="A list of devices to add to the container." 
+ devices: list[DeviceMapping] | None = Field( + default=None, + alias="Devices", + description="A list of devices to add to the container.", ) - DeviceCgroupRules: list[str] | None = Field( - None, description="a list of cgroup rules to apply to the container" + device_cgroup_rules: list[str] | None = Field( + default=None, + alias="DeviceCgroupRules", + description="a list of cgroup rules to apply to the container", ) - DeviceRequests: list[DeviceRequest] | None = Field( - None, + device_requests: list[DeviceRequest] | None = Field( + default=None, + alias="DeviceRequests", description="A list of requests for devices to be sent to device drivers.\n", ) - KernelMemory: int | None = Field( - None, + kernel_memory: int | None = Field( + default=None, + alias="KernelMemory", description="Kernel memory limit in bytes.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is deprecated as the kernel 5.4 deprecated\n> `kmem.limit_in_bytes`.\n", - example=209715200, + examples=[209715200], ) - KernelMemoryTCP: int | None = Field( - None, description="Hard limit for kernel TCP buffer memory (in bytes)." + kernel_memory_tcp: int | None = Field( + default=None, + alias="KernelMemoryTCP", + description="Hard limit for kernel TCP buffer memory (in bytes).", ) - MemoryReservation: int | None = Field( - None, description="Memory soft limit in bytes." + memory_reservation: int | None = Field( + default=None, + alias="MemoryReservation", + description="Memory soft limit in bytes.", ) - MemorySwap: int | None = Field( - None, + memory_swap: int | None = Field( + default=None, + alias="MemorySwap", description="Total memory limit (memory + swap). Set as `-1` to enable unlimited\nswap.\n", ) - MemorySwappiness: int | None = Field( - None, + memory_swappiness: int | None = Field( + default=None, + alias="MemorySwappiness", description="Tune a container's memory swappiness behavior. Accepts an integer\nbetween 0 and 100.\n", ge=0, le=100, ) - NanoCpus: int | None = Field( - None, description="CPU quota in units of 10<sup>-9</sup> CPUs." + nano_cpus: int | None = Field( + default=None, + alias="NanoCpus", + description="CPU quota in units of 10<sup>-9</sup> CPUs.", ) - OomKillDisable: bool | None = Field( - None, description="Disable OOM Killer for the container." + oom_kill_disable: bool | None = Field( + default=None, + alias="OomKillDisable", + description="Disable OOM Killer for the container.", ) - Init: bool | None = Field( - None, + init: bool | None = Field( + default=None, + alias="Init", description="Run an init inside the container that forwards signals and reaps\nprocesses. This field is omitted if empty, and the default (as\nconfigured on the daemon) is used.\n", ) - PidsLimit: int | None = Field( - None, + pids_limit: int | None = Field( + default=None, + alias="PidsLimit", description="Tune a container's PIDs limit. Set `0` or `-1` for unlimited, or `null`\nto not change.\n", ) - Ulimits: list[Ulimit] | None = Field( - None, + ulimits: list[Ulimit] | None = Field( + default=None, + alias="Ulimits", description='A list of resource limits to set in the container. For example:\n\n```\n{"Name": "nofile", "Soft": 1024, "Hard": 2048}\n```\n', ) - CpuCount: int | None = Field( - None, + cpu_count: int | None = Field( + default=None, + alias="CpuCount", description="The number of usable CPUs (Windows only).\n\nOn Windows Server containers, the processor resource controls are\nmutually exclusive.
The order of precedence is `CPUCount` first, then\n`CPUShares`, and `CPUPercent` last.\n", ) - IOMaximumIOps: int | None = Field( - None, description="Maximum IOps for the container system drive (Windows only)" + io_maximum_i_ops: int | None = Field( + default=None, + alias="IOMaximumIOps", + description="Maximum IOps for the container system drive (Windows only)", ) - IOMaximumBandwidth: int | None = Field( - None, + io_maximum_bandwidth: int | None = Field( + default=None, + alias="IOMaximumBandwidth", description="Maximum IO in bytes per second for the container system drive\n(Windows only).\n", ) @@ -413,44 +518,55 @@ class Limit(BaseModel): """ - NanoCPUs: int | None = Field(None, example=4000000000) - MemoryBytes: int | None = Field(None, example=8272408576) - Pids: int | None = Field( - 0, + nano_cp_us: int | None = Field( + default=None, alias="NanoCPUs", examples=[4000000000] + ) + memory_bytes: int | None = Field( + default=None, alias="MemoryBytes", examples=[8272408576] + ) + pids: int | None = Field( + default=0, + alias="Pids", description="Limits the maximum number of PIDs in the container. Set `0` for unlimited.\n", - example=100, + examples=[100], ) class NamedResourceSpec(BaseModel): - Kind: str | None = None - Value: str | None = None + kind: str | None = Field(default=None, alias="Kind") + value: str | None = Field(default=None, alias="Value") class DiscreteResourceSpec(BaseModel): - Kind: str | None = None - Value: int | None = None + kind: str | None = Field(default=None, alias="Kind") + value: int | None = Field(default=None, alias="Value") class GenericResource(BaseModel): - NamedResourceSpec: NamedResourceSpec | None = None - DiscreteResourceSpec: DiscreteResourceSpec | None = None + named_resource_spec: NamedResourceSpec | None = Field( + default=None, alias="NamedResourceSpec" + ) + discrete_resource_spec: DiscreteResourceSpec | None = Field( + default=None, alias="DiscreteResourceSpec" + ) -class GenericResources(BaseModel): +class GenericResources(RootModel[list[GenericResource]]): """ - User-defined resources can be either Integer resources (e.g, `SSD=3`) or + User-defined resources can be either Integer resources (e.g, `SSD=3`) or String resources (e.g, `GPU=UUID1`). """ - __root__: list[GenericResource] = Field( + root: list[GenericResource] = Field( ..., description="User-defined resources can be either Integer resources (e.g, `SSD=3`) or\nString resources (e.g, `GPU=UUID1`).\n", - example=[ - {"DiscreteResourceSpec": {"Kind": "SSD", "Value": 3}}, - {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID1"}}, - {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID2"}}, + examples=[ + [ + {"DiscreteResourceSpec": {"Kind": "SSD", "Value": 3}}, + {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID1"}}, + {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID2"}}, + ] ], ) @@ -460,31 +576,36 @@ class HealthConfig(BaseModel): A test to perform to check that the container is healthy. """ - Test: list[str] | None = Field( - None, + test: list[str] | None = Field( + default=None, + alias="Test", description='The test to perform. Possible values are:\n\n- `[]` inherit healthcheck from image or parent image\n- `["NONE"]` disable healthcheck\n- `["CMD", args...]` exec arguments directly\n- `["CMD-SHELL", command]` run command with system\'s default shell\n', ) - Interval: int | None = Field( - None, + interval: int | None = Field( + default=None, + alias="Interval", description="The time to wait between checks in nanoseconds. It should be 0 or at\nleast 1000000 (1 ms). 
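
Note: pydantic v2 drops the special `__root__` field; custom root types such as `GenericResources` above now subclass `RootModel` and expose the payload as `.root`. A minimal sketch of the pattern (hypothetical `GenericResourcesSketch`, reusing the example payload shown for the generated model):

```python
from pydantic import RootModel


class GenericResourcesSketch(RootModel[list[dict]]):
    """Hypothetical stand-in: v1 `__root__: list[...]` becomes v2 `root`."""


resources = GenericResourcesSketch.model_validate(
    [{"DiscreteResourceSpec": {"Kind": "SSD", "Value": 3}}]
)
# the parsed list now lives under `.root` instead of `.__root__`
assert resources.root[0]["DiscreteResourceSpec"]["Value"] == 3
```
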
0 means inherit.\n", ) - Timeout: int | None = Field( - None, + timeout: int | None = Field( + default=None, + alias="Timeout", description="The time to wait before considering the check to have hung. It should\nbe 0 or at least 1000000 (1 ms). 0 means inherit.\n", ) - Retries: int | None = Field( - None, + retries: int | None = Field( + default=None, + alias="Retries", description="The number of consecutive failures needed to consider a container as\nunhealthy. 0 means inherit.\n", ) - StartPeriod: int | None = Field( - None, + start_period: int | None = Field( + default=None, + alias="StartPeriod", description="Start period for the container to initialize before starting\nhealth-retries countdown in nanoseconds. It should be 0 or at least\n1000000 (1 ms). 0 means inherit.\n", ) class Status(str, Enum): """ - Status is one of `none`, `starting`, `healthy` or `unhealthy` + Status is one of `none`, `starting`, `healthy` or `unhealthy` - "none" Indicates there is no healthcheck - "starting" Starting indicates that the container is not yet ready @@ -505,22 +626,27 @@ class HealthcheckResult(BaseModel): """ - Start: datetime | None = Field( - None, + start: AwareDatetime | None = Field( + default=None, + alias="Start", description="Date and time at which this check started in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2020-01-04T10:44:24.496525531Z", + examples=["2020-01-04T10:44:24.496525531Z"], ) - End: str | None = Field( - None, + end: str | None = Field( + default=None, + alias="End", description="Date and time at which this check ended in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2020-01-04T10:45:21.364524523Z", + examples=["2020-01-04T10:45:21.364524523Z"], ) - ExitCode: int | None = Field( - None, + exit_code: int | None = Field( + default=None, + alias="ExitCode", description="ExitCode meanings:\n\n- `0` healthy\n- `1` unhealthy\n- `2` reserved (considered unhealthy)\n- other values: error running probe\n", - example=0, + examples=[0], + ) + output: str | None = Field( + default=None, alias="Output", description="Output from last check" ) - Output: str | None = Field(None, description="Output from last check") class Type3(str, Enum): @@ -540,13 +666,13 @@ class LogConfig(BaseModel): The logging configuration for this container """ - Type: Type3 | None = None - Config_: dict[str, str] | None = Field(None, alias="Config") + type: Type3 | None = Field(default=None, alias="Type") + config: dict[str, str] | None = Field(default=None, alias="Config") class CgroupnsMode(str, Enum): """ - cgroup namespace mode for the container. Possible values are: + cgroup namespace mode for the container. Possible values are: - `"private"`: the container runs in its own private cgroup namespace - `"host"`: use the host system's cgroup namespace @@ -560,8 +686,8 @@ class CgroupnsMode(str, Enum): host = "host" -class ConsoleSizeItem(BaseModel): - __root__: int = Field(..., ge=0) +class ConsoleSizeItem(RootModel[int]): + root: int = Field(..., ge=0) class Isolation(str, Enum): @@ -577,7 +703,7 @@ class Isolation(str, Enum): class ContainerConfig(BaseModel): """ - Configuration for a container that is portable between hosts. + Configuration for a container that is portable between hosts. 
When used as `ContainerConfig` field in an image, `ContainerConfig` is an optional field containing the configuration of the container that was last @@ -588,193 +714,388 @@ class ContainerConfig(BaseModel): """ - Hostname: str | None = Field( - None, + hostname: str | None = Field( + default=None, + alias="Hostname", description="The hostname to use for the container, as a valid RFC 1123 hostname.\n", - example="439f4e91bd1d", + examples=["439f4e91bd1d"], + ) + domainname: str | None = Field( + default=None, + alias="Domainname", + description="The domain name to use for the container.\n", ) - Domainname: str | None = Field( - None, description="The domain name to use for the container.\n" + user: str | None = Field( + default=None, + alias="User", + description="The user that commands are run as inside the container.", ) - User: str | None = Field( - None, description="The user that commands are run as inside the container." + attach_stdin: bool | None = Field( + default=False, alias="AttachStdin", description="Whether to attach to `stdin`." ) - AttachStdin: bool | None = Field(False, description="Whether to attach to `stdin`.") - AttachStdout: bool | None = Field( - True, description="Whether to attach to `stdout`." + attach_stdout: bool | None = Field( + default=True, alias="AttachStdout", description="Whether to attach to `stdout`." ) - AttachStderr: bool | None = Field( - True, description="Whether to attach to `stderr`." + attach_stderr: bool | None = Field( + default=True, alias="AttachStderr", description="Whether to attach to `stderr`." ) - ExposedPorts: dict[str, dict[str, Any]] | None = Field( - None, + exposed_ports: dict[str, dict[str, Any]] | None = Field( + default=None, + alias="ExposedPorts", description='An object mapping ports to an empty object in the form:\n\n`{"/": {}}`\n', - example={"80/tcp": {}, "443/tcp": {}}, + examples=[{"80/tcp": {}, "443/tcp": {}}], ) - Tty: bool | None = Field( - False, + tty: bool | None = Field( + default=False, + alias="Tty", description="Attach standard streams to a TTY, including `stdin` if it is not closed.\n", ) - OpenStdin: bool | None = Field(False, description="Open `stdin`") - StdinOnce: bool | None = Field( - False, description="Close `stdin` after one attached client disconnects" + open_stdin: bool | None = Field( + default=False, alias="OpenStdin", description="Open `stdin`" ) - Env: list[str] | None = Field( - None, + stdin_once: bool | None = Field( + default=False, + alias="StdinOnce", + description="Close `stdin` after one attached client disconnects", + ) + env: list[str] | None = Field( + default=None, + alias="Env", description='A list of environment variables to set inside the container in the\nform `["VAR=value", ...]`. 
A variable without `=` is removed from the\nenvironment, rather than to have an empty value.\n', - example=["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"], + examples=[ + ["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"] + ], ) - Cmd: list[str] | None = Field( - None, + cmd: list[str] | None = Field( + default=None, + alias="Cmd", description="Command to run specified as a string or an array of strings.\n", - example=["/bin/sh"], - ) - Healthcheck: HealthConfig | None = None - ArgsEscaped: bool | None = Field( - False, description="Command is already escaped (Windows only)", example=False - ) - Image: str | None = Field( - None, + examples=[["/bin/sh"]], + ) + healthcheck: HealthConfig | None = Field(default=None, alias="Healthcheck") + args_escaped: bool | None = Field( + default=False, + alias="ArgsEscaped", + description="Command is already escaped (Windows only)", + examples=[False], + ) + image: str | None = Field( + default=None, + alias="Image", description="The name (or reference) of the image to use when creating the container,\nor which was used when the container was created.\n", - example="example-image:1.0", + examples=["example-image:1.0"], ) - Volumes: dict[str, dict[str, Any]] | None = Field( - None, + volumes: dict[str, dict[str, Any]] | None = Field( + default=None, + alias="Volumes", description="An object mapping mount point paths inside the container to empty\nobjects.\n", ) - WorkingDir: str | None = Field( - None, + working_dir: str | None = Field( + default=None, + alias="WorkingDir", description="The working directory for commands to run in.", - example="/public/", + examples=["/public/"], ) - Entrypoint: list[str] | None = Field( - None, + entrypoint: list[str] | None = Field( + default=None, + alias="Entrypoint", description='The entry point for the container as a string or an array of strings.\n\nIf the array consists of exactly one empty string (`[""]`) then the\nentry point is reset to system default (i.e., the entry point used by\ndocker when there is no `ENTRYPOINT` instruction in the `Dockerfile`).\n', - example=[], + examples=[[]], + ) + network_disabled: bool | None = Field( + default=None, + alias="NetworkDisabled", + description="Disable networking for the container.", ) - NetworkDisabled: bool | None = Field( - None, description="Disable networking for the container." + mac_address: str | None = Field( + default=None, alias="MacAddress", description="MAC address of the container." 
) - MacAddress: str | None = Field(None, description="MAC address of the container.") - OnBuild: list[str] | None = Field( - None, + on_build: list[str] | None = Field( + default=None, + alias="OnBuild", description="`ONBUILD` metadata that were defined in the image's `Dockerfile`.\n", - example=[], + examples=[[]], ) - Labels: dict[str, str] | None = Field( - None, + labels: dict[str, str] | None = Field( + default=None, + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], ) - StopSignal: str | None = Field( - None, + stop_signal: str | None = Field( + default=None, + alias="StopSignal", description="Signal to stop a container as a string or unsigned integer.\n", - example="SIGTERM", + examples=["SIGTERM"], ) - StopTimeout: int | None = Field( - 10, description="Timeout to stop a container in seconds." + stop_timeout: int | None = Field( + default=10, + alias="StopTimeout", + description="Timeout to stop a container in seconds.", ) - Shell: list[str] | None = Field( - None, + shell: list[str] | None = Field( + default=None, + alias="Shell", description="Shell for when `RUN`, `CMD`, and `ENTRYPOINT` uses a shell.\n", - example=["/bin/sh", "-c"], + examples=[["/bin/sh", "-c"]], ) -class Address(BaseModel): +class ImageConfig(BaseModel): """ - Address represents an IPv4 or IPv6 IP address. + Configuration of the image. These fields are used as defaults + when starting a container from the image. + """ - Addr: str | None = Field(None, description="IP address.") - PrefixLen: int | None = Field(None, description="Mask length of the IP address.") + hostname: str | None = Field( + default=None, + alias="Hostname", + description="The hostname to use for the container, as a valid RFC 1123 hostname.\n\n
\n\n> **Note**: this field is always empty and must not be used.\n", + examples=[""], + ) + domainname: str | None = Field( + default=None, + alias="Domainname", + description="The domain name to use for the container.\n\n
\n\n> **Note**: this field is always empty and must not be used.\n", + examples=[""], + ) + user: str | None = Field( + default=None, + alias="User", + description="The user that commands are run as inside the container.", + examples=["web:web"], + ) + attach_stdin: bool | None = Field( + default=False, + alias="AttachStdin", + description="Whether to attach to `stdin`.\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + attach_stdout: bool | None = Field( + default=False, + alias="AttachStdout", + description="Whether to attach to `stdout`.\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + attach_stderr: bool | None = Field( + default=False, + alias="AttachStderr", + description="Whether to attach to `stderr`.\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + exposed_ports: dict[str, dict[str, Any]] | None = Field( + default=None, + alias="ExposedPorts", + description='An object mapping ports to an empty object in the form:\n\n`{"/": {}}`\n', + examples=[{"80/tcp": {}, "443/tcp": {}}], + ) + tty: bool | None = Field( + default=False, + alias="Tty", + description="Attach standard streams to a TTY, including `stdin` if it is not closed.\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + open_stdin: bool | None = Field( + default=False, + alias="OpenStdin", + description="Open `stdin`\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + stdin_once: bool | None = Field( + default=False, + alias="StdinOnce", + description="Close `stdin` after one attached client disconnects.\n\n
\n\n> **Note**: this field is always false and must not be used.\n", + examples=[False], + ) + env: list[str] | None = Field( + default=None, + alias="Env", + description='A list of environment variables to set inside the container in the\nform `["VAR=value", ...]`. A variable without `=` is removed from the\nenvironment, rather than to have an empty value.\n', + examples=[ + ["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"] + ], + ) + cmd: list[str] | None = Field( + default=None, + alias="Cmd", + description="Command to run specified as a string or an array of strings.\n", + examples=[["/bin/sh"]], + ) + healthcheck: HealthConfig | None = Field(default=None, alias="Healthcheck") + args_escaped: bool | None = Field( + default=False, + alias="ArgsEscaped", + description="Command is already escaped (Windows only)", + examples=[False], + ) + image: str | None = Field( + default="", + alias="Image", + description="The name (or reference) of the image to use when creating the container,\nor which was used when the container was created.\n\n
\n\n> **Note**: this field is always empty and must not be used.\n", + examples=[""], + ) + volumes: dict[str, dict[str, Any]] | None = Field( + default=None, + alias="Volumes", + description="An object mapping mount point paths inside the container to empty\nobjects.\n", + examples=[{"/app/data": {}, "/app/config": {}}], + ) + working_dir: str | None = Field( + default=None, + alias="WorkingDir", + description="The working directory for commands to run in.", + examples=["/public/"], + ) + entrypoint: list[str] | None = Field( + default=None, + alias="Entrypoint", + description='The entry point for the container as a string or an array of strings.\n\nIf the array consists of exactly one empty string (`[""]`) then the\nentry point is reset to system default (i.e., the entry point used by\ndocker when there is no `ENTRYPOINT` instruction in the `Dockerfile`).\n', + examples=[[]], + ) + network_disabled: bool | None = Field( + default=False, + alias="NetworkDisabled", + description="Disable networking for the container.\n\n
\n\n> **Note**: this field is always omitted and must not be used.\n", + examples=[False], + ) + mac_address: str | None = Field( + default="", + alias="MacAddress", + description="MAC address of the container.\n\n
\n\n> **Note**: this field is always omitted and must not be used.\n", + examples=[""], + ) + on_build: list[str] | None = Field( + default=None, + alias="OnBuild", + description="`ONBUILD` metadata that were defined in the image's `Dockerfile`.\n", + examples=[[]], + ) + labels: dict[str, str] | None = Field( + default=None, + alias="Labels", + description="User-defined key/value metadata.", + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], + ) + stop_signal: str | None = Field( + default=None, + alias="StopSignal", + description="Signal to stop a container as a string or unsigned integer.\n", + examples=["SIGTERM"], + ) + stop_timeout: int | None = Field( + default=10, + alias="StopTimeout", + description="Timeout to stop a container in seconds.\n\n
\n\n> **Note**: this field is always omitted and must not be used.\n", + ) + shell: list[str] | None = Field( + default=None, + alias="Shell", + description="Shell for when `RUN`, `CMD`, and `ENTRYPOINT` uses a shell.\n", + examples=[["/bin/sh", "-c"]], + ) -class PortMap(BaseModel): +class Address(BaseModel): """ - PortMap describes the mapping of container ports to host ports, using the - container's port-number and protocol as key in the format `/`, - for example, `80/udp`. - - If a container's port is mapped for multiple protocols, separate entries - are added to the mapping table. - + Address represents an IPv4 or IPv6 IP address. """ - class Config: - extra = Extra.allow + addr: str | None = Field(default=None, alias="Addr", description="IP address.") + prefix_len: int | None = Field( + default=None, alias="PrefixLen", description="Mask length of the IP address." + ) class PortBinding(BaseModel): """ - PortBinding represents a binding between a host IP address and a host + PortBinding represents a binding between a host IP address and a host port. """ - HostIp: str | None = Field( - None, + host_ip: str | None = Field( + default=None, + alias="HostIp", description="Host IP address that the container's port is mapped to.", - example="127.0.0.1", + examples=["127.0.0.1"], ) - HostPort: str | None = Field( - None, + host_port: str | None = Field( + default=None, + alias="HostPort", description="Host port number that the container's port is mapped to.", - example="4443", + examples=["4443"], ) class GraphDriverData(BaseModel): """ - Information about the storage driver used to store the container's and + Information about the storage driver used to store the container's and image's filesystem. """ - Name: str = Field( - ..., description="Name of the storage driver.", example="overlay2" + name: str = Field( + ..., + alias="Name", + description="Name of the storage driver.", + examples=["overlay2"], ) - Data: dict[str, str] = Field( + data: dict[str, str] = Field( ..., + alias="Data", description="Low-level storage metadata, provided as key/value pairs.\n\nThis information is driver-specific, and depends on the storage-driver\nin use, and should be used for informational purposes only.\n", - example={ - "MergedDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/merged", - "UpperDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/diff", - "WorkDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/work", - }, + examples=[ + { + "MergedDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/merged", + "UpperDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/diff", + "WorkDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/work", + } + ], ) -class RootFS(BaseModel): +class RootFs(BaseModel): """ Information about the image's RootFS, including the layer IDs. 
""" - Type: str = Field(..., example="layers") - Layers: list[str] | None = Field( - None, - example=[ - "sha256:1834950e52ce4d5a88a1bbd131c537f4d0e56d10ff0dd69e66be3b7dfa9df7e6", - "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + type: str = Field(..., alias="Type", examples=["layers"]) + layers: list[str] | None = Field( + default=None, + alias="Layers", + examples=[ + [ + "sha256:1834950e52ce4d5a88a1bbd131c537f4d0e56d10ff0dd69e66be3b7dfa9df7e6", + "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + ] ], ) class Metadata(BaseModel): """ - Additional metadata of the image in the local cache. This information + Additional metadata of the image in the local cache. This information is local to the daemon, and not part of the image itself. """ - LastTagTime: str | None = Field( - None, + last_tag_time: str | None = Field( + default=None, + alias="LastTagTime", description="Date and time at which the image was last tagged in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n\nThis information is only available if the image was tagged locally,\nand omitted otherwise.\n", - example="2022-02-28T14:40:02.623929178Z", + examples=["2022-02-28T14:40:02.623929178Z"], ) @@ -784,163 +1105,206 @@ class ImageInspect(BaseModel): """ - Id: str | None = Field( - None, + id: str | None = Field( + default=None, + alias="Id", description="ID is the content-addressable ID of an image.\n\nThis identifier is a content-addressable digest calculated from the\nimage's configuration (which includes the digests of layers used by\nthe image).\n\nNote that this digest differs from the `RepoDigests` below, which\nholds digests of image manifests that reference the image.\n", - example="sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710", - ) - RepoTags: list[str] | None = Field( - None, - description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same imagem and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n', - example=[ - "example:1.0", - "example:latest", - "example:stable", - "internal.registry.example.com:5000/example:1.0", + examples=[ + "sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710" + ], + ) + repo_tags: list[str] | None = Field( + default=None, + alias="RepoTags", + description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same image, and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n', + examples=[ + [ + "example:1.0", + "example:latest", + "example:stable", + "internal.registry.example.com:5000/example:1.0", + ] ], ) - RepoDigests: list[str] | None = Field( - None, + repo_digests: list[str] | None = Field( + default=None, + alias="RepoDigests", description="List of content-addressable digests of locally available image manifests\nthat the image is referenced from. 
Multiple manifests can refer to the\nsame image.\n\nThese digests are usually only available if the image was either pulled\nfrom a registry, or if the image was pushed to a registry, which is when\nthe manifest is generated and its digest calculated.\n", - example=[ - "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb", - "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578", + examples=[ + [ + "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb", + "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578", + ] ], ) - Parent: str | None = Field( - None, + parent: str | None = Field( + default=None, + alias="Parent", description="ID of the parent image.\n\nDepending on how the image was created, this field may be empty and\nis only set for images that were built/created locally. This field\nis empty if the image was pulled from an image registry.\n", - example="", + examples=[""], ) - Comment: str | None = Field( - None, + comment: str | None = Field( + default=None, + alias="Comment", description="Optional message that was set when committing or importing the image.\n", - example="", + examples=[""], ) - Created: str | None = Field( - None, + created: str | None = Field( + default=None, + alias="Created", description="Date and time at which the image was created, formatted in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2022-02-04T21:20:12.497794809Z", + examples=["2022-02-04T21:20:12.497794809Z"], ) - Container: str | None = Field( - None, + container: str | None = Field( + default=None, + alias="Container", description="The ID of the container that was used to create the image.\n\nDepending on how the image was created, this field may be empty.\n", - example="65974bc86f1770ae4bff79f651ebdbce166ae9aada632ee3fa9af3a264911735", + examples=["65974bc86f1770ae4bff79f651ebdbce166ae9aada632ee3fa9af3a264911735"], ) - ContainerConfig: ContainerConfig | None = None - DockerVersion: str | None = Field( - None, + container_config: ContainerConfig | None = Field( + default=None, alias="ContainerConfig" + ) + docker_version: str | None = Field( + default=None, + alias="DockerVersion", description="The version of Docker that was used to build the image.\n\nDepending on how the image was created, this field may be empty.\n", - example="20.10.7", + examples=["20.10.7"], ) - Author: str | None = Field( - None, + author: str | None = Field( + default=None, + alias="Author", description="Name of the author that was specified when committing the image, or as\nspecified through MAINTAINER (deprecated) in the Dockerfile.\n", - example="", + examples=[""], ) - Config_: ContainerConfig | None = Field(None, alias="Config") # type: ignore - Architecture: str | None = Field( - None, + config: ImageConfig | None = Field(default=None, alias="Config") + architecture: str | None = Field( + default=None, + alias="Architecture", description="Hardware CPU architecture that the image runs on.\n", - example="arm", + examples=["arm"], ) - Variant: str | None = Field( - None, + variant: str | None = Field( + default=None, + alias="Variant", description="CPU architecture variant (presently ARM-only).\n", - example="v7", + examples=["v7"], ) - Os: str | None = Field( - None, + os: str | None = Field( + default=None, + alias="Os", description="Operating System the image is built to run on.\n", - 
example="linux", + examples=["linux"], ) - OsVersion: str | None = Field( - None, + os_version: str | None = Field( + default=None, + alias="OsVersion", description="Operating System version the image is built to run on (especially\nfor Windows).\n", - example="", + examples=[""], ) - Size: int | None = Field( - None, + size: int | None = Field( + default=None, + alias="Size", description="Total size of the image including all layers it is composed of.\n", - example=1239828, + examples=[1239828], ) - VirtualSize: int | None = Field( - None, + virtual_size: int | None = Field( + default=None, + alias="VirtualSize", description="Total size of the image including all layers it is composed of.\n\nIn versions of Docker before v1.10, this field was calculated from\nthe image itself and all of its parent images. Docker v1.10 and up\nstore images self-contained, and no longer use a parent-chain, making\nthis field an equivalent of the Size field.\n\nThis field is kept for backward compatibility, but may be removed in\na future version of the API.\n", - example=1239828, + examples=[1239828], ) - GraphDriver: GraphDriverData | None = None - RootFS: RootFS | None = Field( - None, + graph_driver: GraphDriverData | None = Field(default=None, alias="GraphDriver") + root_fs: RootFs | None = Field( + default=None, + alias="RootFS", description="Information about the image's RootFS, including the layer IDs.\n", ) - Metadata: Metadata | None = Field( - None, + metadata: Metadata | None = Field( + default=None, + alias="Metadata", description="Additional metadata of the image in the local cache. This information\nis local to the daemon, and not part of the image itself.\n", ) class ImageSummary(BaseModel): - Id: str = Field( + id: str = Field( ..., + alias="Id", description="ID is the content-addressable ID of an image.\n\nThis identifier is a content-addressable digest calculated from the\nimage's configuration (which includes the digests of layers used by\nthe image).\n\nNote that this digest differs from the `RepoDigests` below, which\nholds digests of image manifests that reference the image.\n", - example="sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710", + examples=[ + "sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710" + ], ) - ParentId: str = Field( + parent_id: str = Field( ..., + alias="ParentId", description="ID of the parent image.\n\nDepending on how the image was created, this field may be empty and\nis only set for images that were built/created locally. 
This field\nis empty if the image was pulled from an image registry.\n", - example="", + examples=[""], ) - RepoTags: list[str] = Field( + repo_tags: list[str] = Field( ..., - description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same imagem and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n', - example=[ - "example:1.0", - "example:latest", - "example:stable", - "internal.registry.example.com:5000/example:1.0", + alias="RepoTags", + description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same image, and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n', + examples=[ + [ + "example:1.0", + "example:latest", + "example:stable", + "internal.registry.example.com:5000/example:1.0", + ] ], ) - RepoDigests: list[str] = Field( + repo_digests: list[str] = Field( ..., + alias="RepoDigests", description="List of content-addressable digests of locally available image manifests\nthat the image is referenced from. Multiple manifests can refer to the\nsame image.\n\nThese digests are usually only available if the image was either pulled\nfrom a registry, or if the image was pushed to a registry, which is when\nthe manifest is generated and its digest calculated.\n", - example=[ - "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb", - "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578", + examples=[ + [ + "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb", + "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578", + ] ], ) - Created: int = Field( + created: int = Field( ..., - description="Date and time at which the image was created as a Unix timestamp\n(number of seconds sinds EPOCH).\n", - example="1644009612", + alias="Created", + description="Date and time at which the image was created as a Unix timestamp\n(number of seconds since EPOCH).\n", + examples=["1644009612"], ) - Size: int = Field( + size: int = Field( ..., + alias="Size", description="Total size of the image including all layers it is composed of.\n", - example=172064416, + examples=[172064416], ) - SharedSize: int = Field( + shared_size: int = Field( ..., + alias="SharedSize", description="Total size of image layers that are shared between this image and other\nimages.\n\nThis size is not calculated by default. `-1` indicates that the value\nhas not been set / calculated.\n", - example=1239828, + examples=[1239828], ) - VirtualSize: int = Field( + virtual_size: int = Field( ..., + alias="VirtualSize", description="Total size of the image including all layers it is composed of.\n\nIn versions of Docker before v1.10, this field was calculated from\nthe image itself and all of its parent images. 
Docker v1.10 and up\nstore images self-contained, and no longer use a parent-chain, making\nthis field an equivalent of the Size field.\n\nThis field is kept for backward compatibility, but may be removed in\na future version of the API.\n", - example=172064416, + examples=[172064416], ) - Labels: dict[str, str] = Field( + labels: dict[str, str] = Field( ..., + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], ) - Containers: int = Field( + containers: int = Field( ..., + alias="Containers", description="Number of containers using this image. Includes both stopped and running\ncontainers.\n\nThis size is not calculated by default, and depends on which API endpoint\nis used. `-1` indicates that the value has not been set / calculated.\n", - example=2, + examples=[2], ) @@ -961,7 +1325,7 @@ class ProcessConfig(BaseModel): class Scope(str, Enum): """ - The level at which the volume exists. Either `global` for cluster-wide, + The level at which the volume exists. Either `global` for cluster-wide, or `local` for machine level. """ @@ -972,63 +1336,77 @@ class Scope(str, Enum): class UsageData(BaseModel): """ - Usage details about the volume. This information is used by the + Usage details about the volume. This information is used by the `GET /system/df` endpoint, and omitted in other endpoints. """ - Size: int = Field( + size: int = Field( ..., + alias="Size", description='Amount of disk space used by the volume (in bytes). This information\nis only available for volumes created with the `"local"` volume\ndriver. For volumes created with other volume drivers, this field\nis set to `-1` ("not available")\n', ) - RefCount: int = Field( + ref_count: int = Field( ..., + alias="RefCount", description="The number of containers referencing this volume. 
This field\nis set to `-1` if the reference-count is not available.\n", ) class Volume(BaseModel): - Name: str = Field(..., description="Name of the volume.", example="tardis") - Driver: str = Field( + name: str = Field( + ..., alias="Name", description="Name of the volume.", examples=["tardis"] + ) + driver: str = Field( ..., + alias="Driver", description="Name of the volume driver used by the volume.", - example="custom", + examples=["custom"], ) - Mountpoint: str = Field( + mountpoint: str = Field( ..., + alias="Mountpoint", description="Mount path of the volume on the host.", - example="/var/lib/docker/volumes/tardis", + examples=["/var/lib/docker/volumes/tardis"], ) - CreatedAt: str | None = Field( - None, + created_at: str | None = Field( + default=None, + alias="CreatedAt", description="Date/Time the volume was created.", - example="2016-06-07T20:31:11.853781916Z", + examples=["2016-06-07T20:31:11.853781916Z"], ) - Status: dict[str, dict[str, Any]] | None = Field( - None, + status: dict[str, dict[str, Any]] | None = Field( + default=None, + alias="Status", description='Low-level details about the volume, provided by the volume driver.\nDetails are returned as a map with key/value pairs:\n`{"key":"value","key2":"value2"}`.\n\nThe `Status` field is optional, and is omitted if the volume driver\ndoes not support this feature.\n', - example={"hello": "world"}, + examples=[{"hello": "world"}], ) - Labels: dict[str, str] = Field( + labels: dict[str, str] = Field( ..., + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], ) - Scope: Scope = Field( + scope: Scope = Field( ..., + alias="Scope", description="The level at which the volume exists. Either `global` for cluster-wide,\nor `local` for machine level.\n", - example="local", + examples=["local"], ) - Options: dict[str, str] = Field( + options: dict[str, str] = Field( ..., + alias="Options", description="The driver specific options used when creating the volume.\n", - example={"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}, + examples=[{"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}], ) - UsageData: UsageData | None = Field( - None, + usage_data: UsageData | None = Field( + default=None, + alias="UsageData", description="Usage details about the volume. This information is used by the\n`GET /system/df` endpoint, and omitted in other endpoints.\n", ) @@ -1038,42 +1416,115 @@ class VolumeConfig(BaseModel): Volume configuration """ - Name: str | None = Field( - None, + name: str | None = Field( + default=None, + alias="Name", description="The new volume's name. If not specified, Docker generates a name.\n", - example="tardis", + examples=["tardis"], ) - Driver: str | None = Field( - "local", description="Name of the volume driver to use.", example="custom" + driver: str | None = Field( + default="local", + alias="Driver", + description="Name of the volume driver to use.", + examples=["custom"], ) - DriverOpts: dict[str, str] | None = Field( - None, + driver_opts: dict[str, str] | None = Field( + default=None, + alias="DriverOpts", description="A mapping of driver options and values. 
These options are\npassed directly to the driver and are driver specific.\n", - example={"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}, + examples=[{"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}], ) - Labels: dict[str, str] | None = Field( - None, + labels: dict[str, str] | None = Field( + default=None, + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], + ) + + +class VolumeListResponse(BaseModel): + """ + Volume list response + """ + + volumes: list[Volume] | None = Field( + default=None, alias="Volumes", description="List of volumes" + ) + warnings: list[str] | None = Field( + default=None, + alias="Warnings", + description="Warnings that occurred when fetching the list of volumes.\n", + examples=[[]], + ) + + +class ConfigReference(BaseModel): + """ + The config-only network source to provide the configuration for + this network. + + """ + + network: str | None = Field( + default=None, + alias="Network", + description="The name of the config-only network that provides the network's\nconfiguration. The specified network must be an existing config-only\nnetwork. Only network names are allowed, not network IDs.\n", + examples=["config_only_network_01"], ) class IPAMConfig(BaseModel): - Subnet: str | None = None - IPRange: str | None = None - Gateway: str | None = None - AuxiliaryAddresses: dict[str, str] | None = None + subnet: str | None = Field(default=None, alias="Subnet", examples=["172.20.0.0/16"]) + ip_range: str | None = Field( + default=None, alias="IPRange", examples=["172.20.10.0/24"] + ) + gateway: str | None = Field( + default=None, alias="Gateway", examples=["172.20.10.11"] + ) + auxiliary_addresses: dict[str, str] | None = Field( + default=None, alias="AuxiliaryAddresses" + ) class NetworkContainer(BaseModel): - Name: str | None = None - EndpointID: str | None = None - MacAddress: str | None = None - IPv4Address: str | None = None - IPv6Address: str | None = None + name: str | None = Field(default=None, alias="Name", examples=["container_1"]) + endpoint_id: str | None = Field( + default=None, + alias="EndpointID", + examples=["628cadb8bcb92de107b2a1e516cbffe463e321f548feb37697cce00ad694f21a"], + ) + mac_address: str | None = Field( + default=None, alias="MacAddress", examples=["02:42:ac:13:00:02"] + ) + i_pv4_address: str | None = Field( + default=None, alias="IPv4Address", examples=["172.19.0.2/16"] + ) + i_pv6_address: str | None = Field(default=None, alias="IPv6Address", examples=[""]) + + +class PeerInfo(BaseModel): + """ + PeerInfo represents one peer of an overlay network. 
+ + """ + + name: str | None = Field( + default=None, + alias="Name", + description="ID of the peer-node in the Swarm cluster.", + examples=["6869d7c1732b"], + ) + ip: str | None = Field( + default=None, + alias="IP", + description="IP-address of the peer-node in the Swarm cluster.", + examples=["10.133.77.91"], + ) class Type4(str, Enum): @@ -1096,46 +1547,63 @@ class BuildCache(BaseModel): """ - ID: str | None = Field( - None, + id: str | None = Field( + default=None, + alias="ID", description="Unique ID of the build cache record.\n", - example="ndlpt0hhvkqcdfkputsk4cq9c", + examples=["ndlpt0hhvkqcdfkputsk4cq9c"], ) - Parent: str | None = Field( - None, + parent: str | None = Field( + default=None, + alias="Parent", description="ID of the parent build cache record.\n", - example="hw53o5aio51xtltp5xjp8v7fx", + examples=["hw53o5aio51xtltp5xjp8v7fx"], ) - Type: Type4 | None = Field( - None, description="Cache record type.\n", example="regular" + type: Type4 | None = Field( + default=None, + alias="Type", + description="Cache record type.\n", + examples=["regular"], ) - Description: str | None = Field( - None, + description: str | None = Field( + default=None, + alias="Description", description="Description of the build-step that produced the build cache.\n", - example="mount / from exec /bin/sh -c echo 'Binary::apt::APT::Keep-Downloaded-Packages \"true\";' > /etc/apt/apt.conf.d/keep-cache", + examples=[ + "mount / from exec /bin/sh -c echo 'Binary::apt::APT::Keep-Downloaded-Packages \"true\";' > /etc/apt/apt.conf.d/keep-cache" + ], ) - InUse: bool | None = Field( - None, description="Indicates if the build cache is in use.\n", example=False + in_use: bool | None = Field( + default=None, + alias="InUse", + description="Indicates if the build cache is in use.\n", + examples=[False], ) - Shared: bool | None = Field( - None, description="Indicates if the build cache is shared.\n", example=True + shared: bool | None = Field( + default=None, + alias="Shared", + description="Indicates if the build cache is shared.\n", + examples=[True], ) - Size: int | None = Field( - None, + size: int | None = Field( + default=None, + alias="Size", description="Amount of disk space used by the build cache (in bytes).\n", - example=51, + examples=[51], ) - CreatedAt: str | None = Field( - None, + created_at: str | None = Field( + default=None, + alias="CreatedAt", description="Date and time at which the build cache was created in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2016-08-18T10:44:24.496525531Z", + examples=["2016-08-18T10:44:24.496525531Z"], ) - LastUsedAt: str | None = Field( - None, + last_used_at: str | None = Field( + default=None, + alias="LastUsedAt", description="Date and time at which the build cache was last used in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2017-08-09T07:09:37.632105588Z", + examples=["2017-08-09T07:09:37.632105588Z"], ) - UsageCount: int | None = Field(None, example=26) + usage_count: int | None = Field(default=None, alias="UsageCount", examples=[26]) class ImageID(BaseModel): @@ -1143,7 +1611,7 @@ class ImageID(BaseModel): Image ID or Digest """ - ID: str | None = None + id: str | None = Field(default=None, alias="ID") class ErrorDetail(BaseModel): @@ -1169,7 +1637,7 @@ class IdResponse(BaseModel): Response to an API call that returns just an Id """ - Id: str = Field(..., description="The id of the newly created object.") + id: str = Field(..., alias="Id", description="The id of the 
newly created object.") class EndpointIPAMConfig(BaseModel): @@ -1178,53 +1646,61 @@ class EndpointIPAMConfig(BaseModel): """ - IPv4Address: str | None = Field(None, example="172.20.30.33") - IPv6Address: str | None = Field(None, example="2001:db8:abcd::3033") - LinkLocalIPs: list[str] | None = Field( - None, example=["169.254.34.68", "fe80::3468"] + i_pv4_address: str | None = Field( + default=None, alias="IPv4Address", examples=["172.20.30.33"] + ) + i_pv6_address: str | None = Field( + default=None, alias="IPv6Address", examples=["2001:db8:abcd::3033"] + ) + link_local_i_ps: list[str] | None = Field( + default=None, alias="LinkLocalIPs", examples=[["169.254.34.68", "fe80::3468"]] ) class PluginMount(BaseModel): - Name: str = Field(..., example="some-mount") - Description: str = Field(..., example="This is a mount that's used by the plugin.") - Settable: list[str] - Source: str = Field(..., example="/var/lib/docker/plugins/") - Destination: str = Field(..., example="/mnt/state") - Type: str = Field(..., example="bind") - Options: list[str] = Field(..., example=["rbind", "rw"]) + name: str = Field(..., alias="Name", examples=["some-mount"]) + description: str = Field( + ..., + alias="Description", + examples=["This is a mount that's used by the plugin."], + ) + settable: list[str] = Field(..., alias="Settable") + source: str = Field(..., alias="Source", examples=["/var/lib/docker/plugins/"]) + destination: str = Field(..., alias="Destination", examples=["/mnt/state"]) + type: str = Field(..., alias="Type", examples=["bind"]) + options: list[str] = Field(..., alias="Options", examples=[["rbind", "rw"]]) class PluginDevice(BaseModel): - Name: str - Description: str - Settable: list[str] - Path: str = Field(..., example="/dev/fuse") + name: str = Field(..., alias="Name") + description: str = Field(..., alias="Description") + settable: list[str] = Field(..., alias="Settable") + path: str = Field(..., alias="Path", examples=["/dev/fuse"]) class PluginEnv(BaseModel): - Name: str - Description: str - Settable: list[str] - Value: str + name: str = Field(..., alias="Name") + description: str = Field(..., alias="Description") + settable: list[str] = Field(..., alias="Settable") + value: str = Field(..., alias="Value") class PluginInterfaceType(BaseModel): - Prefix: str - Capability: str - Version: str + prefix: str = Field(..., alias="Prefix") + capability: str = Field(..., alias="Capability") + version: str = Field(..., alias="Version") class PluginPrivilege(BaseModel): """ - Describes a permission the user has to accept upon installing + Describes a permission the user has to accept upon installing the plugin. """ - Name: str | None = Field(None, example="network") - Description: str | None = None - Value: list[str] | None = Field(None, example=["host"]) + name: str | None = Field(default=None, alias="Name", examples=["network"]) + description: str | None = Field(default=None, alias="Description") + value: list[str] | None = Field(default=None, alias="Value", examples=[["host"]]) class Settings(BaseModel): @@ -1232,10 +1708,10 @@ class Settings(BaseModel): Settings that can be modified by users. 
""" - Mounts: list[PluginMount] - Env: list[str] = Field(..., example=["DEBUG=0"]) - Args: list[str] - Devices: list[PluginDevice] + mounts: list[PluginMount] = Field(..., alias="Mounts") + env: list[str] = Field(..., alias="Env", examples=[["DEBUG=0"]]) + args: list[str] = Field(..., alias="Args") + devices: list[PluginDevice] = Field(..., alias="Devices") class ProtocolScheme(str, Enum): @@ -1243,7 +1719,7 @@ class ProtocolScheme(str, Enum): Protocol to use for clients connecting to the plugin. """ - _ = "" + field_ = "" moby_plugins_http_v1 = "moby.plugins.http/v1" @@ -1252,44 +1728,53 @@ class Interface(BaseModel): The interface between Docker and the plugin """ - Types: list[PluginInterfaceType] = Field(..., example=["docker.volumedriver/1.0"]) - Socket: str = Field(..., example="plugins.sock") - ProtocolScheme: ProtocolScheme | None = Field( - None, + types: list[PluginInterfaceType] = Field( + ..., alias="Types", examples=[["docker.volumedriver/1.0"]] + ) + socket: str = Field(..., alias="Socket", examples=["plugins.sock"]) + protocol_scheme: ProtocolScheme | None = Field( + default=None, + alias="ProtocolScheme", description="Protocol to use for clients connecting to the plugin.", - example="some.protocol/v1.0", + examples=["some.protocol/v1.0"], ) class User(BaseModel): - UID: int | None = Field(None, example=1000) - GID: int | None = Field(None, example=1000) + uid: int | None = Field(default=None, alias="UID", examples=[1000]) + gid: int | None = Field(default=None, alias="GID", examples=[1000]) class Network1(BaseModel): - Type: str = Field(..., example="host") + type: str = Field(..., alias="Type", examples=["host"]) class Linux(BaseModel): - Capabilities: list[str] = Field(..., example=["CAP_SYS_ADMIN", "CAP_SYSLOG"]) - AllowAllDevices: bool = Field(..., example=False) - Devices: list[PluginDevice] + capabilities: list[str] = Field( + ..., alias="Capabilities", examples=[["CAP_SYS_ADMIN", "CAP_SYSLOG"]] + ) + allow_all_devices: bool = Field(..., alias="AllowAllDevices", examples=[False]) + devices: list[PluginDevice] = Field(..., alias="Devices") class Args(BaseModel): - Name: str = Field(..., example="args") - Description: str = Field(..., example="command line arguments") - Settable: list[str] - Value: list[str] + name: str = Field(..., alias="Name", examples=["args"]) + description: str = Field( + ..., alias="Description", examples=["command line arguments"] + ) + settable: list[str] = Field(..., alias="Settable") + value: list[str] = Field(..., alias="Value") class Rootfs(BaseModel): - type: str | None = Field(None, example="layers") + type: str | None = Field(default=None, examples=["layers"]) diff_ids: list[str] | None = Field( - None, - example=[ - "sha256:675532206fbf3030b8458f88d6e26d4eb1577688a25efec97154c94e8b6b4887", - "sha256:e216a057b1cb1efc11f8a268f37ef62083e70b1b38323ba252e25ac88904a7e8", + default=None, + examples=[ + [ + "sha256:675532206fbf3030b8458f88d6e26d4eb1577688a25efec97154c94e8b6b4887", + "sha256:e216a057b1cb1efc11f8a268f37ef62083e70b1b38323ba252e25ac88904a7e8", + ] ], ) @@ -1299,39 +1784,53 @@ class Config(BaseModel): The config of a plugin. 
""" - DockerVersion: str | None = Field( - None, + docker_version: str | None = Field( + default=None, + alias="DockerVersion", description="Docker Version used to create the plugin", - example="17.06.0-ce", - ) - Description: str = Field(..., example="A sample volume plugin for Docker") - Documentation: str = Field(..., example="/engine/extend/plugins/") - Interface: Interface = Field( - ..., description="The interface between Docker and the plugin" - ) - Entrypoint: list[str] = Field( - ..., example=["/usr/bin/sample-volume-plugin", "/data"] - ) - WorkDir: str = Field(..., example="/bin/") - User: User | None = None - Network: Network1 - Linux: Linux - PropagatedMount: str = Field(..., example="/mnt/volumes") - IpcHost: bool = Field(..., example=False) - PidHost: bool = Field(..., example=False) - Mounts: list[PluginMount] - Env: list[PluginEnv] = Field( + examples=["17.06.0-ce"], + ) + description: str = Field( + ..., alias="Description", examples=["A sample volume plugin for Docker"] + ) + documentation: str = Field( ..., - example=[ - { - "Name": "DEBUG", - "Description": "If set, prints debug messages", - "Settable": None, - "Value": "0", - } + alias="Documentation", + examples=["https://docs.docker.com/engine/extend/plugins/"], + ) + interface: Interface = Field( + ..., + alias="Interface", + description="The interface between Docker and the plugin", + ) + entrypoint: list[str] = Field( + ..., alias="Entrypoint", examples=[["/usr/bin/sample-volume-plugin", "/data"]] + ) + work_dir: str = Field(..., alias="WorkDir", examples=["/bin/"]) + user: User | None = Field(default=None, alias="User") + network: Network1 = Field(..., alias="Network") + linux: Linux = Field(..., alias="Linux") + propagated_mount: str = Field( + ..., alias="PropagatedMount", examples=["/mnt/volumes"] + ) + ipc_host: bool = Field(..., alias="IpcHost", examples=[False]) + pid_host: bool = Field(..., alias="PidHost", examples=[False]) + mounts: list[PluginMount] = Field(..., alias="Mounts") + env: list[PluginEnv] = Field( + ..., + alias="Env", + examples=[ + [ + { + "Name": "DEBUG", + "Description": "If set, prints debug messages", + "Settable": None, + "Value": "0", + } + ] ], ) - Args: Args + args: Args = Field(..., alias="Args") rootfs: Rootfs | None = None @@ -1340,29 +1839,33 @@ class Plugin(BaseModel): A plugin for the Engine API """ - Id: str | None = Field( - None, example="5724e2c8652da337ab2eedd19fc6fc0ec908e4bd907c7421bf6a8dfc70c4c078" + id: str | None = Field( + default=None, + alias="Id", + examples=["5724e2c8652da337ab2eedd19fc6fc0ec908e4bd907c7421bf6a8dfc70c4c078"], ) - Name: str = Field(..., example="tiborvass/sample-volume-plugin") - Enabled: bool = Field( + name: str = Field(..., alias="Name", examples=["tiborvass/sample-volume-plugin"]) + enabled: bool = Field( ..., + alias="Enabled", description="True if the plugin is running. False if the plugin is not running, only installed.", - example=True, + examples=[True], ) - Settings: Settings = Field( - ..., description="Settings that can be modified by users." + settings: Settings = Field( + ..., alias="Settings", description="Settings that can be modified by users." 
) - PluginReference: str | None = Field( - None, + plugin_reference: str | None = Field( + default=None, + alias="PluginReference", description="plugin remote reference used to push/pull the plugin", - example="localhost:5000/tiborvass/sample-volume-plugin:latest", + examples=["localhost:5000/tiborvass/sample-volume-plugin:latest"], ) - Config_: Config = Field(..., alias="Config", description="The config of a plugin.") + config: Config = Field(..., alias="Config", description="The config of a plugin.") class ObjectVersion(BaseModel): """ - The version number of the object such as node, service, etc. This is needed + The version number of the object such as node, service, etc. This is needed to avoid conflicting writes. The client must send the version number along with the modified specification when updating these objects. @@ -1375,7 +1878,7 @@ class ObjectVersion(BaseModel): """ - Index: int | None = Field(None, example=373531) + index: int | None = Field(default=None, alias="Index", examples=[373531]) class Role(str, Enum): @@ -1398,13 +1901,26 @@ class Availability(str, Enum): class NodeSpec(BaseModel): - Name: str | None = Field(None, description="Name for the node.", example="my-node") - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value metadata." + name: str | None = Field( + default=None, + alias="Name", + description="Name for the node.", + examples=["my-node"], ) - Role: Role | None = Field(None, description="Role of the node.", example="manager") - Availability: Availability | None = Field( - None, description="Availability of the node.", example="active" + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." + ) + role: Role | None = Field( + default=None, + alias="Role", + description="Role of the node.", + examples=["manager"], + ) + availability: Availability | None = Field( + default=None, + alias="Availability", + description="Availability of the node.", + examples=["active"], ) @@ -1414,21 +1930,23 @@ class Platform(BaseModel): """ - Architecture: str | None = Field( - None, + architecture: str | None = Field( + default=None, + alias="Architecture", description="Architecture represents the hardware architecture (for example,\n`x86_64`).\n", - example="x86_64", + examples=["x86_64"], ) - OS: str | None = Field( - None, + os: str | None = Field( + default=None, + alias="OS", description="OS represents the Operating System (for example, `linux` or `windows`).\n", - example="linux", + examples=["linux"], ) class Plugin1(BaseModel): - Type: str | None = None - Name: str | None = None + type: str | None = Field(default=None, alias="Type") + name: str | None = Field(default=None, alias="Name") class EngineDescription(BaseModel): @@ -1436,49 +1954,59 @@ class EngineDescription(BaseModel): EngineDescription provides information about an engine. 
""" - EngineVersion: str | None = Field(None, example="17.06.0") - Labels: dict[str, str] | None = Field(None, example={"foo": "bar"}) - Plugins: list[Plugin1] | None = Field( - None, - example=[ - {"Type": "Log", "Name": "awslogs"}, - {"Type": "Log", "Name": "fluentd"}, - {"Type": "Log", "Name": "gcplogs"}, - {"Type": "Log", "Name": "gelf"}, - {"Type": "Log", "Name": "journald"}, - {"Type": "Log", "Name": "json-file"}, - {"Type": "Log", "Name": "logentries"}, - {"Type": "Log", "Name": "splunk"}, - {"Type": "Log", "Name": "syslog"}, - {"Type": "Network", "Name": "bridge"}, - {"Type": "Network", "Name": "host"}, - {"Type": "Network", "Name": "ipvlan"}, - {"Type": "Network", "Name": "macvlan"}, - {"Type": "Network", "Name": "null"}, - {"Type": "Network", "Name": "overlay"}, - {"Type": "Volume", "Name": "local"}, - {"Type": "Volume", "Name": "localhost:5000/vieux/sshfs:latest"}, - {"Type": "Volume", "Name": "vieux/sshfs:latest"}, + engine_version: str | None = Field( + default=None, alias="EngineVersion", examples=["17.06.0"] + ) + labels: dict[str, str] | None = Field( + default=None, alias="Labels", examples=[{"foo": "bar"}] + ) + plugins: list[Plugin1] | None = Field( + default=None, + alias="Plugins", + examples=[ + [ + {"Type": "Log", "Name": "awslogs"}, + {"Type": "Log", "Name": "fluentd"}, + {"Type": "Log", "Name": "gcplogs"}, + {"Type": "Log", "Name": "gelf"}, + {"Type": "Log", "Name": "journald"}, + {"Type": "Log", "Name": "json-file"}, + {"Type": "Log", "Name": "splunk"}, + {"Type": "Log", "Name": "syslog"}, + {"Type": "Network", "Name": "bridge"}, + {"Type": "Network", "Name": "host"}, + {"Type": "Network", "Name": "ipvlan"}, + {"Type": "Network", "Name": "macvlan"}, + {"Type": "Network", "Name": "null"}, + {"Type": "Network", "Name": "overlay"}, + {"Type": "Volume", "Name": "local"}, + {"Type": "Volume", "Name": "localhost:5000/vieux/sshfs:latest"}, + {"Type": "Volume", "Name": "vieux/sshfs:latest"}, + ] ], ) class TLSInfo(BaseModel): """ - Information about the issuer of leaf TLS certificates and the trusted root + Information about the issuer of leaf TLS certificates and the trusted root CA certificate. """ - TrustRoot: str | None = Field( - None, + trust_root: str | None = Field( + default=None, + alias="TrustRoot", description="The root CA certificate(s) that are used to validate leaf TLS\ncertificates.\n", ) - CertIssuerSubject: str | None = Field( - None, description="The base64-url-safe-encoded raw subject bytes of the issuer." + cert_issuer_subject: str | None = Field( + default=None, + alias="CertIssuerSubject", + description="The base64-url-safe-encoded raw subject bytes of the issuer.", ) - CertIssuerPublicKey: str | None = Field( - None, + cert_issuer_public_key: str | None = Field( + default=None, + alias="CertIssuerPublicKey", description="The base64-url-safe-encoded raw public key bytes of the issuer.\n", ) @@ -1509,10 +2037,11 @@ class Orchestration(BaseModel): Orchestration configuration. """ - TaskHistoryRetentionLimit: int | None = Field( - None, + task_history_retention_limit: int | None = Field( + default=None, + alias="TaskHistoryRetentionLimit", description="The number of historic tasks to keep per instance or node. If\nnegative, never remove completed or failed tasks.\n", - example=10, + examples=[10], ) @@ -1521,27 +2050,34 @@ class Raft(BaseModel): Raft configuration. 
""" - SnapshotInterval: int | None = Field( - None, description="The number of log entries between snapshots.", example=10000 + snapshot_interval: int | None = Field( + default=None, + alias="SnapshotInterval", + description="The number of log entries between snapshots.", + examples=[10000], ) - KeepOldSnapshots: int | None = Field( - None, + keep_old_snapshots: int | None = Field( + default=None, + alias="KeepOldSnapshots", description="The number of snapshots to keep beyond the current snapshot.\n", ) - LogEntriesForSlowFollowers: int | None = Field( - None, + log_entries_for_slow_followers: int | None = Field( + default=None, + alias="LogEntriesForSlowFollowers", description="The number of log entries to keep around to sync up slow followers\nafter a snapshot is created.\n", - example=500, + examples=[500], ) - ElectionTick: int | None = Field( - None, + election_tick: int | None = Field( + default=None, + alias="ElectionTick", description="The number of ticks that a follower will wait for a message from\nthe leader before becoming a candidate and starting an election.\n`ElectionTick` must be greater than `HeartbeatTick`.\n\nA tick currently defaults to one second, so these translate\ndirectly to seconds currently, but this is NOT guaranteed.\n", - example=3, + examples=[3], ) - HeartbeatTick: int | None = Field( - None, + heartbeat_tick: int | None = Field( + default=None, + alias="HeartbeatTick", description="The number of ticks between heartbeats. Every HeartbeatTick ticks,\nthe leader will send a heartbeat to the followers.\n\nA tick currently defaults to one second, so these translate\ndirectly to seconds currently, but this is NOT guaranteed.\n", - example=1, + examples=[1], ) @@ -1550,16 +2086,17 @@ class Dispatcher(BaseModel): Dispatcher configuration. """ - HeartbeatPeriod: int | None = Field( - None, + heartbeat_period: int | None = Field( + default=None, + alias="HeartbeatPeriod", description="The delay for an agent to send a heartbeat to the dispatcher.\n", - example=5000000000, + examples=[5000000000], ) class Protocol(str, Enum): """ - Protocol for communication with the external CA (currently + Protocol for communication with the external CA (currently only `cfssl` is supported). """ @@ -1568,47 +2105,57 @@ class Protocol(str, Enum): class ExternalCA(BaseModel): - Protocol: Protocol | None = Field( - Protocol.cfssl, + protocol: Protocol | None = Field( + default=Protocol.cfssl, + alias="Protocol", description="Protocol for communication with the external CA (currently\nonly `cfssl` is supported).\n", ) - URL: str | None = Field( - None, description="URL where certificate signing requests should be sent.\n" + url: str | None = Field( + default=None, + alias="URL", + description="URL where certificate signing requests should be sent.\n", ) - Options: dict[str, str] | None = Field( - None, + options: dict[str, str] | None = Field( + default=None, + alias="Options", description="An object with key/value pairs that are interpreted as\nprotocol-specific options for the external CA driver.\n", ) - CACert: str | None = Field( - None, + ca_cert: str | None = Field( + default=None, + alias="CACert", description="The root CA certificate (in PEM format) this external CA uses\nto issue TLS certificates (assumed to be to the current swarm\nroot CA certificate if not provided).\n", ) -class CAConfig(BaseModel): +class CaConfig(BaseModel): """ CA configuration. 
""" - NodeCertExpiry: int | None = Field( - None, + node_cert_expiry: int | None = Field( + default=None, + alias="NodeCertExpiry", description="The duration node certificates are issued for.", - example=7776000000000000, + examples=[7776000000000000], ) - ExternalCAs: list[ExternalCA] | None = Field( - None, + external_c_as: list[ExternalCA] | None = Field( + default=None, + alias="ExternalCAs", description="Configuration for forwarding signing requests to an external\ncertificate authority.\n", ) - SigningCACert: str | None = Field( - None, + signing_ca_cert: str | None = Field( + default=None, + alias="SigningCACert", description="The desired signing CA certificate for all swarm node TLS leaf\ncertificates, in PEM format.\n", ) - SigningCAKey: str | None = Field( - None, + signing_ca_key: str | None = Field( + default=None, + alias="SigningCAKey", description="The desired signing CA key for all swarm node TLS leaf certificates,\nin PEM format.\n", ) - ForceRotate: int | None = Field( - None, + force_rotate: int | None = Field( + default=None, + alias="ForceRotate", description="An integer whose purpose is to force swarm to generate a new\nsigning CA certificate and key, if none have been specified in\n`SigningCACert` and `SigningCAKey`\n", ) @@ -1618,16 +2165,17 @@ class EncryptionConfig(BaseModel): Parameters related to encryption-at-rest. """ - AutoLockManagers: bool | None = Field( - None, + auto_lock_managers: bool | None = Field( + default=None, + alias="AutoLockManagers", description="If set, generate a key and use it to lock data stored on the\nmanagers.\n", - example=False, + examples=[False], ) class LogDriver(BaseModel): """ - The log driver to use for tasks created in the orchestrator if + The log driver to use for tasks created in the orchestrator if unspecified by a service. Updating this value only affects new tasks. Existing tasks continue @@ -1635,15 +2183,17 @@ class LogDriver(BaseModel): """ - Name: str | None = Field( - None, + name: str | None = Field( + default=None, + alias="Name", description="The log driver to use as a default for new tasks.\n", - example="json-file", + examples=["json-file"], ) - Options: dict[str, str] | None = Field( - None, - description="Driver-specific options for the selectd log driver, specified\nas key/value pairs.\n", - example={"max-file": "10", "max-size": "100m"}, + options: dict[str, str] | None = Field( + default=None, + alias="Options", + description="Driver-specific options for the selected log driver, specified\nas key/value pairs.\n", + examples=[{"max-file": "10", "max-size": "100m"}], ) @@ -1652,8 +2202,9 @@ class TaskDefaults(BaseModel): Defaults for creating tasks in this cluster. """ - LogDriver: LogDriver | None = Field( - None, + log_driver: LogDriver | None = Field( + default=None, + alias="LogDriver", description="The log driver to use for tasks created in the orchestrator if\nunspecified by a service.\n\nUpdating this value only affects new tasks. Existing tasks continue\nto use their previously configured log driver until recreated.\n", ) @@ -1663,70 +2214,97 @@ class SwarmSpec(BaseModel): User modifiable swarm configuration. 
""" - Name: str | None = Field(None, description="Name of the swarm.", example="default") - Labels: dict[str, str] | None = Field( - None, + name: str | None = Field( + default=None, + alias="Name", + description="Name of the swarm.", + examples=["default"], + ) + labels: dict[str, str] | None = Field( + default=None, + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.corp.type": "production", - "com.example.corp.department": "engineering", - }, + examples=[ + { + "com.example.corp.type": "production", + "com.example.corp.department": "engineering", + } + ], + ) + orchestration: Orchestration | None = Field( + default=None, alias="Orchestration", description="Orchestration configuration." ) - Orchestration: Orchestration | None = Field( - None, description="Orchestration configuration." + raft: Raft | None = Field( + default=None, alias="Raft", description="Raft configuration." ) - Raft: Raft | None = Field(None, description="Raft configuration.") - Dispatcher: Dispatcher | None = Field(None, description="Dispatcher configuration.") - CAConfig: CAConfig | None = Field(None, description="CA configuration.") - EncryptionConfig: EncryptionConfig | None = Field( - None, description="Parameters related to encryption-at-rest." + dispatcher: Dispatcher | None = Field( + default=None, alias="Dispatcher", description="Dispatcher configuration." ) - TaskDefaults: TaskDefaults | None = Field( - None, description="Defaults for creating tasks in this cluster." + ca_config: CaConfig | None = Field( + default=None, alias="CAConfig", description="CA configuration." + ) + encryption_config: EncryptionConfig | None = Field( + default=None, + alias="EncryptionConfig", + description="Parameters related to encryption-at-rest.", + ) + task_defaults: TaskDefaults | None = Field( + default=None, + alias="TaskDefaults", + description="Defaults for creating tasks in this cluster.", ) class ClusterInfo(BaseModel): """ - ClusterInfo represents information about the swarm as is returned by the + ClusterInfo represents information about the swarm as is returned by the "/info" endpoint. Join-tokens are not included. 
""" - ID: str | None = Field( - None, description="The ID of the swarm.", example="abajmipo7b4xz5ip2nrla6b11" + id: str | None = Field( + default=None, + alias="ID", + description="The ID of the swarm.", + examples=["abajmipo7b4xz5ip2nrla6b11"], ) - Version: ObjectVersion | None = None - CreatedAt: str | None = Field( - None, + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field( + default=None, + alias="CreatedAt", description="Date and time at which the swarm was initialised in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2016-08-18T10:44:24.496525531Z", + examples=["2016-08-18T10:44:24.496525531Z"], ) - UpdatedAt: str | None = Field( - None, + updated_at: str | None = Field( + default=None, + alias="UpdatedAt", description="Date and time at which the swarm was last updated in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2017-08-09T07:09:37.632105588Z", + examples=["2017-08-09T07:09:37.632105588Z"], ) - Spec: SwarmSpec | None = None - TLSInfo: TLSInfo | None = None - RootRotationInProgress: bool | None = Field( - None, + spec: SwarmSpec | None = Field(default=None, alias="Spec") + tls_info: TLSInfo | None = Field(default=None, alias="TLSInfo") + root_rotation_in_progress: bool | None = Field( + default=None, + alias="RootRotationInProgress", description="Whether there is currently a root CA rotation in progress for the swarm\n", - example=False, + examples=[False], ) - DataPathPort: int | None = Field( - 4789, + data_path_port: int | None = Field( + default=4789, + alias="DataPathPort", description="DataPathPort specifies the data path port number for data traffic.\nAcceptable port range is 1024 to 49151.\nIf no port is set or is set to 0, the default port (4789) is used.\n", - example=4789, + examples=[4789], ) - DefaultAddrPool: list[str] | None = Field( - None, + default_addr_pool: list[str] | None = Field( + default=None, + alias="DefaultAddrPool", description="Default Address Pool specifies default subnet pools for global scope\nnetworks.\n", ) - SubnetSize: int | None = Field( - 24, + subnet_size: int | None = Field( + default=24, + alias="SubnetSize", description="SubnetSize specifies the subnet size of the networks created from the\ndefault subnet pool.\n", - example=24, + examples=[24], le=29, ) @@ -1737,25 +2315,31 @@ class JoinTokens(BaseModel): """ - Worker: str | None = Field( - None, + worker: str | None = Field( + default=None, + alias="Worker", description="The token workers can use to join the swarm.\n", - example="SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-1awxwuwd3z9j1z3puu7rcgdbx", + examples=[ + "SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-1awxwuwd3z9j1z3puu7rcgdbx" + ], ) - Manager: str | None = Field( - None, + manager: str | None = Field( + default=None, + alias="Manager", description="The token managers can use to join the swarm.\n", - example="SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-7p73s1dx5in4tatdymyhg9hu2", + examples=[ + "SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-7p73s1dx5in4tatdymyhg9hu2" + ], ) class Swarm(ClusterInfo): - JoinTokens: JoinTokens | None = None + join_tokens: JoinTokens | None = Field(default=None, alias="JoinTokens") class PluginSpec(BaseModel): """ - Plugin spec for the service. *(Experimental release only.)* + Plugin spec for the service. *(Experimental release only.)*


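Reviewer note: every renamed field keeps its wire name through `alias=...`, so payloads from the Docker API validate exactly as before; only the Python attribute names change. A minimal round-trip sketch under pydantic v2 defaults (the import path below is hypothetical):

```python
from simcore_service.models import Swarm  # hypothetical import path

payload = {
    "ID": "abajmipo7b4xz5ip2nrla6b11",
    "JoinTokens": {
        "Worker": "SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-1awxwuwd3z9j1z3puu7rcgdbx"
    },
}
swarm = Swarm.model_validate(payload)           # v2 replacement for parse_obj()
assert swarm.id == "abajmipo7b4xz5ip2nrla6b11"  # snake_case access in code
# by_alias restores the Docker wire format; exclude_unset skips defaulted fields
assert swarm.model_dump(by_alias=True, exclude_unset=True) == payload
```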
@@ -1766,14 +2350,20 @@ class PluginSpec(BaseModel): """ - Name: str | None = Field( - None, description="The name or 'alias' to use for the plugin." + name: str | None = Field( + default=None, + alias="Name", + description="The name or 'alias' to use for the plugin.", + ) + remote: str | None = Field( + default=None, alias="Remote", description="The plugin image reference to use." ) - Remote: str | None = Field(None, description="The plugin image reference to use.") - Disabled: bool | None = Field( - None, description="Disable the plugin once scheduled." + disabled: bool | None = Field( + default=None, alias="Disabled", description="Disable the plugin once scheduled." + ) + plugin_privilege: list[PluginPrivilege] | None = Field( + default=None, alias="PluginPrivilege" ) - PluginPrivilege: list[PluginPrivilege] | None = None class CredentialSpec(BaseModel): @@ -1781,33 +2371,45 @@ class CredentialSpec(BaseModel): CredentialSpec for managed service account (Windows only) """ - Config_: str | None = Field( - None, + config: str | None = Field( + default=None, alias="Config", description="Load credential spec from a Swarm Config with the given ID.\nThe specified config must also be present in the Configs\nfield with the Runtime property set.\n\n


\n\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n", - example="0bt9dmxjvjiqermk6xrop3ekq", + examples=["0bt9dmxjvjiqermk6xrop3ekq"], ) - File: str | None = Field( - None, + file: str | None = Field( + default=None, + alias="File", description="Load credential spec from this file. The file is read by\nthe daemon, and must be present in the `CredentialSpecs`\nsubdirectory in the docker data directory, which defaults\nto `C:\\ProgramData\\Docker\\` on Windows.\n\nFor example, specifying `spec.json` loads\n`C:\\ProgramData\\Docker\\CredentialSpecs\\spec.json`.\n\n


\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n", - example="spec.json", + examples=["spec.json"], ) - Registry: str | None = Field( - None, + registry: str | None = Field( + default=None, + alias="Registry", description="Load credential spec from this value in the Windows\nregistry. The specified registry value must be located in:\n\n`HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Virtualization\\Containers\\CredentialSpecs`\n\n


\n\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n", ) -class SELinuxContext(BaseModel): +class SeLinuxContext(BaseModel): """ SELinux labels of the container """ - Disable: bool | None = Field(None, description="Disable SELinux") - User: str | None = Field(None, description="SELinux user label") - Role: str | None = Field(None, description="SELinux role label") - Type: str | None = Field(None, description="SELinux type label") - Level: str | None = Field(None, description="SELinux level label") + disable: bool | None = Field( + default=None, alias="Disable", description="Disable SELinux" + ) + user: str | None = Field( + default=None, alias="User", description="SELinux user label" + ) + role: str | None = Field( + default=None, alias="Role", description="SELinux role label" + ) + type: str | None = Field( + default=None, alias="Type", description="SELinux type label" + ) + level: str | None = Field( + default=None, alias="Level", description="SELinux level label" + ) class Privileges(BaseModel): @@ -1815,29 +2417,36 @@ class Privileges(BaseModel): Security options for the container """ - CredentialSpec: CredentialSpec | None = Field( - None, description="CredentialSpec for managed service account (Windows only)" + credential_spec: CredentialSpec | None = Field( + default=None, + alias="CredentialSpec", + description="CredentialSpec for managed service account (Windows only)", ) - SELinuxContext: SELinuxContext | None = Field( - None, description="SELinux labels of the container" + se_linux_context: SeLinuxContext | None = Field( + default=None, + alias="SELinuxContext", + description="SELinux labels of the container", ) -class DNSConfig(BaseModel): +class DnsConfig(BaseModel): """ - Specification for DNS related configurations in resolver configuration + Specification for DNS related configurations in resolver configuration file (`resolv.conf`). """ - Nameservers: list[str] | None = Field( - None, description="The IP addresses of the name servers." + nameservers: list[str] | None = Field( + default=None, + alias="Nameservers", + description="The IP addresses of the name servers.", ) - Search: list[str] | None = Field( - None, description="A search list for host-name lookup." + search: list[str] | None = Field( + default=None, alias="Search", description="A search list for host-name lookup." ) - Options: list[str] | None = Field( - None, + options: list[str] | None = Field( + default=None, + alias="Options", description="A list of internal resolver variables to be modified (e.g.,\n`debug`, `ndots:3`, etc.).\n", ) @@ -1848,34 +2457,45 @@ class File(BaseModel): """ - Name: str | None = Field( - None, description="Name represents the final filename in the filesystem.\n" + name: str | None = Field( + default=None, + alias="Name", + description="Name represents the final filename in the filesystem.\n", + ) + uid: str | None = Field( + default=None, alias="UID", description="UID represents the file UID." ) - UID: str | None = Field(None, description="UID represents the file UID.") - GID: str | None = Field(None, description="GID represents the file GID.") - Mode: int | None = Field( - None, description="Mode represents the FileMode of the file." + gid: str | None = Field( + default=None, alias="GID", description="GID represents the file GID." 
+ ) + mode: int | None = Field( + default=None, + alias="Mode", + description="Mode represents the FileMode of the file.", ) class Secret(BaseModel): - File: File | None = Field( - None, + file: File | None = Field( + default=None, + alias="File", description="File represents a specific target that is backed by a file.\n", ) - SecretID: str | None = Field( - None, + secret_id: str | None = Field( + default=None, + alias="SecretID", description="SecretID represents the ID of the specific secret that we're\nreferencing.\n", ) - SecretName: str | None = Field( - None, + secret_name: str | None = Field( + default=None, + alias="SecretName", description="SecretName is the name of the secret that this references,\nbut this is just provided for lookup/display purposes. The\nsecret in the reference will be identified by its ID.\n", ) -class File1(File): +class File1(BaseModel): """ - File represents a specific target that is backed by a file. + File represents a specific target that is backed by a file.


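The `example=` → `examples=[...]` change running through these hunks (e.g. `examples=["0bt9dmxjvjiqermk6xrop3ekq"]` in `CredentialSpec` above) is not cosmetic: pydantic v2 emits the list into the generated JSON schema. A self-contained sketch with a made-up stand-in model:

```python
from pydantic import BaseModel, Field

class Demo(BaseModel):  # hypothetical stand-in, not part of this diff
    config: str | None = Field(
        default=None, alias="Config", examples=["0bt9dmxjvjiqermk6xrop3ekq"]
    )

schema = Demo.model_json_schema()  # schema properties are keyed by alias
assert schema["properties"]["Config"]["examples"] == ["0bt9dmxjvjiqermk6xrop3ekq"]
```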
@@ -1883,33 +2503,62 @@ class File1(File): """ + name: str | None = Field( + default=None, + alias="Name", + description="Name represents the final filename in the filesystem.\n", + ) + uid: str | None = Field( + default=None, alias="UID", description="UID represents the file UID." + ) + gid: str | None = Field( + default=None, alias="GID", description="GID represents the file GID." + ) + mode: int | None = Field( + default=None, + alias="Mode", + description="Mode represents the FileMode of the file.", + ) + class Config1(BaseModel): - File: File1 | None = Field( - None, + file: File1 | None = Field( + default=None, + alias="File", description="File represents a specific target that is backed by a file.\n\n


\n\n> **Note**: `Configs.File` and `Configs.Runtime` are mutually exclusive\n", ) - Runtime: dict[str, Any] | None = Field( - None, + runtime: dict[str, Any] | None = Field( + default=None, + alias="Runtime", description="Runtime represents a target that is not mounted into the\ncontainer but is used by the task\n\n


\n\n> **Note**: `Configs.File` and `Configs.Runtime` are mutually\n> exclusive\n", ) - ConfigID: str | None = Field( - None, + config_id: str | None = Field( + default=None, + alias="ConfigID", description="ConfigID represents the ID of the specific config that we're\nreferencing.\n", ) - ConfigName: str | None = Field( - None, + config_name: str | None = Field( + default=None, + alias="ConfigName", description="ConfigName is the name of the config that this references,\nbut this is just provided for lookup/display purposes. The\nconfig in the reference will be identified by its ID.\n", ) -class Ulimit1(Ulimit): - pass +class Isolation1(str, Enum): + """ + Isolation technology of the containers running the service. + (Windows only) + + """ + + default = "default" + process = "process" + hyperv = "hyperv" class ContainerSpec(BaseModel): """ - Container spec for the service. + Container spec for the service.


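Two generator-level changes here: the `Ulimit1` pass-through subclass disappears (`ContainerSpec.ulimits` uses `Ulimit` directly, below), and the service-level isolation setting gets its own `Isolation1` enum instead of reusing the container-level one. Since it derives from `(str, Enum)`, members compare equal to their wire strings; a standalone sketch of that behavior:

```python
from enum import Enum

class Isolation1(str, Enum):  # repeated from the hunk above so this runs standalone
    default = "default"
    process = "process"
    hyperv = "hyperv"

assert Isolation1("hyperv") is Isolation1.hyperv  # lookup by wire value
assert Isolation1.process == "process"            # plain string comparison
assert Isolation1.default.value == "default"
```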
@@ -1920,98 +2569,132 @@ class ContainerSpec(BaseModel): """ - Image: str | None = Field( - None, description="The image name to use for the container" + image: str | None = Field( + default=None, + alias="Image", + description="The image name to use for the container", + ) + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value data." ) - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value data." + command: list[str] | None = Field( + default=None, alias="Command", description="The command to be run in the image." ) - Command: list[str] | None = Field( - None, description="The command to be run in the image." + args: list[str] | None = Field( + default=None, alias="Args", description="Arguments to the command." ) - Args: list[str] | None = Field(None, description="Arguments to the command.") - Hostname: str | None = Field( - None, + hostname: str | None = Field( + default=None, + alias="Hostname", description="The hostname to use for the container, as a valid\n[RFC 1123](https://tools.ietf.org/html/rfc1123) hostname.\n", ) - Env: list[str] | None = Field( - None, description="A list of environment variables in the form `VAR=value`.\n" + env: list[str] | None = Field( + default=None, + alias="Env", + description="A list of environment variables in the form `VAR=value`.\n", ) - Dir: str | None = Field( - None, description="The working directory for commands to run in." + dir: str | None = Field( + default=None, + alias="Dir", + description="The working directory for commands to run in.", + ) + user: str | None = Field( + default=None, alias="User", description="The user inside the container." ) - User: str | None = Field(None, description="The user inside the container.") - Groups: list[str] | None = Field( - None, + groups: list[str] | None = Field( + default=None, + alias="Groups", description="A list of additional groups that the container process will run as.\n", ) - Privileges: Privileges | None = Field( - None, description="Security options for the container" + privileges: Privileges | None = Field( + default=None, + alias="Privileges", + description="Security options for the container", + ) + tty: bool | None = Field( + default=None, + alias="TTY", + description="Whether a pseudo-TTY should be allocated.", ) - TTY: bool | None = Field( - None, description="Whether a pseudo-TTY should be allocated." + open_stdin: bool | None = Field( + default=None, alias="OpenStdin", description="Open `stdin`" ) - OpenStdin: bool | None = Field(None, description="Open `stdin`") - ReadOnly: bool | None = Field( - None, description="Mount the container's root filesystem as read only." + read_only: bool | None = Field( + default=None, + alias="ReadOnly", + description="Mount the container's root filesystem as read only.", ) - Mounts: list[Mount] | None = Field( - None, + mounts: list[Mount] | None = Field( + default=None, + alias="Mounts", description="Specification for mounts to be added to containers created as part\nof the service.\n", ) - StopSignal: str | None = Field(None, description="Signal to stop the container.") - StopGracePeriod: int | None = Field( - None, + stop_signal: str | None = Field( + default=None, alias="StopSignal", description="Signal to stop the container." 
+ ) + stop_grace_period: int | None = Field( + default=None, + alias="StopGracePeriod", description="Amount of time to wait for the container to terminate before\nforcefully killing it.\n", ) - HealthCheck: HealthConfig | None = None - Hosts: list[str] | None = Field( - None, + health_check: HealthConfig | None = Field(default=None, alias="HealthCheck") + hosts: list[str] | None = Field( + default=None, + alias="Hosts", description="A list of hostname/IP mappings to add to the container's `hosts`\nfile. The format of extra hosts is specified in the\n[hosts(5)](http://man7.org/linux/man-pages/man5/hosts.5.html)\nman page:\n\n IP_address canonical_hostname [aliases...]\n", ) - DNSConfig: DNSConfig | None = Field( - None, + dns_config: DnsConfig | None = Field( + default=None, + alias="DNSConfig", description="Specification for DNS related configurations in resolver configuration\nfile (`resolv.conf`).\n", ) - Secrets: list[Secret] | None = Field( - None, + secrets: list[Secret] | None = Field( + default=None, + alias="Secrets", description="Secrets contains references to zero or more secrets that will be\nexposed to the service.\n", ) - Configs: list[Config1] | None = Field( - None, + configs: list[Config1] | None = Field( + default=None, + alias="Configs", description="Configs contains references to zero or more configs that will be\nexposed to the service.\n", ) - Isolation: Isolation | None = Field( - None, + isolation: Isolation1 | None = Field( + default=None, + alias="Isolation", description="Isolation technology of the containers running the service.\n(Windows only)\n", ) - Init: bool | None = Field( - None, + init: bool | None = Field( + default=None, + alias="Init", description="Run an init inside the container that forwards signals and reaps\nprocesses. This field is omitted if empty, and the default (as\nconfigured on the daemon) is used.\n", ) - Sysctls: dict[str, str] | None = Field( - None, + sysctls: dict[str, str] | None = Field( + default=None, + alias="Sysctls", description="Set kernel namedspaced parameters (sysctls) in the container.\nThe Sysctls option on services accepts the same sysctls as the\nare supported on containers. Note that while the same sysctls are\nsupported, no guarantees or checks are made about their\nsuitability for a clustered environment, and it's up to the user\nto determine whether a given sysctl will work properly in a\nService.\n", ) - CapabilityAdd: list[str] | None = Field( - None, + capability_add: list[str] | None = Field( + default=None, + alias="CapabilityAdd", description="A list of kernel capabilities to add to the default set\nfor the container.\n", - example=["CAP_NET_RAW", "CAP_SYS_ADMIN", "CAP_SYS_CHROOT", "CAP_SYSLOG"], + examples=[["CAP_NET_RAW", "CAP_SYS_ADMIN", "CAP_SYS_CHROOT", "CAP_SYSLOG"]], ) - CapabilityDrop: list[str] | None = Field( - None, + capability_drop: list[str] | None = Field( + default=None, + alias="CapabilityDrop", description="A list of kernel capabilities to drop from the default set\nfor the container.\n", - example=["CAP_NET_RAW"], + examples=[["CAP_NET_RAW"]], ) - Ulimits: list[Ulimit1] | None = Field( - None, + ulimits: list[Ulimit] | None = Field( + default=None, + alias="Ulimits", description='A list of resource limits to set in the container. For example: `{"Name": "nofile", "Soft": 1024, "Hard": 2048}`"\n', ) class NetworkAttachmentSpec(BaseModel): """ - Read-only spec type for non-swarm containers attached to swarm overlay + Read-only spec type for non-swarm containers attached to swarm overlay networks.


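As with the other models, `ContainerSpec` keeps accepting Docker's PascalCase keys via the aliases while application code reads snake_case attributes. A sketch (hypothetical import path; with pydantic defaults the alias keys must be used for validation unless `populate_by_name=True` is configured on the model):

```python
from simcore_service.models import ContainerSpec  # hypothetical import path

spec = ContainerSpec.model_validate({"Image": "nginx:latest", "TTY": True})
assert spec.image == "nginx:latest"
assert spec.tty is True
```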
@@ -2023,8 +2706,10 @@ class NetworkAttachmentSpec(BaseModel): """ - ContainerID: str | None = Field( - None, description="ID of the container represented by this task" + container_id: str | None = Field( + default=None, + alias="ContainerID", + description="ID of the container represented by this task", ) @@ -2040,73 +2725,89 @@ class Condition(str, Enum): class RestartPolicy1(BaseModel): """ - Specification for the restart policy which applies to containers + Specification for the restart policy which applies to containers created as part of this service. """ - Condition: Condition | None = Field(None, description="Condition for restart.") - Delay: int | None = Field(None, description="Delay between restart attempts.") - MaxAttempts: int | None = Field( - 0, + condition: Condition | None = Field( + default=None, alias="Condition", description="Condition for restart." + ) + delay: int | None = Field( + default=None, alias="Delay", description="Delay between restart attempts." + ) + max_attempts: int | None = Field( + default=0, + alias="MaxAttempts", description="Maximum attempts to restart a given container before giving up\n(default value is 0, which is ignored).\n", ) - Window: int | None = Field( - 0, + window: int | None = Field( + default=0, + alias="Window", description="Windows is the time window used to evaluate the restart policy\n(default value is 0, which is unbounded).\n", ) class Spread(BaseModel): - SpreadDescriptor: str | None = Field( - None, description="label descriptor, such as `engine.labels.az`.\n" + spread_descriptor: str | None = Field( + default=None, + alias="SpreadDescriptor", + description="label descriptor, such as `engine.labels.az`.\n", ) class Preference(BaseModel): - Spread: Spread | None = None + spread: Spread | None = Field(default=None, alias="Spread") class Placement(BaseModel): - Constraints: list[str] | None = Field( - None, + constraints: list[str] | None = Field( + default=None, + alias="Constraints", description="An array of constraint expressions to limit the set of nodes where\na task can be scheduled. Constraint expressions can either use a\n_match_ (`==`) or _exclude_ (`!=`) rule. Multiple constraints find\nnodes that satisfy every expression (AND match). Constraints can\nmatch node or Docker Engine labels as follows:\n\nnode attribute | matches | example\n---------------------|--------------------------------|-----------------------------------------------\n`node.id` | Node ID | `node.id==2ivku8v2gvtg4`\n`node.hostname` | Node hostname | `node.hostname!=node-2`\n`node.role` | Node role (`manager`/`worker`) | `node.role==manager`\n`node.platform.os` | Node operating system | `node.platform.os==windows`\n`node.platform.arch` | Node architecture | `node.platform.arch==x86_64`\n`node.labels` | User-defined node labels | `node.labels.security==high`\n`engine.labels` | Docker Engine's labels | `engine.labels.operatingsystem==ubuntu-14.04`\n\n`engine.labels` apply to Docker Engine labels like operating system,\ndrivers, etc. 
Swarm administrators add `node.labels` for operational\npurposes by using the [`node update endpoint`](#operation/NodeUpdate).\n", - example=[ - "node.hostname!=node3.corp.example.com", - "node.role!=manager", - "node.labels.type==production", - "node.platform.os==linux", - "node.platform.arch==x86_64", + examples=[ + [ + "node.hostname!=node3.corp.example.com", + "node.role!=manager", + "node.labels.type==production", + "node.platform.os==linux", + "node.platform.arch==x86_64", + ] ], ) - Preferences: list[Preference] | None = Field( - None, + preferences: list[Preference] | None = Field( + default=None, + alias="Preferences", description="Preferences provide a way to make the scheduler aware of factors\nsuch as topology. They are provided in order from highest to\nlowest precedence.\n", - example=[ - {"Spread": {"SpreadDescriptor": "node.labels.datacenter"}}, - {"Spread": {"SpreadDescriptor": "node.labels.rack"}}, + examples=[ + [ + {"Spread": {"SpreadDescriptor": "node.labels.datacenter"}}, + {"Spread": {"SpreadDescriptor": "node.labels.rack"}}, + ] ], ) - MaxReplicas: int | None = Field( - 0, + max_replicas: int | None = Field( + default=0, + alias="MaxReplicas", description="Maximum number of replicas for per node (default value is 0, which\nis unlimited)\n", ) - Platforms: list[Platform] | None = Field( - None, + platforms: list[Platform] | None = Field( + default=None, + alias="Platforms", description="Platforms stores all the platforms that the service's image can\nrun on. This field is used in the platform filter for scheduling.\nIf empty, then the platform filter is off, meaning there are no\nscheduling restrictions.\n", ) class LogDriver1(BaseModel): """ - Specifies the log driver to use for tasks created from this spec. If + Specifies the log driver to use for tasks created from this spec. If not present, the default one for the swarm will be used, finally falling back to the engine default if not specified. """ - Name: str | None = None - Options: dict[str, str] | None = None + name: str | None = Field(default=None, alias="Name") + options: dict[str, str] | None = Field(default=None, alias="Options") class TaskState(str, Enum): @@ -2128,35 +2829,40 @@ class TaskState(str, Enum): class ContainerStatus(BaseModel): - ContainerID: str | None = None - PID: int | None = None - ExitCode: int | None = None + container_id: str | None = Field(default=None, alias="ContainerID") + pid: int | None = Field(default=None, alias="PID") + exit_code: int | None = Field(default=None, alias="ExitCode") class Status1(BaseModel): - Timestamp: str | None = None - State: TaskState | None = None - Message: str | None = None - Err: str | None = None - ContainerStatus: ContainerStatus | None = None + timestamp: str | None = Field(default=None, alias="Timestamp") + state: TaskState | None = Field(default=None, alias="State") + message: str | None = Field(default=None, alias="Message") + err: str | None = Field(default=None, alias="Err") + container_status: ContainerStatus | None = Field( + default=None, alias="ContainerStatus" + ) class Replicated(BaseModel): - Replicas: int | None = None + replicas: int | None = Field(default=None, alias="Replicas") class ReplicatedJob(BaseModel): """ - The mode used for services with a finite number of tasks that run + The mode used for services with a finite number of tasks that run to a completed state. 
""" - MaxConcurrent: int | None = Field( - 1, description="The maximum number of replicas to run simultaneously.\n" + max_concurrent: int | None = Field( + default=1, + alias="MaxConcurrent", + description="The maximum number of replicas to run simultaneously.\n", ) - TotalCompletions: int | None = Field( - None, + total_completions: int | None = Field( + default=None, + alias="TotalCompletions", description="The total number of replicas desired to reach the Completed\nstate. If unset, will default to the value of `MaxConcurrent`\n", ) @@ -2166,21 +2872,23 @@ class Mode(BaseModel): Scheduling mode for the service. """ - Replicated: Replicated | None = None - Global: dict[str, Any] | None = None - ReplicatedJob: ReplicatedJob | None = Field( - None, + replicated: Replicated | None = Field(default=None, alias="Replicated") + global_: dict[str, Any] | None = Field(default=None, alias="Global") + replicated_job: ReplicatedJob | None = Field( + default=None, + alias="ReplicatedJob", description="The mode used for services with a finite number of tasks that run\nto a completed state.\n", ) - GlobalJob: dict[str, Any] | None = Field( - None, + global_job: dict[str, Any] | None = Field( + default=None, + alias="GlobalJob", description="The mode used for services which run a task to the completed state\non each valid node.\n", ) class FailureAction(str, Enum): """ - Action to take if an updated task fails to run, or stops running + Action to take if an updated task fails to run, or stops running during the update. """ @@ -2192,7 +2900,7 @@ class FailureAction(str, Enum): class Order(str, Enum): """ - The order of operations when rolling out an updated task. Either + The order of operations when rolling out an updated task. Either the old task is shut down before the new task is started, or the new task is started before the old task is shut down. @@ -2207,34 +2915,41 @@ class UpdateConfig(BaseModel): Specification for the update strategy of the service. """ - Parallelism: int | None = Field( - None, + parallelism: int | None = Field( + default=None, + alias="Parallelism", description="Maximum number of tasks to be updated in one iteration (0 means\nunlimited parallelism).\n", ) - Delay: int | None = Field( - None, description="Amount of time between updates, in nanoseconds." + delay: int | None = Field( + default=None, + alias="Delay", + description="Amount of time between updates, in nanoseconds.", ) - FailureAction: FailureAction | None = Field( - None, + failure_action: FailureAction | None = Field( + default=None, + alias="FailureAction", description="Action to take if an updated task fails to run, or stops running\nduring the update.\n", ) - Monitor: int | None = Field( - None, + monitor: int | None = Field( + default=None, + alias="Monitor", description="Amount of time to monitor each updated task for failures, in\nnanoseconds.\n", ) - MaxFailureRatio: float | None = Field( - 0, + max_failure_ratio: float | None = Field( + default=0, + alias="MaxFailureRatio", description="The fraction of tasks that may fail during an update before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n", ) - Order: Order | None = Field( - None, + order: Order | None = Field( + default=None, + alias="Order", description="The order of operations when rolling out an updated task. 
Either\nthe old task is shut down before the new task is started, or the\nnew task is started before the old task is shut down.\n", ) class FailureAction1(str, Enum): """ - Action to take if an rolled back task fails to run, or stops + Action to take if a rolled back task fails to run, or stops running during the rollback. """ @@ -2243,40 +2958,58 @@ pause = "pause" +class Order1(str, Enum): + """ + The order of operations when rolling back a task. Either the old + task is shut down before the new task is started, or the new task + is started before the old task is shut down. + + """ + + stop_first = "stop-first" + start_first = "start-first" + + class RollbackConfig(BaseModel): """ Specification for the rollback strategy of the service. """ - Parallelism: int | None = Field( - None, + parallelism: int | None = Field( + default=None, + alias="Parallelism", description="Maximum number of tasks to be rolled back in one iteration (0 means\nunlimited parallelism).\n", ) - Delay: int | None = Field( - None, + delay: int | None = Field( + default=None, + alias="Delay", description="Amount of time between rollback iterations, in nanoseconds.\n", ) - FailureAction: FailureAction1 | None = Field( - None, + failure_action: FailureAction1 | None = Field( + default=None, + alias="FailureAction", description="Action to take if an rolled back task fails to run, or stops\nrunning during the rollback.\n", ) - Monitor: int | None = Field( - None, + monitor: int | None = Field( + default=None, + alias="Monitor", description="Amount of time to monitor each rolled back task for failures, in\nnanoseconds.\n", ) - MaxFailureRatio: float | None = Field( - 0, + max_failure_ratio: float | None = Field( + default=0, + alias="MaxFailureRatio", description="The fraction of tasks that may fail during a rollback before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n", ) - Order: Order | None = Field( - None, + order: Order1 | None = Field( + default=None, + alias="Order", description="The order of operations when rolling back a task. Either the old\ntask is shut down before the new task is started, or the new task\nis started before the old task is shut down.\n", ) class PublishMode(str, Enum): """ - The mode in which port is published. + The mode in which port is published.


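`Delay`, `Monitor`, and their rollback counterparts remain plain nanosecond integers on the wire; a small helper (hypothetical, not part of this diff) keeps call sites readable:

```python
from datetime import timedelta

from simcore_service.models import UpdateConfig  # hypothetical import path

def to_ns(td: timedelta) -> int:
    """Convert a timedelta to the nanosecond count the Docker API expects."""
    return int(td.total_seconds() * 1_000_000_000)

cfg = UpdateConfig.model_validate({"Delay": to_ns(timedelta(seconds=5))})
assert cfg.delay == 5_000_000_000
```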
@@ -2293,14 +3026,19 @@ class PublishMode(str, Enum): class EndpointPortConfig(BaseModel): - Name: str | None = None - Protocol: Type | None = None - TargetPort: int | None = Field(None, description="The port inside the container.") - PublishedPort: int | None = Field(None, description="The port on the swarm hosts.") - PublishMode: PublishMode | None = Field( - PublishMode.ingress, + name: str | None = Field(default=None, alias="Name") + protocol: Type | None = Field(default=None, alias="Protocol") + target_port: int | None = Field( + default=None, alias="TargetPort", description="The port inside the container." + ) + published_port: int | None = Field( + default=None, alias="PublishedPort", description="The port on the swarm hosts." + ) + publish_mode: PublishMode | None = Field( + default=PublishMode.ingress, + alias="PublishMode", description='The mode in which port is published.\n\n


\n\n- "ingress" makes the target port accessible on every node,\n regardless of whether there is a task for the service running on\n that node or not.\n- "host" bypasses the routing mesh and publish the port directly on\n the swarm node where that service is running.\n', - example="ingress", + examples=["ingress"], ) @@ -2319,25 +3057,27 @@ class EndpointSpec(BaseModel): Properties that can be configured to access and load balance a service. """ - Mode: Mode1 | None = Field( - Mode1.vip, + mode: Mode1 | None = Field( + default=Mode1.vip, + alias="Mode", description="The mode of resolution to use for internal load balancing between tasks.\n", ) - Ports: list[EndpointPortConfig] | None = Field( - None, + ports: list[EndpointPortConfig] | None = Field( + default=None, + alias="Ports", description="List of exposed ports that this service is accessible on from the\noutside. Ports can only be provided if `vip` resolution mode is used.\n", ) class VirtualIP(BaseModel): - NetworkID: str | None = None - Addr: str | None = None + network_id: str | None = Field(default=None, alias="NetworkID") + addr: str | None = Field(default=None, alias="Addr") class Endpoint(BaseModel): - Spec: EndpointSpec | None = None - Ports: list[EndpointPortConfig] | None = None - VirtualIPs: list[VirtualIP] | None = None + spec: EndpointSpec | None = Field(default=None, alias="Spec") + ports: list[EndpointPortConfig] | None = Field(default=None, alias="Ports") + virtual_i_ps: list[VirtualIP] | None = Field(default=None, alias="VirtualIPs") class State(str, Enum): @@ -2351,69 +3091,80 @@ class UpdateStatus(BaseModel): The status of a service update. """ - State: State | None = None - StartedAt: str | None = None - CompletedAt: str | None = None - Message: str | None = None + state: State | None = Field(default=None, alias="State") + started_at: str | None = Field(default=None, alias="StartedAt") + completed_at: str | None = Field(default=None, alias="CompletedAt") + message: str | None = Field(default=None, alias="Message") class ServiceStatus(BaseModel): """ - The status of the service's tasks. Provided only when requested as + The status of the service's tasks. Provided only when requested as part of a ServiceList operation. """ - RunningTasks: int | None = Field( - None, + running_tasks: int | None = Field( + default=None, + alias="RunningTasks", description="The number of tasks for the service currently in the Running state.\n", - example=7, + examples=[7], ) - DesiredTasks: int | None = Field( - None, + desired_tasks: int | None = Field( + default=None, + alias="DesiredTasks", description="The number of tasks for the service desired to be running.\nFor replicated services, this is the replica count from the\nservice spec. For global services, this is computed by taking\ncount of all tasks for the service with a Desired State other\nthan Shutdown.\n", - example=10, + examples=[10], ) - CompletedTasks: int | None = Field( - None, + completed_tasks: int | None = Field( + default=None, + alias="CompletedTasks", description="The number of tasks for a job that are in the Completed state.\nThis field must be cross-referenced with the service type, as the\nvalue of 0 may mean the service is not in a job mode, or it may\nmean the job-mode service has no tasks yet Completed.\n", ) class JobStatus(BaseModel): """ - The status of the service when it is in one of ReplicatedJob or + The status of the service when it is in one of ReplicatedJob or GlobalJob modes. Absent on Replicated and Global mode services. 
The JobIteration is an ObjectVersion, but unlike the Service's version, does not need to be sent with an update request. """ - JobIteration: ObjectVersion | None = Field( - None, + job_iteration: ObjectVersion | None = Field( + default=None, + alias="JobIteration", description='JobIteration is a value increased each time a Job is executed,\nsuccessfully or otherwise. "Executed", in this case, means the\njob as a whole has been started, not that an individual Task has\nbeen launched. A job is "Executed" when its ServiceSpec is\nupdated. JobIteration can be used to disambiguate Tasks belonging\nto different executions of a job. Though JobIteration will\nincrease with each subsequent execution, it may not necessarily\nincrease by 1, and so JobIteration should not be used to\n', ) - LastExecution: str | None = Field( - None, + last_execution: str | None = Field( + default=None, + alias="LastExecution", description="The last time, as observed by the server, that this job was\nstarted.\n", ) class ImageDeleteResponseItem(BaseModel): - Untagged: str | None = Field( - None, description="The image ID of an image that was untagged" + untagged: str | None = Field( + default=None, + alias="Untagged", + description="The image ID of an image that was untagged", ) - Deleted: str | None = Field( - None, description="The image ID of an image that was deleted" + deleted: str | None = Field( + default=None, + alias="Deleted", + description="The image ID of an image that was deleted", ) class ServiceUpdateResponse(BaseModel): - Warnings: list[str] | None = Field(None, description="Optional warning messages") + warnings: list[str] | None = Field( + default=None, alias="Warnings", description="Optional warning messages" + ) class HostConfig1(BaseModel): - NetworkMode: str | None = None + network_mode: str | None = Field(default=None, alias="NetworkMode") class Driver(BaseModel): @@ -2421,76 +3172,99 @@ class Driver(BaseModel): Driver represents a driver (network, logging, secrets). """ - name: str = Field(..., description="Name of the driver.") + name: str = Field( + ..., alias="Name", description="Name of the driver.", examples=["some-driver"] + ) options: dict[str, str] | None = Field( - None, + default=None, + alias="Options", description="Key/value map of driver-specific options.", - example={ - "OptionA": "value for driver-specific option A", - "OptionB": "value for driver-specific option B", - }, + examples=[ + { + "OptionA": "value for driver-specific option A", + "OptionB": "value for driver-specific option B", + } + ], ) class SecretSpec(BaseModel): - name: str | None = Field(None, description="User-defined name of the secret.") + name: str | None = Field( + default=None, alias="Name", description="User-defined name of the secret." 
+ ) labels: dict[str, str] | None = Field( - None, + default=None, + alias="Labels", description="User-defined key/value metadata.", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], ) data: str | None = Field( - None, + default=None, + alias="Data", description="Base64-url-safe-encoded ([RFC 4648](https://tools.ietf.org/html/rfc4648#section-5))\ndata to store as secret.\n\nThis field is only used to _create_ a secret, and is not returned by\nother endpoints.\n", - example="", + examples=[""], ) driver: Driver | None = Field( - None, + default=None, + alias="Driver", description="Name of the secrets driver used to fetch the secret's value from an\nexternal secret store.\n", ) templating: Driver | None = Field( - None, + default=None, + alias="Templating", description="Templating driver, if applicable\n\nTemplating controls whether and how to evaluate the config payload as\na template. If no driver is set, no templating is used.\n", ) class Secret1(BaseModel): - ID: str | None = Field(None, example="blt1owaxmitz71s9v5zh81zun") - Version: ObjectVersion | None = None - CreatedAt: str | None = Field(None, example="2017-07-20T13:55:28.678958722Z") - UpdatedAt: str | None = Field(None, example="2017-07-20T13:55:28.678958722Z") - Spec: SecretSpec | None = None + id: str | None = Field( + default=None, alias="ID", examples=["blt1owaxmitz71s9v5zh81zun"] + ) + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field( + default=None, alias="CreatedAt", examples=["2017-07-20T13:55:28.678958722Z"] + ) + updated_at: str | None = Field( + default=None, alias="UpdatedAt", examples=["2017-07-20T13:55:28.678958722Z"] + ) + spec: SecretSpec | None = Field(default=None, alias="Spec") class ConfigSpec(BaseModel): - Name: str | None = Field(None, description="User-defined name of the config.") - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value metadata." + name: str | None = Field( + default=None, alias="Name", description="User-defined name of the config." + ) + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." ) - Data: str | None = Field( - None, + data: str | None = Field( + default=None, + alias="Data", description="Base64-url-safe-encoded ([RFC 4648](https://tools.ietf.org/html/rfc4648#section-5))\nconfig data.\n", ) - Templating: Driver | None = Field( - None, + templating: Driver | None = Field( + default=None, + alias="Templating", description="Templating driver, if applicable\n\nTemplating controls whether and how to evaluate the config payload as\na template. If no driver is set, no templating is used.\n", ) class Config2(BaseModel): - ID: str | None = None - Version: ObjectVersion | None = None - CreatedAt: str | None = None - UpdatedAt: str | None = None - Spec: ConfigSpec | None = None + id: str | None = Field(default=None, alias="ID") + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field(default=None, alias="CreatedAt") + updated_at: str | None = Field(default=None, alias="UpdatedAt") + spec: ConfigSpec | None = Field(default=None, alias="Spec") class Status2(str, Enum): """ - String representation of the container state. Can be one of "created", + String representation of the container state. 
Can be one of "created", "running", "paused", "restarting", "removing", "exited", or "dead". """ @@ -2509,20 +3283,28 @@ class ContainerWaitExitError(BaseModel): container waiting error, if any """ - Message: str | None = Field(None, description="Details of an error") + message: str | None = Field( + default=None, alias="Message", description="Details of an error" + ) class Platform1(BaseModel): - Name: str + name: str = Field(..., alias="Name") class Component(BaseModel): - Name: str = Field(..., description="Name of the component\n", example="Engine") - Version: str = Field( - ..., description="Version of the component\n", example="19.03.12" + name: str = Field( + ..., alias="Name", description="Name of the component\n", examples=["Engine"] ) - Details: dict[str, Any] | None = Field( - None, + version: str = Field( + ..., + alias="Version", + description="Version of the component\n", + examples=["19.03.12"], + ) + details: dict[str, Any] | None = Field( + default=None, + alias="Details", description="Key/value pairs of strings with additional information about the\ncomponent. These values are intended for informational purposes\nonly, and their content is not defined, and not part of the API\nspecification.\n\nThese messages can be printed by the client as information to the user.\n", ) @@ -2533,57 +3315,71 @@ class SystemVersion(BaseModel): """ - Platform: Platform1 | None = None - Components: list[Component] | None = Field( - None, description="Information about system components\n" + platform: Platform1 | None = Field(default=None, alias="Platform") + components: list[Component] | None = Field( + default=None, + alias="Components", + description="Information about system components\n", ) - Version: str | None = Field( - None, description="The version of the daemon", example="19.03.12" + version: str | None = Field( + default=None, + alias="Version", + description="The version of the daemon", + examples=["19.03.12"], ) - ApiVersion: str | None = Field( - None, + api_version: str | None = Field( + default=None, + alias="ApiVersion", description="The default (and highest) API version that is supported by the daemon\n", - example="1.40", + examples=["1.40"], ) - MinAPIVersion: str | None = Field( - None, + min_api_version: str | None = Field( + default=None, + alias="MinAPIVersion", description="The minimum API version that is supported by the daemon\n", - example="1.12", + examples=["1.12"], ) - GitCommit: str | None = Field( - None, + git_commit: str | None = Field( + default=None, + alias="GitCommit", description="The Git commit of the source code that was used to build the daemon\n", - example="48a66213fe", + examples=["48a66213fe"], ) - GoVersion: str | None = Field( - None, + go_version: str | None = Field( + default=None, + alias="GoVersion", description="The version Go used to compile the daemon, and the version of the Go\nruntime in use.\n", - example="go1.13.14", + examples=["go1.13.14"], ) - Os: str | None = Field( - None, + os: str | None = Field( + default=None, + alias="Os", description='The operating system that the daemon is running on ("linux" or "windows")\n', - example="linux", + examples=["linux"], ) - Arch: str | None = Field( - None, + arch: str | None = Field( + default=None, + alias="Arch", description="The architecture that the daemon is running on\n", - example="amd64", + examples=["amd64"], ) - KernelVersion: str | None = Field( - None, + kernel_version: str | None = Field( + default=None, + alias="KernelVersion", description="The kernel version (`uname -r`) 
that the daemon is running on.\n\nThis field is omitted when empty.\n", - example="4.19.76-linuxkit", + examples=["4.19.76-linuxkit"], ) - Experimental: bool | None = Field( - None, + experimental: bool | None = Field( + default=None, + alias="Experimental", description="Indicates if the daemon is started with experimental features enabled.\n\nThis field is omitted when empty / false.\n", - example=True, + examples=[True], ) - BuildTime: str | None = Field( - None, + build_time: str | None = Field( + default=None, + alias="BuildTime", description="The date and time that the daemon was compiled.\n", - example="2020-06-22T15:49:27.000000000+00:00", + examples=["2020-06-22T15:49:27.000000000+00:00"], ) @@ -2610,7 +3406,7 @@ class CgroupVersion(str, Enum): class Isolation2(str, Enum): """ - Represents the isolation technology to use as a default for containers. + Represents the isolation technology to use as a default for containers. The supported values are platform-specific. If no isolation value is specified on daemon start, on Windows client, @@ -2626,15 +3422,20 @@ class Isolation2(str, Enum): class DefaultAddressPool(BaseModel): - Base: str | None = Field( - None, description="The network address in CIDR format", example="10.10.0.0/16" + base: str | None = Field( + default=None, + alias="Base", + description="The network address in CIDR format", + examples=["10.10.0.0/16"], + ) + size: int | None = Field( + default=None, alias="Size", description="The network pool size", examples=["24"] ) - Size: int | None = Field(None, description="The network pool size", example="24") class PluginsInfo(BaseModel): """ - Available plugins per type. + Available plugins per type.


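`DefaultAddressPool.base` carries a CIDR string, so the stdlib `ipaddress` module can sanity-check pool settings cheaply (hypothetical import path again):

```python
import ipaddress

from simcore_service.models import DefaultAddressPool  # hypothetical import path

pool = DefaultAddressPool.model_validate({"Base": "10.10.0.0/16", "Size": 24})
net = ipaddress.ip_network(pool.base)
assert net.num_addresses == 2**16  # the /16 base pool
assert pool.size == 24             # subnets carved from it will be /24s
```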
@@ -2644,34 +3445,39 @@ class PluginsInfo(BaseModel): """ - Volume: list[str] | None = Field( - None, + volume: list[str] | None = Field( + default=None, + alias="Volume", description="Names of available volume-drivers, and network-driver plugins.", - example=["local"], + examples=[["local"]], ) - Network: list[str] | None = Field( - None, + network: list[str] | None = Field( + default=None, + alias="Network", description="Names of available network-drivers, and network-driver plugins.", - example=["bridge", "host", "ipvlan", "macvlan", "null", "overlay"], + examples=[["bridge", "host", "ipvlan", "macvlan", "null", "overlay"]], ) - Authorization: list[str] | None = Field( - None, + authorization: list[str] | None = Field( + default=None, + alias="Authorization", description="Names of available authorization plugins.", - example=["img-authz-plugin", "hbm"], + examples=[["img-authz-plugin", "hbm"]], ) - Log: list[str] | None = Field( - None, + log: list[str] | None = Field( + default=None, + alias="Log", description="Names of available logging-drivers, and logging-driver plugins.", - example=[ - "awslogs", - "fluentd", - "gcplogs", - "gelf", - "journald", - "json-file", - "logentries", - "splunk", - "syslog", + examples=[ + [ + "awslogs", + "fluentd", + "gcplogs", + "gelf", + "journald", + "json-file", + "splunk", + "syslog", + ] ], ) @@ -2681,35 +3487,41 @@ class IndexInfo(BaseModel): IndexInfo contains information about a registry. """ - Name: str | None = Field( - None, + name: str | None = Field( + default=None, + alias="Name", description='Name of the registry, such as "docker.io".\n', - example="docker.io", + examples=["docker.io"], ) - Mirrors: list[str] | None = Field( - None, + mirrors: list[str] | None = Field( + default=None, + alias="Mirrors", description="List of mirrors, expressed as URIs.\n", - example=[ - "https://hub-mirror.corp.example.com:5000/", - "https://registry-2.docker.io/", - "https://registry-3.docker.io/", + examples=[ + [ + "https://hub-mirror.corp.example.com:5000/", + "https://registry-2.docker.io/", + "https://registry-3.docker.io/", + ] ], ) - Secure: bool | None = Field( - None, + secure: bool | None = Field( + default=None, + alias="Secure", description="Indicates if the registry is part of the list of insecure\nregistries.\n\nIf `false`, the registry is insecure. Insecure registries accept\nun-encrypted (HTTP) and/or untrusted (HTTPS with certificates from\nunknown CAs) communication.\n\n> **Warning**: Insecure registries can be useful when running a local\n> registry. However, because its use creates security vulnerabilities\n> it should ONLY be enabled for testing purposes. For increased\n> security, users should add their CA to their system's list of\n> trusted CAs instead of enabling this option.\n", - example=True, + examples=[True], ) - Official: bool | None = Field( - None, + official: bool | None = Field( + default=None, + alias="Official", description="Indicates whether this is an official registry (i.e., Docker Hub / docker.io)\n", - example=True, + examples=[True], ) class Runtime(BaseModel): """ - Runtime describes an [OCI compliant](https://github.com/opencontainers/runtime-spec) + Runtime describes an [OCI compliant](https://github.com/opencontainers/runtime-spec) runtime. The runtime is invoked by the daemon via the `containerd` daemon. 
OCI @@ -2719,34 +3531,37 @@ class Runtime(BaseModel): """ path: str | None = Field( - None, + default=None, description="Name and, optional, path, of the OCI executable binary.\n\nIf the path is omitted, the daemon searches the host's `$PATH` for the\nbinary and uses the first result.\n", - example="/usr/local/bin/my-oci-runtime", + examples=["/usr/local/bin/my-oci-runtime"], ) - runtimeArgs: list[str] | None = Field( - None, + runtime_args: list[str] | None = Field( + default=None, + alias="runtimeArgs", description="List of command-line arguments to pass to the runtime when invoked.\n", - example=["--debug", "--systemd-cgroup=false"], + examples=[["--debug", "--systemd-cgroup=false"]], ) class Commit(BaseModel): """ - Commit holds the Git-commit (SHA1) that a binary was built from, as + Commit holds the Git-commit (SHA1) that a binary was built from, as reported in the version-string of external tools, such as `containerd`, or `runC`. """ - ID: str | None = Field( - None, + id: str | None = Field( + default=None, + alias="ID", description="Actual commit ID of external tool.", - example="cfb82a876ecc11b5ca0977d1733adbe58599088a", + examples=["cfb82a876ecc11b5ca0977d1733adbe58599088a"], ) - Expected: str | None = Field( - None, + expected: str | None = Field( + default=None, + alias="Expected", description="Commit ID of external tool expected by dockerd as set at build time.\n", - example="2d41c047c83e09a6d61d464906feb2a2f3c52aa4", + examples=["2d41c047c83e09a6d61d464906feb2a2f3c52aa4"], ) @@ -2755,7 +3570,7 @@ class LocalNodeState(str, Enum): """ Current local status of this node. """ - _ = "" + field_ = "" inactive = "inactive" pending = "pending" active = "active" @@ -2768,11 +3583,15 @@ class PeerNode(BaseModel): """ Represents a peer-node in the swarm """ - NodeID: str | None = Field( - None, description="Unique identifier of for this node in the swarm." + node_id: str | None = Field( + default=None, + alias="NodeID", + description="Unique identifier for this node in the swarm.", ) - Addr: str | None = Field( - None, description="IP address and ports at which this node can be reached.\n" + addr: str | None = Field( + default=None, + alias="Addr", + description="IP address and ports at which this node can be reached.\n", ) @@ -2782,39 +3601,47 @@ class NetworkAttachmentConfig(BaseModel): """ - Target: str | None = Field( - None, + target: str | None = Field( + default=None, + alias="Target", description="The target network for attachment. Must be a network name or ID.\n", ) - Aliases: list[str] | None = Field( - None, + aliases: list[str] | None = Field( + default=None, + alias="Aliases", description="Discoverable alternate names for the service on this network.\n", ) - DriverOpts: dict[str, str] | None = Field( - None, description="Driver attachment options for the network target.\n" + driver_opts: dict[str, str] | None = Field( + default=None, + alias="DriverOpts", + description="Driver attachment options for the network target.\n", ) class EventActor(BaseModel): """ - Actor describes something that generates events, like a container, network, + Actor describes something that generates events, like a container, network, or a volume.
""" - ID: str | None = Field( - None, + id: str | None = Field( + default=None, + alias="ID", description="The ID of the object emitting the event", - example="ede54ee1afda366ab42f824e8a5ffd195155d853ceaec74a927f249ea270c743", + examples=["ede54ee1afda366ab42f824e8a5ffd195155d853ceaec74a927f249ea270c743"], ) - Attributes: dict[str, str] | None = Field( - None, + attributes: dict[str, str] | None = Field( + default=None, + alias="Attributes", description="Various key/value attributes of the object, depending on its type.\n", - example={ - "com.example.some-label": "some-label-value", - "image": "alpine:latest", - "name": "my-container", - }, + examples=[ + { + "com.example.some-label": "some-label-value", + "image": "alpine:latest", + "name": "my-container", + } + ], ) @@ -2831,14 +3658,14 @@ class Type5(str, Enum): network = "network" node = "node" plugin = "plugin" - secret = "secret" # nosec + secret = "secret" service = "service" volume = "volume" class Scope1(str, Enum): """ - Scope of the event. Engine events are `local` scope. Cluster (Swarm) + Scope of the event. Engine events are `local` scope. Cluster (Swarm) events are `swarm` scope. """ @@ -2853,104 +3680,126 @@ class SystemEventsResponse(BaseModel): """ - Type: Type5 | None = Field( - None, description="The type of object emitting the event", example="container" + type: Type5 | None = Field( + default=None, + alias="Type", + description="The type of object emitting the event", + examples=["container"], + ) + action: str | None = Field( + default=None, + alias="Action", + description="The type of event", + examples=["create"], ) - Action: str | None = Field(None, description="The type of event", example="create") - Actor: EventActor | None = None + actor: EventActor | None = Field(default=None, alias="Actor") scope: Scope1 | None = Field( - None, + default=None, description="Scope of the event. Engine events are `local` scope. Cluster (Swarm)\nevents are `swarm` scope.\n", ) - time: int | None = Field(None, description="Timestamp of event", example=1629574695) - timeNano: int | None = Field( - None, + time: int | None = Field( + default=None, description="Timestamp of event", examples=[1629574695] + ) + time_nano: int | None = Field( + default=None, + alias="timeNano", description="Timestamp of event, with nanosecond accuracy", - example=1629574695515050031, + examples=[1629574695515050031], ) class OCIDescriptor(BaseModel): """ - A descriptor struct containing digest, media type, and size, as defined in + A descriptor struct containing digest, media type, and size, as defined in the [OCI Content Descriptors Specification](https://github.com/opencontainers/image-spec/blob/v1.0.1/descriptor.md). 
""" - mediaType: str | None = Field( - None, + media_type: str | None = Field( + default=None, + alias="mediaType", description="The media type of the object this schema refers to.\n", - example="application/vnd.docker.distribution.manifest.v2+json", + examples=["application/vnd.docker.distribution.manifest.v2+json"], ) digest: str | None = Field( - None, + default=None, description="The digest of the targeted content.\n", - example="sha256:c0537ff6a5218ef531ece93d4984efc99bbf3f7497c0a7726c88e2bb7584dc96", + examples=[ + "sha256:c0537ff6a5218ef531ece93d4984efc99bbf3f7497c0a7726c88e2bb7584dc96" + ], ) size: int | None = Field( - None, description="The size in bytes of the blob.\n", example=3987495 + default=None, description="The size in bytes of the blob.\n", examples=[3987495] ) class OCIPlatform(BaseModel): """ - Describes the platform which the image in the manifest runs on, as defined + Describes the platform which the image in the manifest runs on, as defined in the [OCI Image Index Specification](https://github.com/opencontainers/image-spec/blob/v1.0.1/image-index.md). """ architecture: str | None = Field( - None, + default=None, description="The CPU architecture, for example `amd64` or `ppc64`.\n", - example="arm", + examples=["arm"], ) os: str | None = Field( - None, + default=None, description="The operating system, for example `linux` or `windows`.\n", - example="windows", + examples=["windows"], ) os_version: str | None = Field( - None, + default=None, alias="os.version", description="Optional field specifying the operating system version, for example on\nWindows `10.0.19041.1165`.\n", - example="10.0.19041.1165", + examples=["10.0.19041.1165"], ) os_features: list[str] | None = Field( - None, + default=None, alias="os.features", description="Optional field specifying an array of strings, each listing a required\nOS feature (for example on Windows `win32k`).\n", - example=["win32k"], + examples=[["win32k"]], ) variant: str | None = Field( - None, + default=None, description="Optional field specifying a variant of the CPU, for example `v7` to\nspecify ARMv7 when architecture is `arm`.\n", - example="v7", + examples=["v7"], ) class DistributionInspectResponse(BaseModel): """ - Describes the result obtained from contacting the registry to retrieve + Describes the result obtained from contacting the registry to retrieve image metadata. """ - Descriptor: OCIDescriptor - Platforms: list[OCIPlatform] = Field( - ..., description="An array containing all platforms supported by the image.\n" + descriptor: OCIDescriptor = Field(..., alias="Descriptor") + platforms: list[OCIPlatform] = Field( + ..., + alias="Platforms", + description="An array containing all platforms supported by the image.\n", ) class ResourceObject(BaseModel): """ - An object describing the resources which can be advertised by a node and + An object describing the resources which can be advertised by a node and requested by a task. 
""" - NanoCPUs: int | None = Field(None, example=4000000000) - MemoryBytes: int | None = Field(None, example=8272408576) - GenericResources: GenericResources | None = None + nano_cp_us: int | None = Field( + default=None, alias="NanoCPUs", examples=[4000000000] + ) + memory_bytes: int | None = Field( + default=None, alias="MemoryBytes", examples=[8272408576] + ) + generic_resources: GenericResources | None = Field( + default=None, alias="GenericResources" + ) class Health(BaseModel): @@ -2959,168 +3808,56 @@ class Health(BaseModel): """ - Status: Status | None = Field( - None, + status: Status | None = Field( + default=None, + alias="Status", description='Status is one of `none`, `starting`, `healthy` or `unhealthy`\n\n- "none" Indicates there is no healthcheck\n- "starting" Starting indicates that the container is not yet ready\n- "healthy" Healthy indicates that the container is running correctly\n- "unhealthy" Unhealthy indicates that the container has a problem\n', - example="healthy", + examples=["healthy"], ) - FailingStreak: int | None = Field( - None, + failing_streak: int | None = Field( + default=None, + alias="FailingStreak", description="FailingStreak is the number of consecutive failures", - example=0, + examples=[0], ) - Log: list[HealthcheckResult] | None = Field( - None, description="Log contains the last few results (oldest first)\n" + log: list[HealthcheckResult] | None = Field( + default=None, + alias="Log", + description="Log contains the last few results (oldest first)\n", ) -class HostConfig(Resources): +class PortMap(RootModel[dict[str, list[PortBinding]] | None]): """ - Container configuration that depends on the host we are running on + PortMap describes the mapping of container ports to host ports, using the + container's port-number and protocol as key in the format `/`, + for example, `80/udp`. + + If a container's port is mapped for multiple protocols, separate entries + are added to the mapping table. + """ - Binds: list[str] | None = Field( - None, - description="A list of volume bindings for this container. Each volume binding\nis a string in one of these forms:\n\n- `host-src:container-dest[:options]` to bind-mount a host path\n into the container. Both `host-src`, and `container-dest` must\n be an _absolute_ path.\n- `volume-name:container-dest[:options]` to bind-mount a volume\n managed by a volume driver into the container. `container-dest`\n must be an _absolute_ path.\n\n`options` is an optional, comma-delimited list of:\n\n- `nocopy` disables automatic copying of data from the container\n path to the volume. The `nocopy` flag only applies to named volumes.\n- `[ro|rw]` mounts a volume read-only or read-write, respectively.\n If omitted or set to `rw`, volumes are mounted read-write.\n- `[z|Z]` applies SELinux labels to allow or deny multiple containers\n to read and write to the same volume.\n - `z`: a _shared_ content label is applied to the content. This\n label indicates that multiple containers can share the volume\n content, for both reading and writing.\n - `Z`: a _private unshared_ label is applied to the content.\n This label indicates that only the current container can use\n a private volume. Labeling systems such as SELinux require\n proper labels to be placed on volume content that is mounted\n into a container. Without a label, the security system can\n prevent a container's processes from using the content. 
By\n default, the labels set by the host operating system are not\n modified.\n- `[[r]shared|[r]slave|[r]private]` specifies mount\n [propagation behavior](https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt).\n This only applies to bind-mounted volumes, not internal volumes\n or named volumes. Mount propagation requires the source mount\n point (the location where the source directory is mounted in the\n host operating system) to have the correct propagation properties.\n For shared volumes, the source mount point must be set to `shared`.\n For slave volumes, the mount must be set to either `shared` or\n `slave`.\n", - ) - ContainerIDFile: str | None = Field( - None, description="Path to a file where the container ID is written" - ) - LogConfig: LogConfig | None = Field( - None, description="The logging configuration for this container" - ) - NetworkMode: str | None = Field( - None, - description="Network mode to use for this container. Supported standard values\nare: `bridge`, `host`, `none`, and `container:`. Any\nother value is taken as a custom network's name to which this\ncontainer should connect to.\n", - ) - PortBindings: PortMap | None = None - RestartPolicy: RestartPolicy | None = None - AutoRemove: bool | None = Field( - None, - description="Automatically remove the container when the container's process\nexits. This has no effect if `RestartPolicy` is set.\n", - ) - VolumeDriver: str | None = Field( - None, description="Driver that this container uses to mount volumes." - ) - VolumesFrom: list[str] | None = Field( - None, - description="A list of volumes to inherit from another container, specified in\nthe form `[:]`.\n", - ) - Mounts: list[Mount] | None = Field( - None, description="Specification for mounts to be added to the container.\n" - ) - CapAdd: list[str] | None = Field( - None, - description="A list of kernel capabilities to add to the container. Conflicts\nwith option 'Capabilities'.\n", - ) - CapDrop: list[str] | None = Field( - None, - description="A list of kernel capabilities to drop from the container. Conflicts\nwith option 'Capabilities'.\n", - ) - CgroupnsMode: CgroupnsMode | None = Field( - None, - description='cgroup namespace mode for the container. Possible values are:\n\n- `"private"`: the container runs in its own private cgroup namespace\n- `"host"`: use the host system\'s cgroup namespace\n\nIf not specified, the daemon default is used, which can either be `"private"`\nor `"host"`, depending on daemon version, kernel support and configuration.\n', - ) - Dns: list[str] | None = Field( - None, description="A list of DNS servers for the container to use." - ) - DnsOptions: list[str] | None = Field(None, description="A list of DNS options.") - DnsSearch: list[str] | None = Field( - None, description="A list of DNS search domains." - ) - ExtraHosts: list[str] | None = Field( - None, - description='A list of hostnames/IP mappings to add to the container\'s `/etc/hosts`\nfile. Specified in the form `["hostname:IP"]`.\n', - ) - GroupAdd: list[str] | None = Field( - None, - description="A list of additional groups that the container process will run as.\n", - ) - IpcMode: str | None = Field( - None, - description='IPC sharing mode for the container. 
Possible values are:\n\n- `"none"`: own private IPC namespace, with /dev/shm not mounted\n- `"private"`: own private IPC namespace\n- `"shareable"`: own private IPC namespace, with a possibility to share it with other containers\n- `"container:"`: join another (shareable) container\'s IPC namespace\n- `"host"`: use the host system\'s IPC namespace\n\nIf not specified, daemon default is used, which can either be `"private"`\nor `"shareable"`, depending on daemon version and configuration.\n', - ) - Cgroup: str | None = Field(None, description="Cgroup to use for the container.") - Links: list[str] | None = Field( - None, - description="A list of links for the container in the form `container_name:alias`.\n", - ) - OomScoreAdj: int | None = Field( - None, - description="An integer value containing the score given to the container in\norder to tune OOM killer preferences.\n", - example=500, - ) - PidMode: str | None = Field( - None, - description='Set the PID (Process) Namespace mode for the container. It can be\neither:\n\n- `"container:"`: joins another container\'s PID namespace\n- `"host"`: use the host\'s PID namespace inside the container\n', - ) - Privileged: bool | None = Field( - None, description="Gives the container full access to the host." - ) - PublishAllPorts: bool | None = Field( - None, - description="Allocates an ephemeral host port for all of a container's\nexposed ports.\n\nPorts are de-allocated when the container stops and allocated when\nthe container starts. The allocated port might be changed when\nrestarting the container.\n\nThe port is selected from the ephemeral port range that depends on\nthe kernel. For example, on Linux the range is defined by\n`/proc/sys/net/ipv4/ip_local_port_range`.\n", - ) - ReadonlyRootfs: bool | None = Field( - None, description="Mount the container's root filesystem as read only." - ) - SecurityOpt: list[str] | None = Field( - None, - description="A list of string values to customize labels for MLS systems, such\nas SELinux.\n", - ) - StorageOpt: dict[str, str] | None = Field( - None, - description='Storage driver options for this container, in the form `{"size": "120G"}`.\n', - ) - Tmpfs: dict[str, str] | None = Field( - None, - description='A map of container directories which should be replaced by tmpfs\nmounts, and their corresponding mount options. For example:\n\n```\n{ "/run": "rw,noexec,nosuid,size=65536k" }\n```\n', - ) - UTSMode: str | None = Field( - None, description="UTS namespace to use for the container." - ) - UsernsMode: str | None = Field( - None, - description="Sets the usernamespace mode for the container when usernamespace\nremapping option is enabled.\n", - ) - ShmSize: int | None = Field( - None, - description="Size of `/dev/shm` in bytes. If omitted, the system uses 64MB.\n", - ge=0, - ) - Sysctls: dict[str, str] | None = Field( - None, - description='A list of kernel parameters (sysctls) to set in the container.\nFor example:\n\n```\n{"net.ipv4.ip_forward": "1"}\n```\n', - ) - Runtime: str | None = Field(None, description="Runtime to use with this container.") - ConsoleSize: list[ConsoleSizeItem] | None = Field( - None, - description="Initial console size, as an `[height, width]` array. (Windows only)\n", - max_items=2, - min_items=2, - ) - Isolation: Isolation | None = Field( - None, description="Isolation technology of the container. 
(Windows only)\n" - ) - MaskedPaths: list[str] | None = Field( - None, - description="The list of paths to be masked inside the container (this overrides\nthe default set of paths).\n", - ) - ReadonlyPaths: list[str] | None = Field( - None, - description="The list of paths to be set as read-only inside the container\n(this overrides the default set of paths).\n", - ) + root: dict[str, list[PortBinding]] | None = None class IPAM(BaseModel): - Driver: str | None = Field("default", description="Name of the IPAM driver to use.") - Config_: list[IPAMConfig] | None = Field( - None, + driver: str | None = Field( + default="default", + alias="Driver", + description="Name of the IPAM driver to use.", + examples=["default"], + ) + config: list[IPAMConfig] | None = Field( + default=None, alias="Config", description='List of IPAM configuration options, specified as a map:\n\n```\n{"Subnet": , "IPRange": , "Gateway": , "AuxAddress": }\n```\n', ) - Options: dict[str, str] | None = Field( - None, description="Driver-specific options, specified as a map." + options: dict[str, str] | None = Field( + default=None, + alias="Options", + description="Driver-specific options, specified as a map.", + examples=[{"foo": "bar"}], ) @@ -3128,26 +3865,27 @@ class BuildInfo(BaseModel): id: str | None = None stream: str | None = None error: str | None = None - errorDetail: ErrorDetail | None = None + error_detail: ErrorDetail | None = Field(default=None, alias="errorDetail") status: str | None = None progress: str | None = None - progressDetail: ProgressDetail | None = None + progress_detail: ProgressDetail | None = Field(default=None, alias="progressDetail") aux: ImageID | None = None class CreateImageInfo(BaseModel): id: str | None = None error: str | None = None + error_detail: ErrorDetail | None = Field(default=None, alias="errorDetail") status: str | None = None progress: str | None = None - progressDetail: ProgressDetail | None = None + progress_detail: ProgressDetail | None = Field(default=None, alias="progressDetail") class PushImageInfo(BaseModel): error: str | None = None status: str | None = None progress: str | None = None - progressDetail: ProgressDetail | None = None + progress_detail: ProgressDetail | None = Field(default=None, alias="progressDetail") class EndpointSettings(BaseModel): @@ -3155,109 +3893,145 @@ class EndpointSettings(BaseModel): Configuration for a network endpoint. 
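The `PortMap` rewrite above shows the Pydantic v2 replacement for v1 custom root types: the `__root__` field is gone, and the model subclasses `RootModel` instead. A sketch of how the new shape is used, where `PortBindingSketch` is a simplified stand-in for the real `PortBinding` model defined elsewhere in this file:

```python
# Sketch: RootModel-based mapping type, mirroring the PortMap change above.
from pydantic import BaseModel, Field, RootModel


class PortBindingSketch(BaseModel):
    host_ip: str | None = Field(default=None, alias="HostIp")
    host_port: str | None = Field(default=None, alias="HostPort")


class PortMapSketch(RootModel[dict[str, list[PortBindingSketch]] | None]):
    root: dict[str, list[PortBindingSketch]] | None = None


pm = PortMapSketch.model_validate(
    {"80/tcp": [{"HostIp": "0.0.0.0", "HostPort": "8080"}]}
)
# The payload is reached through .root rather than v1's .__root__.
assert pm.root is not None
assert pm.root["80/tcp"][0].host_port == "8080"
```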
""" - IPAMConfig: EndpointIPAMConfig | None = None - Links: list[str] | None = Field(None, example=["container_1", "container_2"]) - Aliases: list[str] | None = Field(None, example=["server_x", "server_y"]) - NetworkID: str | None = Field( - None, - description="Unique ID of the network.\n", - example="08754567f1f40222263eab4102e1c733ae697e8e354aa9cd6e18d7402835292a", - ) - EndpointID: str | None = Field( - None, - description="Unique ID for the service endpoint in a Sandbox.\n", - example="b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b", - ) - Gateway: str | None = Field( - None, description="Gateway address for this network.\n", example="172.17.0.1" - ) - IPAddress: str | None = Field( - None, description="IPv4 address.\n", example="172.17.0.4" + ipam_config: EndpointIPAMConfig | None = Field(default=None, alias="IPAMConfig") + links: list[str] | None = Field( + default=None, alias="Links", examples=[["container_1", "container_2"]] ) - IPPrefixLen: int | None = Field( - None, description="Mask length of the IPv4 address.\n", example=16 + aliases: list[str] | None = Field( + default=None, alias="Aliases", examples=[["server_x", "server_y"]] ) - IPv6Gateway: str | None = Field( - None, description="IPv6 gateway address.\n", example="2001:db8:2::100" - ) - GlobalIPv6Address: str | None = Field( - None, description="Global IPv6 address.\n", example="2001:db8::5689" - ) - GlobalIPv6PrefixLen: int | None = Field( - None, description="Mask length of the global IPv6 address.\n", example=64 + network_id: str | None = Field( + default=None, + alias="NetworkID", + description="Unique ID of the network.\n", + examples=["08754567f1f40222263eab4102e1c733ae697e8e354aa9cd6e18d7402835292a"], ) - MacAddress: str | None = Field( - None, + endpoint_id: str | None = Field( + default=None, + alias="EndpointID", + description="Unique ID for the service endpoint in a Sandbox.\n", + examples=["b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b"], + ) + gateway: str | None = Field( + default=None, + alias="Gateway", + description="Gateway address for this network.\n", + examples=["172.17.0.1"], + ) + ip_address: str | None = Field( + default=None, + alias="IPAddress", + description="IPv4 address.\n", + examples=["172.17.0.4"], + ) + ip_prefix_len: int | None = Field( + default=None, + alias="IPPrefixLen", + description="Mask length of the IPv4 address.\n", + examples=[16], + ) + i_pv6_gateway: str | None = Field( + default=None, + alias="IPv6Gateway", + description="IPv6 gateway address.\n", + examples=["2001:db8:2::100"], + ) + global_i_pv6_address: str | None = Field( + default=None, + alias="GlobalIPv6Address", + description="Global IPv6 address.\n", + examples=["2001:db8::5689"], + ) + global_i_pv6_prefix_len: int | None = Field( + default=None, + alias="GlobalIPv6PrefixLen", + description="Mask length of the global IPv6 address.\n", + examples=[64], + ) + mac_address: str | None = Field( + default=None, + alias="MacAddress", description="MAC address for the endpoint on this network.\n", - example="02:42:ac:11:00:04", + examples=["02:42:ac:11:00:04"], ) - DriverOpts: dict[str, str] | None = Field( - None, + driver_opts: dict[str, str] | None = Field( + default=None, + alias="DriverOpts", description="DriverOpts is a mapping of driver options and values. 
These options\nare passed directly to the driver and are driver specific.\n", - example={ - "com.example.some-label": "some-value", - "com.example.some-other-label": "some-other-value", - }, + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], ) class NodeDescription(BaseModel): """ - NodeDescription encapsulates the properties of the Node as reported by the + NodeDescription encapsulates the properties of the Node as reported by the agent. """ - Hostname: str | None = Field(None, example="bf3067039e47") - Platform: Platform | None = None - Resources: ResourceObject | None = None - Engine: EngineDescription | None = None - TLSInfo: TLSInfo | None = None + hostname: str | None = Field( + default=None, alias="Hostname", examples=["bf3067039e47"] + ) + platform: Platform | None = Field(default=None, alias="Platform") + resources: ResourceObject | None = Field(default=None, alias="Resources") + engine: EngineDescription | None = Field(default=None, alias="Engine") + tls_info: TLSInfo | None = Field(default=None, alias="TLSInfo") class NodeStatus(BaseModel): """ - NodeStatus represents the status of a node. + NodeStatus represents the status of a node. It provides the current status of the node, as seen by the manager. """ - State: NodeState | None = None - Message: str | None = Field(None, example="") - Addr: str | None = Field( - None, description="IP address of the node.", example="172.17.0.2" + state: NodeState | None = Field(default=None, alias="State") + message: str | None = Field(default=None, alias="Message", examples=[""]) + addr: str | None = Field( + default=None, + alias="Addr", + description="IP address of the node.", + examples=["172.17.0.2"], ) class ManagerStatus(BaseModel): """ - ManagerStatus represents the status of a manager. + ManagerStatus represents the status of a manager. It provides the current status of a node's manager component, if the node is a manager. """ - Leader: bool | None = Field(False, example=True) - Reachability: Reachability | None = None - Addr: str | None = Field( - None, + leader: bool | None = Field(default=False, alias="Leader", examples=[True]) + reachability: Reachability | None = Field(default=None, alias="Reachability") + addr: str | None = Field( + default=None, + alias="Addr", description="The IP address and port at which the manager is reachable.\n", - example="10.0.0.46:2377", + examples=["10.0.0.46:2377"], ) class Resources1(BaseModel): """ - Resource requirements which apply to each individual container created + Resource requirements which apply to each individual container created as part of the service. """ - Limits: Limit | None = Field(None, description="Define resources limits.") - Reservations: ResourceObject | None = Field( - None, description="Define resources reservation." + limits: Limit | None = Field( + default=None, alias="Limits", description="Define resources limits." + ) + reservations: ResourceObject | None = Field( + default=None, alias="Reservations", description="Define resources reservation." ) @@ -3266,66 +4040,85 @@ class TaskSpec(BaseModel): User modifiable task configuration. """ - PluginSpec: PluginSpec | None = Field( - None, + plugin_spec: PluginSpec | None = Field( + default=None, + alias="PluginSpec", description="Plugin spec for the service. *(Experimental release only.)*\n\n
<p><br /></p>
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n", ) - ContainerSpec: ContainerSpec | None = Field( - None, + container_spec: ContainerSpec | None = Field( + default=None, + alias="ContainerSpec", description="Container spec for the service.\n\n
<p><br /></p>
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n", ) - NetworkAttachmentSpec: NetworkAttachmentSpec | None = Field( - None, + network_attachment_spec: NetworkAttachmentSpec | None = Field( + default=None, + alias="NetworkAttachmentSpec", description="Read-only spec type for non-swarm containers attached to swarm overlay\nnetworks.\n\n
<p><br /></p>
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n", ) - Resources: Resources1 | None = Field( - None, + resources: Resources1 | None = Field( + default=None, + alias="Resources", description="Resource requirements which apply to each individual container created\nas part of the service.\n", ) - RestartPolicy: RestartPolicy1 | None = Field( - None, + restart_policy: RestartPolicy1 | None = Field( + default=None, + alias="RestartPolicy", description="Specification for the restart policy which applies to containers\ncreated as part of this service.\n", ) - Placement: Placement | None = None - ForceUpdate: int | None = Field( - None, + placement: Placement | None = Field(default=None, alias="Placement") + force_update: int | None = Field( + default=None, + alias="ForceUpdate", description="A counter that triggers an update even if no relevant parameters have\nbeen changed.\n", ) - Runtime: str | None = Field( - None, + runtime: str | None = Field( + default=None, + alias="Runtime", description="Runtime is the type of runtime specified for the task executor.\n", ) - Networks: list[NetworkAttachmentConfig] | None = Field( - None, description="Specifies which networks the service should attach to." + networks: list[NetworkAttachmentConfig] | None = Field( + default=None, + alias="Networks", + description="Specifies which networks the service should attach to.", ) - LogDriver: LogDriver1 | None = Field( - None, + log_driver: LogDriver1 | None = Field( + default=None, + alias="LogDriver", description="Specifies the log driver to use for tasks created from this spec. If\nnot present, the default one for the swarm will be used, finally\nfalling back to the engine default if not specified.\n", ) class Task(BaseModel): - ID: str | None = Field(None, description="The ID of the task.") - Version: ObjectVersion | None = None - CreatedAt: str | None = None - UpdatedAt: str | None = None - Name: str | None = Field(None, description="Name of the task.") - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value metadata." - ) - Spec: TaskSpec | None = None - ServiceID: str | None = Field( - None, description="The ID of the service this task is part of." - ) - Slot: int | None = None - NodeID: str | None = Field( - None, description="The ID of the node that this task is on." - ) - AssignedGenericResources: GenericResources | None = None - Status: Status1 | None = None - DesiredState: TaskState | None = None - JobIteration: ObjectVersion | None = Field( - None, + id: str | None = Field(default=None, alias="ID", description="The ID of the task.") + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field(default=None, alias="CreatedAt") + updated_at: str | None = Field(default=None, alias="UpdatedAt") + name: str | None = Field( + default=None, alias="Name", description="Name of the task." + ) + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." 
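Another pattern repeated in every model above: v1's `example=` keyword was a loose extra that Pydantic forwarded into the schema, while v2 has a first-class `Field(examples=[...])` that lands in the generated JSON schema. A sketch under the assumption of a single illustrative field (the ID value here is made up):

```python
# Sketch: Field(examples=[...]) is emitted into the JSON schema in v2.
from pydantic import BaseModel, Field


class TaskIdSketch(BaseModel):
    id: str | None = Field(
        default=None, alias="ID", examples=["0kzzo1i0y4jz6027t0k7aezc7"]
    )


schema = TaskIdSketch.model_json_schema(by_alias=True)
assert schema["properties"]["ID"]["examples"] == ["0kzzo1i0y4jz6027t0k7aezc7"]
```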
+ ) + spec: TaskSpec | None = Field(default=None, alias="Spec") + service_id: str | None = Field( + default=None, + alias="ServiceID", + description="The ID of the service this task is part of.", + ) + slot: int | None = Field(default=None, alias="Slot") + node_id: str | None = Field( + default=None, + alias="NodeID", + description="The ID of the node that this task is on.", + ) + assigned_generic_resources: GenericResources | None = Field( + default=None, alias="AssignedGenericResources" + ) + status: Status1 | None = Field(default=None, alias="Status") + desired_state: TaskState | None = Field(default=None, alias="DesiredState") + job_iteration: ObjectVersion | None = Field( + default=None, + alias="JobIteration", description="If the Service this Task belongs to is a job-mode service, contains\nthe JobIteration of the Service this Task was created for. Absent if\nthe Task was created for a Replicated or Global Service.\n", ) @@ -3335,40 +4128,54 @@ class ServiceSpec(BaseModel): User modifiable configuration for a service. """ - Name: str | None = Field(None, description="Name of the service.") - Labels: dict[str, str] | None = Field( - None, description="User-defined key/value metadata." + name: str | None = Field( + default=None, alias="Name", description="Name of the service." + ) + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." + ) + task_template: TaskSpec | None = Field(default=None, alias="TaskTemplate") + mode: Mode | None = Field( + default=None, alias="Mode", description="Scheduling mode for the service." ) - TaskTemplate: TaskSpec | None = None - Mode: Mode | None = Field(None, description="Scheduling mode for the service.") - UpdateConfig: UpdateConfig | None = Field( - None, description="Specification for the update strategy of the service." + update_config: UpdateConfig | None = Field( + default=None, + alias="UpdateConfig", + description="Specification for the update strategy of the service.", ) - RollbackConfig: RollbackConfig | None = Field( - None, description="Specification for the rollback strategy of the service." + rollback_config: RollbackConfig | None = Field( + default=None, + alias="RollbackConfig", + description="Specification for the rollback strategy of the service.", ) - Networks: list[NetworkAttachmentConfig] | None = Field( - None, description="Specifies which networks the service should attach to." + networks: list[NetworkAttachmentConfig] | None = Field( + default=None, + alias="Networks", + description="Specifies which networks the service should attach to.", ) - EndpointSpec: EndpointSpec | None = None + endpoint_spec: EndpointSpec | None = Field(default=None, alias="EndpointSpec") class Service(BaseModel): - ID: str | None = None - Version: ObjectVersion | None = None - CreatedAt: str | None = None - UpdatedAt: str | None = None - Spec: ServiceSpec | None = None - Endpoint: Endpoint | None = None - UpdateStatus: UpdateStatus | None = Field( - None, description="The status of a service update." 
- ) - ServiceStatus: ServiceStatus | None = Field( - None, + id: str | None = Field(default=None, alias="ID") + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field(default=None, alias="CreatedAt") + updated_at: str | None = Field(default=None, alias="UpdatedAt") + spec: ServiceSpec | None = Field(default=None, alias="Spec") + endpoint: Endpoint | None = Field(default=None, alias="Endpoint") + update_status: UpdateStatus | None = Field( + default=None, + alias="UpdateStatus", + description="The status of a service update.", + ) + service_status: ServiceStatus | None = Field( + default=None, + alias="ServiceStatus", description="The status of the service's tasks. Provided only when requested as\npart of a ServiceList operation.\n", ) - JobStatus: JobStatus | None = Field( - None, + job_status: JobStatus | None = Field( + default=None, + alias="JobStatus", description="The status of the service when it is in one of ReplicatedJob or\nGlobalJob modes. Absent on Replicated and Global mode services. The\nJobIteration is an ObjectVersion, but unlike the Service's version,\ndoes not need to be sent with an update request.\n", ) @@ -3378,98 +4185,135 @@ class NetworkSettings1(BaseModel): A summary of the container's network settings """ - Networks: dict[str, EndpointSettings] | None = None + networks: dict[str, EndpointSettings] | None = Field(default=None, alias="Networks") class ContainerSummary(BaseModel): - Id: str | None = Field(None, description="The ID of this container") - Names: list[str] | None = Field( - None, description="The names that this container has been given" - ) - Image: str | None = Field( - None, description="The name of the image used when creating this container" - ) - ImageID: str | None = Field( - None, description="The ID of the image that this container was created from" - ) - Command: str | None = Field( - None, description="Command to run when starting the container" - ) - Created: int | None = Field(None, description="When the container was created") - Ports: list[Port] | None = Field( - None, description="The ports exposed by this container" - ) - SizeRw: int | None = Field( - None, + id: str | None = Field( + default=None, alias="Id", description="The ID of this container" + ) + names: list[str] | None = Field( + default=None, + alias="Names", + description="The names that this container has been given", + ) + image: str | None = Field( + default=None, + alias="Image", + description="The name of the image used when creating this container", + ) + image_id: str | None = Field( + default=None, + alias="ImageID", + description="The ID of the image that this container was created from", + ) + command: str | None = Field( + default=None, + alias="Command", + description="Command to run when starting the container", + ) + created: int | None = Field( + default=None, alias="Created", description="When the container was created" + ) + ports: list[Port] | None = Field( + default=None, alias="Ports", description="The ports exposed by this container" + ) + size_rw: int | None = Field( + default=None, + alias="SizeRw", description="The size of files that have been created or changed by this container", ) - SizeRootFs: int | None = Field( - None, description="The total size of all the files in this container" + size_root_fs: int | None = Field( + default=None, + alias="SizeRootFs", + description="The total size of all the files in this container", ) - Labels: dict[str, str] | None = Field( - None, description="User-defined 
key/value metadata." + labels: dict[str, str] | None = Field( + default=None, alias="Labels", description="User-defined key/value metadata." ) - State: str | None = Field( - None, description="The state of this container (e.g. `Exited`)" + state: str | None = Field( + default=None, + alias="State", + description="The state of this container (e.g. `Exited`)", ) - Status: str | None = Field( - None, + status: str | None = Field( + default=None, + alias="Status", description="Additional human-readable status of this container (e.g. `Exit 0`)", ) - HostConfig: HostConfig1 | None = None - NetworkSettings: NetworkSettings1 | None = Field( - None, description="A summary of the container's network settings" + host_config: HostConfig1 | None = Field(default=None, alias="HostConfig") + network_settings: NetworkSettings1 | None = Field( + default=None, + alias="NetworkSettings", + description="A summary of the container's network settings", ) - Mounts: list[MountPoint] | None = None + mounts: list[MountPoint] | None = Field(default=None, alias="Mounts") class ContainerState(BaseModel): """ - ContainerState stores container's running state. It's part of ContainerJSONBase + ContainerState stores container's running state. It's part of ContainerJSONBase and will be returned by the "inspect" command. """ - Status: Status2 | None = Field( - None, + status: Status2 | None = Field( + default=None, + alias="Status", description='String representation of the container state. Can be one of "created",\n"running", "paused", "restarting", "removing", "exited", or "dead".\n', - example="running", + examples=["running"], ) - Running: bool | None = Field( - None, + running: bool | None = Field( + default=None, + alias="Running", description='Whether this container is running.\n\nNote that a running container can be _paused_. The `Running` and `Paused`\nbooleans are not mutually exclusive:\n\nWhen pausing a container (on Linux), the freezer cgroup is used to suspend\nall processes in the container. Freezing the process requires the process to\nbe running. 
As a result, paused containers are both `Running` _and_ `Paused`.\n\nUse the `Status` field instead to determine if a container\'s state is "running".\n', - example=True, - ) - Paused: bool | None = Field( - None, description="Whether this container is paused.", example=False - ) - Restarting: bool | None = Field( - None, description="Whether this container is restarting.", example=False - ) - OOMKilled: bool | None = Field( - None, + examples=[True], + ) + paused: bool | None = Field( + default=None, + alias="Paused", + description="Whether this container is paused.", + examples=[False], + ) + restarting: bool | None = Field( + default=None, + alias="Restarting", + description="Whether this container is restarting.", + examples=[False], + ) + oom_killed: bool | None = Field( + default=None, + alias="OOMKilled", description="Whether this container has been killed because it ran out of memory.\n", - example=False, - ) - Dead: bool | None = Field(None, example=False) - Pid: int | None = Field( - None, description="The process ID of this container", example=1234 - ) - ExitCode: int | None = Field( - None, description="The last exit code of this container", example=0 - ) - Error: str | None = None - StartedAt: str | None = Field( - None, + examples=[False], + ) + dead: bool | None = Field(default=None, alias="Dead", examples=[False]) + pid: int | None = Field( + default=None, + alias="Pid", + description="The process ID of this container", + examples=[1234], + ) + exit_code: int | None = Field( + default=None, + alias="ExitCode", + description="The last exit code of this container", + examples=[0], + ) + error: str | None = Field(default=None, alias="Error") + started_at: str | None = Field( + default=None, + alias="StartedAt", description="The time when this container was last started.", - example="2020-01-06T09:06:59.461876391Z", + examples=["2020-01-06T09:06:59.461876391Z"], ) - FinishedAt: str | None = Field( - None, + finished_at: str | None = Field( + default=None, + alias="FinishedAt", description="The time when this container last exited.", - example="2020-01-06T09:07:59.461876391Z", + examples=["2020-01-06T09:07:59.461876391Z"], ) - Health: Health | None = None + health: Health | None = Field(default=None, alias="Health") class ContainerWaitResponse(BaseModel): @@ -3477,8 +4321,10 @@ class ContainerWaitResponse(BaseModel): OK response to ContainerWait operation """ - StatusCode: int = Field(..., description="Exit code of the container") - Error: ContainerWaitExitError | None = None + status_code: int = Field( + ..., alias="StatusCode", description="Exit code of the container" + ) + error: ContainerWaitExitError | None = Field(default=None, alias="Error") class RegistryServiceConfig(BaseModel): @@ -3487,59 +4333,67 @@ class RegistryServiceConfig(BaseModel): """ - AllowNondistributableArtifactsCIDRs: list[str] | None = Field( - None, + allow_nondistributable_artifacts_cid_rs: list[str] | None = Field( + default=None, + alias="AllowNondistributableArtifactsCIDRs", description="List of IP ranges to which nondistributable artifacts can be pushed,\nusing the CIDR syntax [RFC 4632](https://tools.ietf.org/html/4632).\n\nSome images (for example, Windows base images) contain artifacts\nwhose distribution is restricted by license. 
When these images are\npushed to a registry, restricted artifacts are not included.\n\nThis configuration override this behavior, and enables the daemon to\npush nondistributable artifacts to all registries whose resolved IP\naddress is within the subnet described by the CIDR syntax.\n\nThis option is useful when pushing images containing\nnondistributable artifacts to a registry on an air-gapped network so\nhosts on that network can pull the images without connecting to\nanother server.\n\n> **Warning**: Nondistributable artifacts typically have restrictions\n> on how and where they can be distributed and shared. Only use this\n> feature to push artifacts to private registries and ensure that you\n> are in compliance with any terms that cover redistributing\n> nondistributable artifacts.\n", - example=["::1/128", "127.0.0.0/8"], + examples=[["::1/128", "127.0.0.0/8"]], ) - AllowNondistributableArtifactsHostnames: list[str] | None = Field( - None, + allow_nondistributable_artifacts_hostnames: list[str] | None = Field( + default=None, + alias="AllowNondistributableArtifactsHostnames", description="List of registry hostnames to which nondistributable artifacts can be\npushed, using the format `[:]` or `[:]`.\n\nSome images (for example, Windows base images) contain artifacts\nwhose distribution is restricted by license. When these images are\npushed to a registry, restricted artifacts are not included.\n\nThis configuration override this behavior for the specified\nregistries.\n\nThis option is useful when pushing images containing\nnondistributable artifacts to a registry on an air-gapped network so\nhosts on that network can pull the images without connecting to\nanother server.\n\n> **Warning**: Nondistributable artifacts typically have restrictions\n> on how and where they can be distributed and shared. Only use this\n> feature to push artifacts to private registries and ensure that you\n> are in compliance with any terms that cover redistributing\n> nondistributable artifacts.\n", - example=[ - "registry.internal.corp.example.com:3000", - "[2001:db8:a0b:12f0::1]:443", + examples=[ + ["registry.internal.corp.example.com:3000", "[2001:db8:a0b:12f0::1]:443"] ], ) - InsecureRegistryCIDRs: list[str] | None = Field( - None, + insecure_registry_cid_rs: list[str] | None = Field( + default=None, + alias="InsecureRegistryCIDRs", description="List of IP ranges of insecure registries, using the CIDR syntax\n([RFC 4632](https://tools.ietf.org/html/4632)). Insecure registries\naccept un-encrypted (HTTP) and/or untrusted (HTTPS with certificates\nfrom unknown CAs) communication.\n\nBy default, local registries (`127.0.0.0/8`) are configured as\ninsecure. All other registries are secure. Communicating with an\ninsecure registry is not possible if the daemon assumes that registry\nis secure.\n\nThis configuration override this behavior, insecure communication with\nregistries whose resolved IP address is within the subnet described by\nthe CIDR syntax.\n\nRegistries can also be marked insecure by hostname. Those registries\nare listed under `IndexConfigs` and have their `Secure` field set to\n`false`.\n\n> **Warning**: Using this option can be useful when running a local\n> registry, but introduces security vulnerabilities. This option\n> should therefore ONLY be used for testing purposes. 
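Note the doubled brackets in `examples=[["::1/128", "127.0.0.0/8"]]` above: `examples` holds a list of example values, so for a list-valued field each example is itself a list. A sketch with an illustrative model name:

```python
# Sketch: for a list-typed field, one example value is itself a list,
# hence examples=[[...]] rather than examples=[...].
from pydantic import BaseModel, Field


class CidrListSketch(BaseModel):
    insecure_registry_cidrs: list[str] | None = Field(
        default=None,
        alias="InsecureRegistryCIDRs",
        examples=[["::1/128", "127.0.0.0/8"]],
    )


schema = CidrListSketch.model_json_schema(by_alias=True)
assert schema["properties"]["InsecureRegistryCIDRs"]["examples"] == [
    ["::1/128", "127.0.0.0/8"]
]
```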
For increased\n> security, users should add their CA to their system's list of trusted\n> CAs instead of enabling this option.\n", - example=["::1/128", "127.0.0.0/8"], - ) - IndexConfigs: dict[str, IndexInfo] | None = Field( - None, - example={ - "127.0.0.1:5000": { - "Name": "127.0.0.1:5000", - "Mirrors": [], - "Secure": False, - "Official": False, - }, - "[2001:db8:a0b:12f0::1]:80": { - "Name": "[2001:db8:a0b:12f0::1]:80", - "Mirrors": [], - "Secure": False, - "Official": False, - }, - "docker.io": { - "Name": "docker.io", - "Mirrors": ["https://hub-mirror.corp.example.com:5000/"], - "Secure": True, - "Official": True, - }, - "registry.internal.corp.example.com:3000": { - "Name": "registry.internal.corp.example.com:3000", - "Mirrors": [], - "Secure": False, - "Official": False, - }, - }, - ) - Mirrors: list[str] | None = Field( - None, + examples=[["::1/128", "127.0.0.0/8"]], + ) + index_configs: dict[str, IndexInfo] | None = Field( + default=None, + alias="IndexConfigs", + examples=[ + { + "127.0.0.1:5000": { + "Name": "127.0.0.1:5000", + "Mirrors": [], + "Secure": False, + "Official": False, + }, + "[2001:db8:a0b:12f0::1]:80": { + "Name": "[2001:db8:a0b:12f0::1]:80", + "Mirrors": [], + "Secure": False, + "Official": False, + }, + "docker.io": { + "Name": "docker.io", + "Mirrors": ["https://hub-mirror.corp.example.com:5000/"], + "Secure": True, + "Official": True, + }, + "registry.internal.corp.example.com:3000": { + "Name": "registry.internal.corp.example.com:3000", + "Mirrors": [], + "Secure": False, + "Official": False, + }, + } + ], + ) + mirrors: list[str] | None = Field( + default=None, + alias="Mirrors", description="List of registry URLs that act as a mirror for the official\n(`docker.io`) registry.\n", - example=[ - "https://hub-mirror.corp.example.com:5000/", - "https://[2001:db8:a0b:12f0::1]/", + examples=[ + [ + "https://hub-mirror.corp.example.com:5000/", + "https://[2001:db8:a0b:12f0::1]/", + ] ], ) @@ -3550,48 +4404,247 @@ class SwarmInfo(BaseModel): """ - NodeID: str | None = Field( - "", + node_id: str | None = Field( + default="", + alias="NodeID", description="Unique identifier of for this node in the swarm.", - example="k67qz4598weg5unwwffg6z1m1", + examples=["k67qz4598weg5unwwffg6z1m1"], ) - NodeAddr: str | None = Field( - "", + node_addr: str | None = Field( + default="", + alias="NodeAddr", description="IP address at which this node can be reached by other nodes in the\nswarm.\n", - example="10.0.0.46", + examples=["10.0.0.46"], ) - LocalNodeState: LocalNodeState | None = None - ControlAvailable: bool | None = Field(False, example=True) - Error: str | None = "" - RemoteManagers: list[PeerNode] | None = Field( - None, + local_node_state: LocalNodeState | None = Field(default="", alias="LocalNodeState") + control_available: bool | None = Field( + default=False, alias="ControlAvailable", examples=[True] + ) + error: str | None = Field(default="", alias="Error") + remote_managers: list[PeerNode] | None = Field( + default=None, + alias="RemoteManagers", description="List of ID's and addresses of other managers in the swarm.\n", - example=[ - {"NodeID": "71izy0goik036k48jg985xnds", "Addr": "10.0.0.158:2377"}, - {"NodeID": "79y6h1o4gv8n120drcprv5nmc", "Addr": "10.0.0.159:2377"}, - {"NodeID": "k67qz4598weg5unwwffg6z1m1", "Addr": "10.0.0.46:2377"}, + examples=[ + [ + {"NodeID": "71izy0goik036k48jg985xnds", "Addr": "10.0.0.158:2377"}, + {"NodeID": "79y6h1o4gv8n120drcprv5nmc", "Addr": "10.0.0.159:2377"}, + {"NodeID": "k67qz4598weg5unwwffg6z1m1", "Addr": "10.0.0.46:2377"}, + 
] ], ) - Nodes: int | None = Field( - None, description="Total number of nodes in the swarm.", example=4 + nodes: int | None = Field( + default=None, + alias="Nodes", + description="Total number of nodes in the swarm.", + examples=[4], ) - Managers: int | None = Field( - None, description="Total number of managers in the swarm.", example=3 + managers: int | None = Field( + default=None, + alias="Managers", + description="Total number of managers in the swarm.", + examples=[3], + ) + cluster: ClusterInfo | None = Field(default=None, alias="Cluster") + + +class HostConfig(Resources): + """ + Container configuration that depends on the host we are running on + """ + + binds: list[str] | None = Field( + default=None, + alias="Binds", + description="A list of volume bindings for this container. Each volume binding\nis a string in one of these forms:\n\n- `host-src:container-dest[:options]` to bind-mount a host path\n into the container. Both `host-src`, and `container-dest` must\n be an _absolute_ path.\n- `volume-name:container-dest[:options]` to bind-mount a volume\n managed by a volume driver into the container. `container-dest`\n must be an _absolute_ path.\n\n`options` is an optional, comma-delimited list of:\n\n- `nocopy` disables automatic copying of data from the container\n path to the volume. The `nocopy` flag only applies to named volumes.\n- `[ro|rw]` mounts a volume read-only or read-write, respectively.\n If omitted or set to `rw`, volumes are mounted read-write.\n- `[z|Z]` applies SELinux labels to allow or deny multiple containers\n to read and write to the same volume.\n - `z`: a _shared_ content label is applied to the content. This\n label indicates that multiple containers can share the volume\n content, for both reading and writing.\n - `Z`: a _private unshared_ label is applied to the content.\n This label indicates that only the current container can use\n a private volume. Labeling systems such as SELinux require\n proper labels to be placed on volume content that is mounted\n into a container. Without a label, the security system can\n prevent a container's processes from using the content. By\n default, the labels set by the host operating system are not\n modified.\n- `[[r]shared|[r]slave|[r]private]` specifies mount\n [propagation behavior](https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt).\n This only applies to bind-mounted volumes, not internal volumes\n or named volumes. Mount propagation requires the source mount\n point (the location where the source directory is mounted in the\n host operating system) to have the correct propagation properties.\n For shared volumes, the source mount point must be set to `shared`.\n For slave volumes, the mount must be set to either `shared` or\n `slave`.\n", + ) + container_id_file: str | None = Field( + default=None, + alias="ContainerIDFile", + description="Path to a file where the container ID is written", + ) + log_config: LogConfig | None = Field( + default=None, + alias="LogConfig", + description="The logging configuration for this container", + ) + network_mode: str | None = Field( + default=None, + alias="NetworkMode", + description="Network mode to use for this container. Supported standard values\nare: `bridge`, `host`, `none`, and `container:`. 
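One oddity worth flagging from `SwarmInfo` above: `local_node_state` keeps `default=""` even though the field is typed as the `LocalNodeState` enum. This is tolerated because Pydantic v2 does not validate defaults unless `validate_default=True`, and the Docker spec's `LocalNodeState` enum includes the empty string as a member anyway. A sketch; the enum body is an assumption based on the Docker API spec, not shown in this hunk:

```python
# Sketch: defaults are not validated in v2, so a raw "" default on an
# enum-typed field passes through untouched.
from enum import Enum

from pydantic import BaseModel, Field


class LocalNodeStateSketch(str, Enum):
    # Assumption: mirrors the Docker spec enum, which includes "".
    empty = ""
    inactive = "inactive"
    active = "active"


class SwarmInfoSketch(BaseModel):
    local_node_state: LocalNodeStateSketch | None = Field(
        default="", alias="LocalNodeState"
    )


info = SwarmInfoSketch()
assert info.local_node_state == ""  # the default is not validated or coerced
validated = SwarmInfoSketch.model_validate({"LocalNodeState": "active"})
assert validated.local_node_state is LocalNodeStateSketch.active
```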
Any\nother value is taken as a custom network's name to which this\ncontainer should connect to.\n", + ) + port_bindings: PortMap | None = Field(default=None, alias="PortBindings") + restart_policy: RestartPolicy | None = Field(default=None, alias="RestartPolicy") + auto_remove: bool | None = Field( + default=None, + alias="AutoRemove", + description="Automatically remove the container when the container's process\nexits. This has no effect if `RestartPolicy` is set.\n", + ) + volume_driver: str | None = Field( + default=None, + alias="VolumeDriver", + description="Driver that this container uses to mount volumes.", + ) + volumes_from: list[str] | None = Field( + default=None, + alias="VolumesFrom", + description="A list of volumes to inherit from another container, specified in\nthe form `[:]`.\n", + ) + mounts: list[Mount] | None = Field( + default=None, + alias="Mounts", + description="Specification for mounts to be added to the container.\n", + ) + cap_add: list[str] | None = Field( + default=None, + alias="CapAdd", + description="A list of kernel capabilities to add to the container. Conflicts\nwith option 'Capabilities'.\n", + ) + cap_drop: list[str] | None = Field( + default=None, + alias="CapDrop", + description="A list of kernel capabilities to drop from the container. Conflicts\nwith option 'Capabilities'.\n", + ) + cgroupns_mode: CgroupnsMode | None = Field( + default=None, + alias="CgroupnsMode", + description='cgroup namespace mode for the container. Possible values are:\n\n- `"private"`: the container runs in its own private cgroup namespace\n- `"host"`: use the host system\'s cgroup namespace\n\nIf not specified, the daemon default is used, which can either be `"private"`\nor `"host"`, depending on daemon version, kernel support and configuration.\n', + ) + dns: list[str] | None = Field( + default=None, + alias="Dns", + description="A list of DNS servers for the container to use.", + ) + dns_options: list[str] | None = Field( + default=None, alias="DnsOptions", description="A list of DNS options." + ) + dns_search: list[str] | None = Field( + default=None, alias="DnsSearch", description="A list of DNS search domains." + ) + extra_hosts: list[str] | None = Field( + default=None, + alias="ExtraHosts", + description='A list of hostnames/IP mappings to add to the container\'s `/etc/hosts`\nfile. Specified in the form `["hostname:IP"]`.\n', + ) + group_add: list[str] | None = Field( + default=None, + alias="GroupAdd", + description="A list of additional groups that the container process will run as.\n", + ) + ipc_mode: str | None = Field( + default=None, + alias="IpcMode", + description='IPC sharing mode for the container. Possible values are:\n\n- `"none"`: own private IPC namespace, with /dev/shm not mounted\n- `"private"`: own private IPC namespace\n- `"shareable"`: own private IPC namespace, with a possibility to share it with other containers\n- `"container:"`: join another (shareable) container\'s IPC namespace\n- `"host"`: use the host system\'s IPC namespace\n\nIf not specified, daemon default is used, which can either be `"private"`\nor `"shareable"`, depending on daemon version and configuration.\n', + ) + cgroup: str | None = Field( + default=None, alias="Cgroup", description="Cgroup to use for the container." 
+ ) + links: list[str] | None = Field( + default=None, + alias="Links", + description="A list of links for the container in the form `container_name:alias`.\n", + ) + oom_score_adj: int | None = Field( + default=None, + alias="OomScoreAdj", + description="An integer value containing the score given to the container in\norder to tune OOM killer preferences.\n", + examples=[500], + ) + pid_mode: str | None = Field( + default=None, + alias="PidMode", + description='Set the PID (Process) Namespace mode for the container. It can be\neither:\n\n- `"container:"`: joins another container\'s PID namespace\n- `"host"`: use the host\'s PID namespace inside the container\n', + ) + privileged: bool | None = Field( + default=None, + alias="Privileged", + description="Gives the container full access to the host.", + ) + publish_all_ports: bool | None = Field( + default=None, + alias="PublishAllPorts", + description="Allocates an ephemeral host port for all of a container's\nexposed ports.\n\nPorts are de-allocated when the container stops and allocated when\nthe container starts. The allocated port might be changed when\nrestarting the container.\n\nThe port is selected from the ephemeral port range that depends on\nthe kernel. For example, on Linux the range is defined by\n`/proc/sys/net/ipv4/ip_local_port_range`.\n", + ) + readonly_rootfs: bool | None = Field( + default=None, + alias="ReadonlyRootfs", + description="Mount the container's root filesystem as read only.", + ) + security_opt: list[str] | None = Field( + default=None, + alias="SecurityOpt", + description="A list of string values to customize labels for MLS systems, such\nas SELinux.\n", + ) + storage_opt: dict[str, str] | None = Field( + default=None, + alias="StorageOpt", + description='Storage driver options for this container, in the form `{"size": "120G"}`.\n', + ) + tmpfs: dict[str, str] | None = Field( + default=None, + alias="Tmpfs", + description='A map of container directories which should be replaced by tmpfs\nmounts, and their corresponding mount options. For example:\n\n```\n{ "/run": "rw,noexec,nosuid,size=65536k" }\n```\n', + ) + uts_mode: str | None = Field( + default=None, + alias="UTSMode", + description="UTS namespace to use for the container.", + ) + userns_mode: str | None = Field( + default=None, + alias="UsernsMode", + description="Sets the usernamespace mode for the container when usernamespace\nremapping option is enabled.\n", + ) + shm_size: int | None = Field( + default=None, + alias="ShmSize", + description="Size of `/dev/shm` in bytes. If omitted, the system uses 64MB.\n", + ge=0, + ) + sysctls: dict[str, str] | None = Field( + default=None, + alias="Sysctls", + description='A list of kernel parameters (sysctls) to set in the container.\nFor example:\n\n```\n{"net.ipv4.ip_forward": "1"}\n```\n', + ) + runtime: str | None = Field( + default=None, alias="Runtime", description="Runtime to use with this container." + ) + console_size: list[ConsoleSizeItem] | None = Field( + default=None, + alias="ConsoleSize", + description="Initial console size, as an `[height, width]` array. (Windows only)\n", + max_length=2, + min_length=2, + ) + isolation: Isolation | None = Field( + default=None, + alias="Isolation", + description="Isolation technology of the container. 
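The `console_size` field above also shows the collection-constraint rename: v1's `min_items`/`max_items` become `min_length`/`max_length` in v2. A sketch with an illustrative model name:

```python
# Sketch: min_length/max_length constrain list length in Pydantic v2,
# replacing v1's min_items/max_items.
from pydantic import BaseModel, Field, ValidationError


class ConsoleSizeSketch(BaseModel):
    console_size: list[int] | None = Field(
        default=None, alias="ConsoleSize", min_length=2, max_length=2
    )


ok = ConsoleSizeSketch.model_validate({"ConsoleSize": [24, 80]})
assert ok.console_size == [24, 80]

try:
    ConsoleSizeSketch.model_validate({"ConsoleSize": [24]})
except ValidationError:
    pass  # fewer than two items is rejected, as min_items used to do in v1
```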
(Windows only)\n", + ) + masked_paths: list[str] | None = Field( + default=None, + alias="MaskedPaths", + description="The list of paths to be masked inside the container (this overrides\nthe default set of paths).\n", + ) + readonly_paths: list[str] | None = Field( + default=None, + alias="ReadonlyPaths", + description="The list of paths to be set as read-only inside the container\n(this overrides the default set of paths).\n", ) - Cluster: ClusterInfo | None = None class NetworkingConfig(BaseModel): """ - NetworkingConfig represents the container's networking configuration for + NetworkingConfig represents the container's networking configuration for each of its interfaces. It is used for the networking configs specified in the `docker create` and `docker network connect` commands. """ - EndpointsConfig: dict[str, EndpointSettings] | None = Field( - None, + endpoints_config: dict[str, EndpointSettings] | None = Field( + default=None, + alias="EndpointsConfig", description="A mapping of network name to endpoint configuration for that network.\n", ) @@ -3601,400 +4654,606 @@ class NetworkSettings(BaseModel): NetworkSettings exposes the network settings in the API """ - Bridge: str | None = Field( - None, - description="Name of the network'a bridge (for example, `docker0`).", - example="docker0", + bridge: str | None = Field( + default=None, + alias="Bridge", + description="Name of the network's bridge (for example, `docker0`).", + examples=["docker0"], ) - SandboxID: str | None = Field( - None, + sandbox_id: str | None = Field( + default=None, + alias="SandboxID", description="SandboxID uniquely represents a container's network stack.", - example="9d12daf2c33f5959c8bf90aa513e4f65b561738661003029ec84830cd503a0c3", + examples=["9d12daf2c33f5959c8bf90aa513e4f65b561738661003029ec84830cd503a0c3"], ) - HairpinMode: bool | None = Field( - None, + hairpin_mode: bool | None = Field( + default=None, + alias="HairpinMode", description="Indicates if hairpin NAT should be enabled on the virtual interface.\n", - example=False, + examples=[False], ) - LinkLocalIPv6Address: str | None = Field( - None, + link_local_i_pv6_address: str | None = Field( + default=None, + alias="LinkLocalIPv6Address", description="IPv6 unicast address using the link-local prefix.", - example="fe80::42:acff:fe11:1", + examples=["fe80::42:acff:fe11:1"], + ) + link_local_i_pv6_prefix_len: int | None = Field( + default=None, + alias="LinkLocalIPv6PrefixLen", + description="Prefix length of the IPv6 unicast address.", + examples=["64"], + ) + ports: PortMap | None = Field(default=None, alias="Ports") + sandbox_key: str | None = Field( + default=None, + alias="SandboxKey", + description="SandboxKey identifies the sandbox", + examples=["/var/run/docker/netns/8ab54b426c38"], ) - LinkLocalIPv6PrefixLen: int | None = Field( - None, description="Prefix length of the IPv6 unicast address.", example="64" + secondary_ip_addresses: list[Address] | None = Field( + default=None, alias="SecondaryIPAddresses", description="" ) - Ports: PortMap | None = None - SandboxKey: str | None = Field( - None, - description="SandboxKey identifies the sandbox", - example="/var/run/docker/netns/8ab54b426c38", + secondary_i_pv6_addresses: list[Address] | None = Field( + default=None, alias="SecondaryIPv6Addresses", description="" ) - SecondaryIPAddresses: list[Address] | None = Field(None, description="") - SecondaryIPv6Addresses: list[Address] | None = Field(None, description="") - EndpointID: str | None = Field( - None, + endpoint_id: str | None = Field( + 
default=None, + alias="EndpointID", description='EndpointID uniquely represents a service endpoint in a Sandbox.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b", + examples=["b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b"], ) - Gateway: str | None = Field( - None, + gateway: str | None = Field( + default=None, + alias="Gateway", description='Gateway address for the default "bridge" network.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="172.17.0.1", + examples=["172.17.0.1"], ) - GlobalIPv6Address: str | None = Field( - None, + global_i_pv6_address: str | None = Field( + default=None, + alias="GlobalIPv6Address", description='Global IPv6 address for the default "bridge" network.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="2001:db8::5689", + examples=["2001:db8::5689"], ) - GlobalIPv6PrefixLen: int | None = Field( - None, + global_i_pv6_prefix_len: int | None = Field( + default=None, + alias="GlobalIPv6PrefixLen", description='Mask length of the global IPv6 address.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example=64, + examples=[64], ) - IPAddress: str | None = Field( - None, + ip_address: str | None = Field( + default=None, + alias="IPAddress", description='IPv4 address for the default "bridge" network.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="172.17.0.4", + examples=["172.17.0.4"], ) - IPPrefixLen: int | None = Field( - None, + ip_prefix_len: int | None = Field( + default=None, + alias="IPPrefixLen", description='Mask length of the IPv4 address.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example=16, + examples=[16], ) - IPv6Gateway: str | None = Field( - None, + i_pv6_gateway: str | None = Field( + default=None, + alias="IPv6Gateway", description='IPv6 gateway address for this network.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="2001:db8:2::100", + examples=["2001:db8:2::100"], ) - MacAddress: str | None = Field( - None, + mac_address: str | None = Field( + default=None, + alias="MacAddress", description='MAC address for the container on the default "bridge" network.\n\n
<p><br /></p>
\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n', - example="02:42:ac:11:00:04", + examples=["02:42:ac:11:00:04"], ) - Networks: dict[str, EndpointSettings] | None = Field( - None, + networks: dict[str, EndpointSettings] | None = Field( + default=None, + alias="Networks", description="Information about all networks that the container is connected to.\n", ) class Network(BaseModel): - Name: str | None = None - Id: str | None = None - Created: str | None = None - Scope: str | None = None - Driver: str | None = None - EnableIPv6: bool | None = None - IPAM: IPAM | None = None - Internal: bool | None = None - Attachable: bool | None = None - Ingress: bool | None = None - Containers: dict[str, NetworkContainer] | None = None - Options: dict[str, str] | None = None - Labels: dict[str, str] | None = None + name: str | None = Field( + default=None, + alias="Name", + description="Name of the network.\n", + examples=["my_network"], + ) + id: str | None = Field( + default=None, + alias="Id", + description="ID that uniquely identifies a network on a single machine.\n", + examples=["7d86d31b1478e7cca9ebed7e73aa0fdeec46c5ca29497431d3007d2d9e15ed99"], + ) + created: str | None = Field( + default=None, + alias="Created", + description="Date and time at which the network was created in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", + examples=["2016-10-19T04:33:30.360899459Z"], + ) + scope: str | None = Field( + default=None, + alias="Scope", + description="The level at which the network exists (e.g. `swarm` for cluster-wide\nor `local` for machine level)\n", + examples=["local"], + ) + driver: str | None = Field( + default=None, + alias="Driver", + description="The name of the driver used to create the network (e.g. `bridge`,\n`overlay`).\n", + examples=["overlay"], + ) + enable_i_pv6: bool | None = Field( + default=None, + alias="EnableIPv6", + description="Whether the network was created with IPv6 enabled.\n", + examples=[False], + ) + ipam: IPAM | None = Field(default=None, alias="IPAM") + internal: bool | None = Field( + default=False, + alias="Internal", + description="Whether the network is created to only allow internal networking\nconnectivity.\n", + examples=[False], + ) + attachable: bool | None = Field( + default=False, + alias="Attachable", + description="Whether a global / swarm scope network is manually attachable by regular\ncontainers from workers in swarm mode.\n", + examples=[False], + ) + ingress: bool | None = Field( + default=False, + alias="Ingress", + description="Whether the network is providing the routing-mesh for the swarm cluster.\n", + examples=[False], + ) + config_from: ConfigReference | None = Field(default=None, alias="ConfigFrom") + config_only: bool | None = Field( + default=False, + alias="ConfigOnly", + description="Whether the network is a config-only network. Config-only networks are\nplaceholder networks for network configurations to be used by other\nnetworks. 
Config-only networks cannot be used directly to run containers\nor services.\n", + ) + containers: dict[str, NetworkContainer] | None = Field( + default=None, + alias="Containers", + description="Contains endpoints attached to the network.\n", + examples=[ + { + "19a4d5d687db25203351ed79d478946f861258f018fe384f229f2efa4b23513c": { + "Name": "test", + "EndpointID": "628cadb8bcb92de107b2a1e516cbffe463e321f548feb37697cce00ad694f21a", + "MacAddress": "02:42:ac:13:00:02", + "IPv4Address": "172.19.0.2/16", + "IPv6Address": "", + } + } + ], + ) + options: dict[str, str] | None = Field( + default=None, + alias="Options", + description="Network-specific options uses when creating the network.\n", + examples=[ + { + "com.docker.network.bridge.default_bridge": "true", + "com.docker.network.bridge.enable_icc": "true", + "com.docker.network.bridge.enable_ip_masquerade": "true", + "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0", + "com.docker.network.bridge.name": "docker0", + "com.docker.network.driver.mtu": "1500", + } + ], + ) + labels: dict[str, str] | None = Field( + default=None, + alias="Labels", + description="User-defined key/value metadata.", + examples=[ + { + "com.example.some-label": "some-value", + "com.example.some-other-label": "some-other-value", + } + ], + ) + peers: list[PeerInfo] | None = Field( + default=None, + alias="Peers", + description="List of peer nodes for an overlay network. This field is only present\nfor overlay networks, and omitted for other network types.\n", + ) class Node(BaseModel): - ID: str | None = Field(None, example="24ifsmvkjbyhk") - Version: ObjectVersion | None = None - CreatedAt: str | None = Field( - None, + id: str | None = Field(default=None, alias="ID", examples=["24ifsmvkjbyhk"]) + version: ObjectVersion | None = Field(default=None, alias="Version") + created_at: str | None = Field( + default=None, + alias="CreatedAt", description="Date and time at which the node was added to the swarm in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2016-08-18T10:44:24.496525531Z", + examples=["2016-08-18T10:44:24.496525531Z"], ) - UpdatedAt: str | None = Field( - None, + updated_at: str | None = Field( + default=None, + alias="UpdatedAt", description="Date and time at which the node was last updated in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", - example="2017-08-09T07:09:37.632105588Z", + examples=["2017-08-09T07:09:37.632105588Z"], ) - Spec: NodeSpec | None = None - Description: NodeDescription | None = None - Status: NodeStatus | None = None - ManagerStatus: ManagerStatus | None = None + spec: NodeSpec | None = Field(default=None, alias="Spec") + description: NodeDescription | None = Field(default=None, alias="Description") + status: NodeStatus | None = Field(default=None, alias="Status") + manager_status: ManagerStatus | None = Field(default=None, alias="ManagerStatus") class SystemInfo(BaseModel): - ID: str | None = Field( - None, + id: str | None = Field( + default=None, + alias="ID", description="Unique identifier of the daemon.\n\n
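Every field in these generated Docker models is migrated the same way, so the pattern is worth spelling out once: the v1 PascalCase attribute becomes a snake_case attribute with the wire name kept as `alias`, and the non-standard v1 `example=` keyword becomes the JSON-Schema-compliant `examples=[...]` list. A minimal sketch of the idea, using a hypothetical `Probe` model rather than anything from this diff:

```python
from pydantic import BaseModel, ConfigDict, Field


class Probe(BaseModel):
    # hypothetical model illustrating the snake_case + alias migration
    mac_address: str | None = Field(
        default=None,
        alias="MacAddress",  # JSON/wire name stays PascalCase
        examples=["02:42:ac:11:00:04"],  # v2 takes a *list* of examples
    )
    # accept construction by field name as well as by alias
    model_config = ConfigDict(populate_by_name=True)


# parses payloads keyed by the Docker API name ...
probe = Probe.model_validate({"MacAddress": "02:42:ac:11:00:04"})
# ... and serializes back to the wire format on demand
assert probe.model_dump(by_alias=True) == {"MacAddress": "02:42:ac:11:00:04"}
```

Because `examples` is a list of candidate values, fields whose single example was itself a list (e.g. `DriverStatus`, `Labels`, `SecurityOptions`, `Warnings`) end up double-wrapped, which explains the `examples=[[...]]` nesting throughout this diff.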
\n\n> **Note**: The format of the ID itself is not part of the API, and\n> should not be considered stable.\n", - example="7TRN:IPZB:QYBB:VPBQ:UMPP:KARE:6ZNR:XE6T:7EWV:PKF4:ZOJD:TPYS", - ) - Containers: int | None = Field( - None, description="Total number of containers on the host.", example=14 - ) - ContainersRunning: int | None = Field( - None, description='Number of containers with status `"running"`.\n', example=3 - ) - ContainersPaused: int | None = Field( - None, description='Number of containers with status `"paused"`.\n', example=1 - ) - ContainersStopped: int | None = Field( - None, description='Number of containers with status `"stopped"`.\n', example=10 - ) - Images: int | None = Field( - None, + examples=["7TRN:IPZB:QYBB:VPBQ:UMPP:KARE:6ZNR:XE6T:7EWV:PKF4:ZOJD:TPYS"], + ) + containers: int | None = Field( + default=None, + alias="Containers", + description="Total number of containers on the host.", + examples=[14], + ) + containers_running: int | None = Field( + default=None, + alias="ContainersRunning", + description='Number of containers with status `"running"`.\n', + examples=[3], + ) + containers_paused: int | None = Field( + default=None, + alias="ContainersPaused", + description='Number of containers with status `"paused"`.\n', + examples=[1], + ) + containers_stopped: int | None = Field( + default=None, + alias="ContainersStopped", + description='Number of containers with status `"stopped"`.\n', + examples=[10], + ) + images: int | None = Field( + default=None, + alias="Images", description="Total number of images on the host.\n\nBoth _tagged_ and _untagged_ (dangling) images are counted.\n", - example=508, + examples=[508], ) - Driver: str | None = Field( - None, description="Name of the storage driver in use.", example="overlay2" + driver: str | None = Field( + default=None, + alias="Driver", + description="Name of the storage driver in use.", + examples=["overlay2"], ) - DriverStatus: list[list[str]] | None = Field( - None, + driver_status: list[list[str]] | None = Field( + default=None, + alias="DriverStatus", description='Information specific to the storage driver, provided as\n"label" / "value" pairs.\n\nThis information is provided by the storage driver, and formatted\nin a way consistent with the output of `docker info` on the command\nline.\n\n
\n\n> **Note**: The information returned in this field, including the\n> formatting of values and labels, should not be considered stable,\n> and may change without notice.\n', - example=[ - ["Backing Filesystem", "extfs"], - ["Supports d_type", "true"], - ["Native Overlay Diff", "true"], + examples=[ + [ + ["Backing Filesystem", "extfs"], + ["Supports d_type", "true"], + ["Native Overlay Diff", "true"], + ] ], ) - DockerRootDir: str | None = Field( - None, + docker_root_dir: str | None = Field( + default=None, + alias="DockerRootDir", description="Root directory of persistent Docker state.\n\nDefaults to `/var/lib/docker` on Linux, and `C:\\ProgramData\\docker`\non Windows.\n", - example="/var/lib/docker", + examples=["/var/lib/docker"], ) - Plugins: PluginsInfo | None = None - MemoryLimit: bool | None = Field( - None, + plugins: PluginsInfo | None = Field(default=None, alias="Plugins") + memory_limit: bool | None = Field( + default=None, + alias="MemoryLimit", description="Indicates if the host has memory limit support enabled.", - example=True, + examples=[True], ) - SwapLimit: bool | None = Field( - None, + swap_limit: bool | None = Field( + default=None, + alias="SwapLimit", description="Indicates if the host has memory swap limit support enabled.", - example=True, + examples=[True], ) - KernelMemory: bool | None = Field( - None, + kernel_memory: bool | None = Field( + default=None, + alias="KernelMemory", description="Indicates if the host has kernel memory limit support enabled.\n\n
\n\n> **Deprecated**: This field is deprecated as the kernel 5.4 deprecated\n> `kmem.limit_in_bytes`.\n", - example=True, + examples=[True], ) - KernelMemoryTCP: bool | None = Field( - None, + kernel_memory_tcp: bool | None = Field( + default=None, + alias="KernelMemoryTCP", description="Indicates if the host has kernel memory TCP limit support enabled.\n\nKernel memory TCP limits are not supported when using cgroups v2, which\ndoes not support the corresponding `memory.kmem.tcp.limit_in_bytes` cgroup.\n", - example=True, + examples=[True], ) - CpuCfsPeriod: bool | None = Field( - None, + cpu_cfs_period: bool | None = Field( + default=None, + alias="CpuCfsPeriod", description="Indicates if CPU CFS(Completely Fair Scheduler) period is supported by\nthe host.\n", - example=True, + examples=[True], ) - CpuCfsQuota: bool | None = Field( - None, + cpu_cfs_quota: bool | None = Field( + default=None, + alias="CpuCfsQuota", description="Indicates if CPU CFS(Completely Fair Scheduler) quota is supported by\nthe host.\n", - example=True, + examples=[True], ) - CPUShares: bool | None = Field( - None, + cpu_shares: bool | None = Field( + default=None, + alias="CPUShares", description="Indicates if CPU Shares limiting is supported by the host.\n", - example=True, + examples=[True], ) - CPUSet: bool | None = Field( - None, + cpu_set: bool | None = Field( + default=None, + alias="CPUSet", description="Indicates if CPUsets (cpuset.cpus, cpuset.mems) are supported by the host.\n\nSee [cpuset(7)](https://www.kernel.org/doc/Documentation/cgroup-v1/cpusets.txt)\n", - example=True, + examples=[True], ) - PidsLimit: bool | None = Field( - None, + pids_limit: bool | None = Field( + default=None, + alias="PidsLimit", description="Indicates if the host kernel has PID limit support enabled.", - example=True, - ) - OomKillDisable: bool | None = Field( - None, description="Indicates if OOM killer disable is supported on the host." 
- ) - IPv4Forwarding: bool | None = Field( - None, description="Indicates IPv4 forwarding is enabled.", example=True - ) - BridgeNfIptables: bool | None = Field( - None, + examples=[True], + ) + oom_kill_disable: bool | None = Field( + default=None, + alias="OomKillDisable", + description="Indicates if OOM killer disable is supported on the host.", + ) + i_pv4_forwarding: bool | None = Field( + default=None, + alias="IPv4Forwarding", + description="Indicates IPv4 forwarding is enabled.", + examples=[True], + ) + bridge_nf_iptables: bool | None = Field( + default=None, + alias="BridgeNfIptables", description="Indicates if `bridge-nf-call-iptables` is available on the host.", - example=True, + examples=[True], ) - BridgeNfIp6tables: bool | None = Field( - None, + bridge_nf_ip6tables: bool | None = Field( + default=None, + alias="BridgeNfIp6tables", description="Indicates if `bridge-nf-call-ip6tables` is available on the host.", - example=True, + examples=[True], ) - Debug: bool | None = Field( - None, + debug: bool | None = Field( + default=None, + alias="Debug", description="Indicates if the daemon is running in debug-mode / with debug-level\nlogging enabled.\n", - example=True, + examples=[True], ) - NFd: int | None = Field( - None, + n_fd: int | None = Field( + default=None, + alias="NFd", description="The total number of file Descriptors in use by the daemon process.\n\nThis information is only returned if debug-mode is enabled.\n", - example=64, + examples=[64], ) - NGoroutines: int | None = Field( - None, + n_goroutines: int | None = Field( + default=None, + alias="NGoroutines", description="The number of goroutines that currently exist.\n\nThis information is only returned if debug-mode is enabled.\n", - example=174, + examples=[174], ) - SystemTime: str | None = Field( - None, + system_time: str | None = Field( + default=None, + alias="SystemTime", description="Current system-time in [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt)\nformat with nano-seconds.\n", - example="2017-08-08T20:28:29.06202363Z", + examples=["2017-08-08T20:28:29.06202363Z"], ) - LoggingDriver: str | None = Field( - None, description="The logging driver to use as a default for new containers.\n" + logging_driver: str | None = Field( + default=None, + alias="LoggingDriver", + description="The logging driver to use as a default for new containers.\n", ) - CgroupDriver: CgroupDriver | None = Field( - CgroupDriver.cgroupfs, + cgroup_driver: CgroupDriver | None = Field( + default=CgroupDriver.cgroupfs, + alias="CgroupDriver", description="The driver to use for managing cgroups.\n", - example="cgroupfs", - ) - CgroupVersion: CgroupVersion | None = Field( - CgroupVersion.field_1, description="The version of the cgroup.\n", example="1" - ) - NEventsListener: int | None = Field( - None, description="Number of event listeners subscribed.", example=30 - ) - KernelVersion: str | None = Field( - None, + examples=["cgroupfs"], + ) + cgroup_version: CgroupVersion | None = Field( + default=CgroupVersion.field_1, + alias="CgroupVersion", + description="The version of the cgroup.\n", + examples=["1"], + ) + n_events_listener: int | None = Field( + default=None, + alias="NEventsListener", + description="Number of event listeners subscribed.", + examples=[30], + ) + kernel_version: str | None = Field( + default=None, + alias="KernelVersion", description='Kernel version of the host.\n\nOn Linux, this information obtained from `uname`. 
On Windows this\ninformation is queried from the HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\\\\\nregistry value, for example _"10.0 14393 (14393.1198.amd64fre.rs1_release_sec.170427-1353)"_.\n', - example="4.9.38-moby", + examples=["4.9.38-moby"], ) - OperatingSystem: str | None = Field( - None, + operating_system: str | None = Field( + default=None, + alias="OperatingSystem", description='Name of the host\'s operating system, for example: "Ubuntu 16.04.2 LTS"\nor "Windows Server 2016 Datacenter"\n', - example="Alpine Linux v3.5", + examples=["Alpine Linux v3.5"], ) - OSVersion: str | None = Field( - None, + os_version: str | None = Field( + default=None, + alias="OSVersion", description="Version of the host's operating system\n\n
\n\n> **Note**: The information returned in this field, including its\n> very existence, and the formatting of values, should not be considered\n> stable, and may change without notice.\n", - example="16.04", + examples=["16.04"], ) - OSType: str | None = Field( - None, - description='Generic type of the operating system of the host, as returned by the\nGo runtime (`GOOS`).\n\nCurrently returned values are "linux" and "windows". A full list of\npossible values can be found in the [Go documentation](https://golang.org/doc/install/source#environment).\n', - example="linux", + os_type: str | None = Field( + default=None, + alias="OSType", + description='Generic type of the operating system of the host, as returned by the\nGo runtime (`GOOS`).\n\nCurrently returned values are "linux" and "windows". A full list of\npossible values can be found in the [Go documentation](https://go.dev/doc/install/source#environment).\n', + examples=["linux"], ) - Architecture: str | None = Field( - None, - description="Hardware architecture of the host, as returned by the Go runtime\n(`GOARCH`).\n\nA full list of possible values can be found in the [Go documentation](https://golang.org/doc/install/source#environment).\n", - example="x86_64", - ) - NCPU: int | None = Field( - None, + architecture: str | None = Field( + default=None, + alias="Architecture", + description="Hardware architecture of the host, as returned by the Go runtime\n(`GOARCH`).\n\nA full list of possible values can be found in the [Go documentation](https://go.dev/doc/install/source#environment).\n", + examples=["x86_64"], + ) + ncpu: int | None = Field( + default=None, + alias="NCPU", description="The number of logical CPUs usable by the daemon.\n\nThe number of available CPUs is checked by querying the operating\nsystem when the daemon starts. Changes to operating system CPU\nallocation after the daemon is started are not reflected.\n", - example=4, + examples=[4], ) - MemTotal: int | None = Field( - None, + mem_total: int | None = Field( + default=None, + alias="MemTotal", description="Total amount of physical memory available on the host, in bytes.\n", - example=2095882240, + examples=[2095882240], ) - IndexServerAddress: str | None = Field( - "https://index.docker.io/v1/", + index_server_address: str | None = Field( + default="https://index.docker.io/v1/", + alias="IndexServerAddress", description="Address / URL of the index server that is used for image search,\nand as a default for user authentication for Docker Hub and Docker Cloud.\n", - example="https://index.docker.io/v1/", + examples=["https://index.docker.io/v1/"], + ) + registry_config: RegistryServiceConfig | None = Field( + default=None, alias="RegistryConfig" ) - RegistryConfig: RegistryServiceConfig | None = None - GenericResources: GenericResources | None = None - HttpProxy: str | None = Field( - None, + generic_resources: GenericResources | None = Field( + default=None, alias="GenericResources" + ) + http_proxy: str | None = Field( + default=None, + alias="HttpProxy", description="HTTP-proxy configured for the daemon. 
This value is obtained from the\n[`HTTP_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html) environment variable.\nCredentials ([user info component](https://tools.ietf.org/html/rfc3986#section-3.2.1)) in the proxy URL\nare masked in the API response.\n\nContainers do not automatically inherit this configuration.\n", - example="http://xxxxx:xxxxx@proxy.corp.example.com:8080", + examples=["http://xxxxx:xxxxx@proxy.corp.example.com:8080"], ) - HttpsProxy: str | None = Field( - None, + https_proxy: str | None = Field( + default=None, + alias="HttpsProxy", description="HTTPS-proxy configured for the daemon. This value is obtained from the\n[`HTTPS_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html) environment variable.\nCredentials ([user info component](https://tools.ietf.org/html/rfc3986#section-3.2.1)) in the proxy URL\nare masked in the API response.\n\nContainers do not automatically inherit this configuration.\n", - example="https://xxxxx:xxxxx@proxy.corp.example.com:4443", + examples=["https://xxxxx:xxxxx@proxy.corp.example.com:4443"], ) - NoProxy: str | None = Field( - None, + no_proxy: str | None = Field( + default=None, + alias="NoProxy", description="Comma-separated list of domain extensions for which no proxy should be\nused. This value is obtained from the [`NO_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html)\nenvironment variable.\n\nContainers do not automatically inherit this configuration.\n", - example="*.local, 169.254/16", + examples=["*.local, 169.254/16"], ) - Name: str | None = Field( - None, description="Hostname of the host.", example="node5.corp.example.com" + name: str | None = Field( + default=None, + alias="Name", + description="Hostname of the host.", + examples=["node5.corp.example.com"], ) - Labels: list[str] | None = Field( - None, + labels: list[str] | None = Field( + default=None, + alias="Labels", description="User-defined labels (key/value metadata) as set on the daemon.\n\n
\n\n> **Note**: When part of a Swarm, nodes can both have _daemon_ labels,\n> set through the daemon configuration, and _node_ labels, set from a\n> manager node in the Swarm. Node labels are not included in this\n> field. Node labels can be retrieved using the `/nodes/(id)` endpoint\n> on a manager node in the Swarm.\n", - example=["storage=ssd", "production"], + examples=[["storage=ssd", "production"]], ) - ExperimentalBuild: bool | None = Field( - None, + experimental_build: bool | None = Field( + default=None, + alias="ExperimentalBuild", description="Indicates if experimental features are enabled on the daemon.\n", - example=True, + examples=[True], ) - ServerVersion: str | None = Field( - None, - description="Version string of the daemon.\n\n> **Note**: the [standalone Swarm API](/swarm/swarm-api/)\n> returns the Swarm version instead of the daemon version, for example\n> `swarm/1.2.8`.\n", - example="17.06.0-ce", + server_version: str | None = Field( + default=None, + alias="ServerVersion", + description="Version string of the daemon.\n", + examples=["20.10.25"], ) - ClusterStore: str | None = Field( - None, + cluster_store: str | None = Field( + default=None, + alias="ClusterStore", description="URL of the distributed storage backend.\n\n\nThe storage backend is used for multihost networking (to store\nnetwork and endpoint information) and by the node discovery mechanism.\n\n
\n\n> **Deprecated**: This field is only propagated when using standalone Swarm\n> mode, and overlay networking using an external k/v store. Overlay\n> networks with Swarm mode enabled use the built-in raft store, and\n> this field will be empty.\n", - example="consul://consul.corp.example.com:8600/some/path", + examples=["consul://consul.corp.example.com:8600/some/path"], ) - ClusterAdvertise: str | None = Field( - None, + cluster_advertise: str | None = Field( + default=None, + alias="ClusterAdvertise", description="The network endpoint that the Engine advertises for the purpose of\nnode discovery. ClusterAdvertise is a `host:port` combination on which\nthe daemon is reachable by other hosts.\n\n
\n\n> **Deprecated**: This field is only propagated when using standalone Swarm\n> mode, and overlay networking using an external k/v store. Overlay\n> networks with Swarm mode enabled use the built-in raft store, and\n> this field will be empty.\n", - example="node5.corp.example.com:8000", + examples=["node5.corp.example.com:8000"], ) - Runtimes: dict[str, Runtime] | None = Field( - {"runc": {"path": "runc"}}, + runtimes: dict[str, Runtime] | None = Field( + default_factory=lambda: {"runc": Runtime.model_validate({"path": "runc"})}, + alias="Runtimes", description='List of [OCI compliant](https://github.com/opencontainers/runtime-spec)\nruntimes configured on the daemon. Keys hold the "name" used to\nreference the runtime.\n\nThe Docker daemon relies on an OCI compliant runtime (invoked via the\n`containerd` daemon) as its interface to the Linux kernel namespaces,\ncgroups, and SELinux.\n\nThe default runtime is `runc`, and automatically configured. Additional\nruntimes can be configured by the user and will be listed here.\n', - example={ - "runc": {"path": "runc"}, - "runc-master": {"path": "/go/bin/runc"}, - "custom": { - "path": "/usr/local/bin/my-oci-runtime", - "runtimeArgs": ["--debug", "--systemd-cgroup=false"], - }, - }, - ) - DefaultRuntime: str | None = Field( - "runc", + examples=[ + { + "runc": {"path": "runc"}, + "runc-master": {"path": "/go/bin/runc"}, + "custom": { + "path": "/usr/local/bin/my-oci-runtime", + "runtimeArgs": ["--debug", "--systemd-cgroup=false"], + }, + } + ], + ) + default_runtime: str | None = Field( + default="runc", + alias="DefaultRuntime", description="Name of the default OCI runtime that is used when starting containers.\n\nThe default can be overridden per-container at create time.\n", - example="runc", + examples=["runc"], ) - Swarm: SwarmInfo | None = None - LiveRestoreEnabled: bool | None = Field( - False, + swarm: SwarmInfo | None = Field(default=None, alias="Swarm") + live_restore_enabled: bool | None = Field( + default=False, + alias="LiveRestoreEnabled", description="Indicates if live restore is enabled.\n\nIf enabled, containers are kept running when the daemon is shutdown\nor upon daemon start if running containers are detected.\n", - example=False, + examples=[False], ) - Isolation: Isolation2 | None = Field( - Isolation2.default, + isolation: Isolation2 | None = Field( + default=Isolation2.default, + alias="Isolation", description="Represents the isolation technology to use as a default for containers.\nThe supported values are platform-specific.\n\nIf no isolation value is specified on daemon start, on Windows client,\nthe default is `hyperv`, and on Windows server, the default is `process`.\n\nThis option is currently not used on other platforms.\n", ) - InitBinary: str | None = Field( - None, + init_binary: str | None = Field( + default=None, + alias="InitBinary", description="Name and, optional, path of the `docker-init` binary.\n\nIf the path is omitted, the daemon searches the host's `$PATH` for the\nbinary and uses the first result.\n", - example="docker-init", - ) - ContainerdCommit: Commit | None = None - RuncCommit: Commit | None = None - InitCommit: Commit | None = None - SecurityOptions: list[str] | None = Field( - None, + examples=["docker-init"], + ) + containerd_commit: Commit | None = Field(default=None, alias="ContainerdCommit") + runc_commit: Commit | None = Field(default=None, alias="RuncCommit") + init_commit: Commit | None = Field(default=None, alias="InitCommit") + security_options: list[str] | None = Field( +
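The `runtimes` field above also touches v2's handling of mutable defaults: rather than a raw dict literal shared across instances, it uses `default_factory`, and the factory must produce a value matching the annotation, i.e. a `dict` of `Runtime` objects rather than a single `Runtime`. A minimal sketch with a stand-in `Runtime` model (not the generated one):

```python
from pydantic import BaseModel, Field


class Runtime(BaseModel):
    # stand-in for the generated Docker `Runtime` model
    path: str | None = None


class DaemonInfo(BaseModel):
    # the factory runs per instance, so the default dict is never shared;
    # it returns a value matching the annotation: a dict of Runtime objects
    runtimes: dict[str, Runtime] | None = Field(
        default_factory=lambda: {"runc": Runtime(path="runc")},
        alias="Runtimes",
    )


info = DaemonInfo()
assert info.runtimes is not None and info.runtimes["runc"].path == "runc"
```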
default=None, + alias="SecurityOptions", description="List of security features that are enabled on the daemon, such as\napparmor, seccomp, SELinux, user-namespaces (userns), and rootless.\n\nAdditional configuration options for each security feature may\nbe present, and are included as a comma-separated list of key/value\npairs.\n", - example=[ - "name=apparmor", - "name=seccomp,profile=default", - "name=selinux", - "name=userns", - "name=rootless", + examples=[ + [ + "name=apparmor", + "name=seccomp,profile=default", + "name=selinux", + "name=userns", + "name=rootless", + ] ], ) - ProductLicense: str | None = Field( - None, + product_license: str | None = Field( + default=None, + alias="ProductLicense", description="Reports a summary of the product license on the daemon.\n\nIf a commercial license has been applied to the daemon, information\nsuch as number of nodes, and expiration are included.\n", - example="Community Engine", + examples=["Community Engine"], ) - DefaultAddressPools: list[DefaultAddressPool] | None = Field( - None, + default_address_pools: list[DefaultAddressPool] | None = Field( + default=None, + alias="DefaultAddressPools", description='List of custom default address pools for local networks, which can be\nspecified in the daemon.json file or dockerd option.\n\nExample: a Base "10.10.0.0/16" with Size 24 will define the set of 256\n10.10.[0-255].0/24 address pools.\n', ) - Warnings: list[str] | None = Field( - None, + warnings: list[str] | None = Field( + default=None, + alias="Warnings", description="List of warnings / informational messages about missing features, or\nissues related to the daemon configuration.\n\nThese messages can be printed by the client as information to the user.\n", - example=[ - "WARNING: No memory limit support", - "WARNING: bridge-nf-call-iptables is disabled", - "WARNING: bridge-nf-call-ip6tables is disabled", + examples=[ + [ + "WARNING: No memory limit support", + "WARNING: bridge-nf-call-iptables is disabled", + "WARNING: bridge-nf-call-ip6tables is disabled", + ] ], ) diff --git a/packages/models-library/src/models_library/generics.py b/packages/models-library/src/models_library/generics.py index 50d6f3398100..753510d088b5 100644 --- a/packages/models-library/src/models_library/generics.py +++ b/packages/models-library/src/models_library/generics.py @@ -1,66 +1,66 @@ from collections.abc import ItemsView, Iterable, Iterator, KeysView, ValuesView from typing import Any, Generic, TypeVar -from pydantic.generics import GenericModel +from pydantic import BaseModel, RootModel DictKey = TypeVar("DictKey") DictValue = TypeVar("DictValue") -class DictModel(GenericModel, Generic[DictKey, DictValue]): - __root__: dict[DictKey, DictValue] +class DictModel(RootModel[dict[DictKey, DictValue]], Generic[DictKey, DictValue]): + root: dict[DictKey, DictValue] def __getitem__(self, k: DictKey) -> DictValue: - return self.__root__.__getitem__(k) + return self.root.__getitem__(k) def __setitem__(self, k: DictKey, v: DictValue) -> None: - self.__root__.__setitem__(k, v) + self.root.__setitem__(k, v) def items(self) -> ItemsView[DictKey, DictValue]: - return self.__root__.items() + return self.root.items() def keys(self) -> KeysView[DictKey]: - return self.__root__.keys() + return self.root.keys() def values(self) -> ValuesView[DictValue]: - return self.__root__.values() + return self.root.values() def update(self, *s: Iterable[tuple[DictKey, DictValue]]) -> None: - return self.__root__.update(*s) + return self.root.update(*s) def __iter__(self) -> 
Iterator[DictKey]: # type: ignore - return self.__root__.__iter__() + return self.root.__iter__() def get(self, key: DictKey, default: DictValue | None = None): - return self.__root__.get(key, default) + return self.root.get(key, default) def setdefault(self, key: DictKey, default: DictValue): - return self.__root__.setdefault(key, default) + return self.root.setdefault(key, default) def __len__(self) -> int: - return self.__root__.__len__() + return self.root.__len__() DataT = TypeVar("DataT") -class ListModel(GenericModel, Generic[DataT]): - __root__: list[DataT] +class ListModel(RootModel[list[DataT]], Generic[DataT]): + root: list[DataT] def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] def __len__(self): - return len(self.__root__) + return len(self.root) -class Envelope(GenericModel, Generic[DataT]): +class Envelope(BaseModel, Generic[DataT]): data: DataT | None = None error: Any | None = None @classmethod def from_data(cls, obj: Any) -> "Envelope": - return cls.parse_obj({"data": obj}) + return cls.model_validate({"data": obj}) diff --git a/packages/models-library/src/models_library/groups.py b/packages/models-library/src/models_library/groups.py index 3be5d1663c83..488776b6d8ea 100644 --- a/packages/models-library/src/models_library/groups.py +++ b/packages/models-library/src/models_library/groups.py @@ -1,7 +1,7 @@ import enum -from typing import Any, ClassVar, Final +from typing import Final -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from pydantic.types import PositiveInt from .utils.common_validators import create_enums_pre_validator @@ -28,16 +28,15 @@ class Group(BaseModel): group_type: GroupTypeInModel = Field(..., alias="type") thumbnail: str | None - _from_equivalent_enums = validator("group_type", allow_reuse=True, pre=True)( + _from_equivalent_enums = field_validator("group_type", mode="before")( create_enums_pre_validator(GroupTypeInModel) ) class GroupAtDB(Group): - class Config: - orm_mode = True - - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "example": { "gid": 218, "name": "Friends group", @@ -45,4 +44,5 @@ class Config: "type": "standard", "thumbnail": "https://image.flaticon.com/icons/png/512/23/23374.png", } - } + }, + ) diff --git a/packages/models-library/src/models_library/invitations.py b/packages/models-library/src/models_library/invitations.py index f7f8328b9be6..595c09b6012c 100644 --- a/packages/models-library/src/models_library/invitations.py +++ b/packages/models-library/src/models_library/invitations.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from typing import Final -from pydantic import BaseModel, EmailStr, Field, PositiveInt, validator +from pydantic import BaseModel, EmailStr, Field, PositiveInt, field_validator from .products import ProductName @@ -35,7 +35,7 @@ class InvitationInputs(BaseModel): description="If None, it will use INVITATIONS_DEFAULT_PRODUCT", ) - @validator("issuer", pre=True) + @field_validator("issuer", mode="before") @classmethod def trim_long_issuers_to_max_length(cls, v): if v and isinstance(v, str): @@ -50,14 +50,14 @@ class InvitationContent(InvitationInputs): created: datetime = Field(..., description="Timestamp for creation") def as_invitation_inputs(self) -> InvitationInputs: - return self.copy(exclude={"created"}) + return 
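The `generics.py` hunk above is the canonical v2 replacement for `GenericModel` with a `__root__` field: inherit from `RootModel[...]` and delegate to `.root`. A reduced, self-contained sketch of the same idea:

```python
from typing import Generic, TypeVar

from pydantic import RootModel

K = TypeVar("K")
V = TypeVar("V")


class DictLike(RootModel[dict[K, V]], Generic[K, V]):
    # v1: `__root__: dict[K, V]` on a GenericModel; v2: the payload lives in `.root`
    def __getitem__(self, key: K) -> V:
        return self.root[key]

    def __len__(self) -> int:
        return len(self.root)


ports = DictLike[str, int].model_validate({"http": 80, "https": 443})
assert ports["https"] == 443 and len(ports) == 2
```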
self.model_validate(self.model_dump(exclude={"created"})) # copy excluding "created" @classmethod def create_from_inputs( cls, invitation_inputs: InvitationInputs, default_product: ProductName ) -> "InvitationContent": - kwargs = invitation_inputs.dict(exclude_none=True) + kwargs = invitation_inputs.model_dump(exclude_none=True) kwargs.setdefault("product", default_product) return cls( created=datetime.now(tz=timezone.utc), diff --git a/packages/models-library/src/models_library/osparc_variable_identifier.py b/packages/models-library/src/models_library/osparc_variable_identifier.py index 71e4779b2ad1..80a8e6d0fc07 100644 --- a/packages/models-library/src/models_library/osparc_variable_identifier.py +++ b/packages/models-library/src/models_library/osparc_variable_identifier.py @@ -1,30 +1,25 @@ from copy import deepcopy from typing import Any, TypeVar -from pydantic import BaseModel, Field -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin +from models_library.basic_types import ConstrainedStr + +from pydantic import BaseModel from .utils.string_substitution import OSPARC_IDENTIFIER_PREFIX T = TypeVar("T") -class OsparcVariableIdentifier(BaseModel): +class OsparcVariableIdentifier(ConstrainedStr): # NOTE: To allow parametrized value, set the type to Union[OsparcVariableIdentifier, ...] # NOTE: When dealing with str types, to avoid unexpected behavior, the following # order is suggested `OsparcVariableIdentifier | str` - __root__: str = Field( - ..., - # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}` - regex=rf"^\${{1,2}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$", + # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}` + pattern = ( + rf"^\${{1,2}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$" ) - def __hash__(self): - return hash(str(self.__root__)) - - def __eq__(self, other): - return self.__root__ == other.__root__ - def _get_without_template_markers(self) -> str: # $VAR # ${VAR} @@ -32,7 +27,7 @@ def _get_without_template_markers(self) -> str: # ${VAR:-default} # ${VAR:-{}} return ( - self.__root__.removeprefix("$$") + self.removeprefix("$$") .removeprefix("$") .removeprefix("{") .removesuffix("}") @@ -48,7 +43,7 @@ def default_value(self) -> str | None: return parts[1] if len(parts) > 1 else None -class UnresolvedOsparcVariableIdentifierError(PydanticErrorMixin, TypeError): +class UnresolvedOsparcVariableIdentifierError(OsparcErrorMixin, TypeError): msg_template = "Provided argument is unresolved: value={value}" diff --git a/packages/models-library/src/models_library/payments.py b/packages/models-library/src/models_library/payments.py index 7a4ec846575e..ff704ab7d2e6 100644 --- a/packages/models-library/src/models_library/payments.py +++ b/packages/models-library/src/models_library/payments.py @@ -1,7 +1,7 @@ from decimal import Decimal -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from .emails import LowerCaseEmailStr from .products import StripePriceID, StripeTaxRateID @@ -19,15 +19,8 @@ class UserInvoiceAddress(BaseModel): description="Currently validated in webserver via pycountry library. 
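The `as_invitation_inputs` hunk above replaces v1's `copy(exclude={...})`, which has no direct v2 equivalent (`model_copy` dropped `exclude=`), with a dump-then-validate round trip. One subtlety the sketch below makes explicit: the re-validation has to target the parent model, since the child still requires the excluded field. A sketch with a hypothetical parent/child pair, not the library's invitation models:

```python
from datetime import datetime, timezone

from pydantic import BaseModel


class Inputs(BaseModel):
    issuer: str
    guest: str


class Content(Inputs):
    created: datetime  # extra required field on the child


def as_inputs(content: Content) -> Inputs:
    # v1: content.copy(exclude={"created"})
    # v2: dump without the extra field, then re-validate as the *parent* model
    # (validating as Content again would fail, since `created` is required)
    return Inputs.model_validate(content.model_dump(exclude={"created"}))


content = Content(issuer="me", guest="you", created=datetime.now(tz=timezone.utc))
assert as_inputs(content) == Inputs(issuer="me", guest="you")
```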
Two letter country code alpha_2 expected.", ) - @validator("*", pre=True) - @classmethod - def parse_empty_string_as_null(cls, v): - if isinstance(v, str) and len(v.strip()) == 0: - return None - return v - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "line1": None, @@ -38,6 +31,14 @@ class Config: }, ] } + ) + + @field_validator("*", mode="before") + @classmethod + def parse_empty_string_as_null(cls, v): + if isinstance(v, str) and len(v.strip()) == 0: + return None + return v class InvoiceDataGet(BaseModel): @@ -48,18 +49,17 @@ class InvoiceDataGet(BaseModel): user_display_name: str user_email: LowerCaseEmailStr - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { - "credit_amount": Decimal(15.5), + "credit_amount": Decimal(15.5), # type: ignore[dict-item] "stripe_price_id": "stripe-price-id", "stripe_tax_rate_id": "stripe-tax-rate-id", - "user_invoice_address": UserInvoiceAddress.Config.schema_extra[ - "examples" - ][0], + "user_invoice_address": UserInvoiceAddress.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] "user_display_name": "My Name", - "user_email": LowerCaseEmailStr("email@example.itis"), + "user_email": "email@example.itis", }, ] } + ) diff --git a/packages/models-library/src/models_library/products.py b/packages/models-library/src/models_library/products.py index c38281d9f6a1..51c44a83d478 100644 --- a/packages/models-library/src/models_library/products.py +++ b/packages/models-library/src/models_library/products.py @@ -1,7 +1,7 @@ from decimal import Decimal -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field ProductName: TypeAlias = str StripePriceID: TypeAlias = str @@ -12,20 +12,20 @@ class CreditResultGet(BaseModel): product_name: ProductName credit_amount: Decimal = Field(..., description="") - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ - {"product_name": "s4l", "credit_amount": Decimal(15.5)}, + {"product_name": "s4l", "credit_amount": Decimal(15.5)}, # type: ignore[dict-item] ] } + ) class ProductStripeInfoGet(BaseModel): stripe_price_id: StripePriceID stripe_tax_rate_id: StripeTaxRateID - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "stripe_price_id": "stripe-price-id", @@ -33,3 +33,4 @@ class Config: }, ] } + ) diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index 788331b103e8..da2829b0c94b 100644 --- a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar, Literal, TypeAlias +from typing import Literal, TypeAlias -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from .basic_types import IDStr @@ -15,8 +15,8 @@ class ProgressStructuredMessage(BaseModel): unit: str | None sub: "ProgressStructuredMessage | None" - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "description": "some description", @@ -42,6 +42,7 @@ class Config: }, ] } + ) UNITLESS = None @@ -77,9 +78,9 @@ def composed_message(self) -> str: return 
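`UserInvoiceAddress.parse_empty_string_as_null` above is a wildcard hook: v1's `@validator("*", pre=True)` becomes `@field_validator("*", mode="before")`, running on every field before coercion. A self-contained sketch of the idiom:

```python
from pydantic import BaseModel, field_validator


class Address(BaseModel):
    # illustrative model, not the library's UserInvoiceAddress
    line1: str | None = None
    city: str | None = None

    @field_validator("*", mode="before")
    @classmethod
    def parse_empty_string_as_null(cls, v):
        # normalize "" and whitespace-only strings to None before validation
        if isinstance(v, str) and not v.strip():
            return None
        return v


assert Address(line1="  ", city="Zurich").line1 is None
```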
msg - class Config: - frozen = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + frozen=True, + json_schema_extra={ "examples": [ # typical percent progress (no units) { @@ -96,9 +97,8 @@ class Config: { "actual_value": 0.3, "total": 1.0, - "message": ProgressStructuredMessage.Config.schema_extra[ - "examples" - ][2], + "message": ProgressStructuredMessage.model_config["json_schema_extra"]["examples"][2], # type: ignore [index] }, ] - } + }, + ) diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 6f62457272cc..60a8874cb419 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -1,19 +1,18 @@ """ Models a study's project document """ -import re -from copy import deepcopy + from datetime import datetime from enum import Enum from typing import Any, Final, TypeAlias from uuid import UUID +from models_library.basic_types import ConstrainedStr from models_library.folders import FolderID from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, ConstrainedStr, Extra, Field, validator +from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator from .basic_regex import DATE_RE, UUID_RE_BASE -from .basic_types import HttpUrlWithCustomMinLength from .emails import LowerCaseEmailStr from .projects_access import AccessRights, GroupIDStr from .projects_nodes import Node @@ -33,17 +32,11 @@ class ProjectIDStr(ConstrainedStr): - regex = re.compile(UUID_RE_BASE) - - class Config: - frozen = True + pattern = UUID_RE_BASE class DateTimeStr(ConstrainedStr): - regex = re.compile(DATE_RE) - - class Config: - frozen = True + pattern = DATE_RE @classmethod def to_datetime(cls, s: "DateTimeStr"): @@ -74,7 +67,7 @@ class BaseProjectModel(BaseModel): description="longer one-line description about the project", examples=["Dabbling in temporal transitions ..."], ) - thumbnail: HttpUrlWithCustomMinLength | None = Field( + thumbnail: HttpUrl | None = Field( ..., description="url of the project thumbnail", examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], @@ -87,11 +80,11 @@ class BaseProjectModel(BaseModel): workbench: NodesDict = Field(..., description="Project's pipeline") # validators - _empty_thumbnail_is_none = validator("thumbnail", allow_reuse=True, pre=True)( + _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")( empty_str_to_none_pre_validator ) - _none_description_is_empty = validator("description", allow_reuse=True, pre=True)( + _none_description_is_empty = field_validator("description", mode="before")( none_to_empty_str_pre_validator ) @@ -109,17 +102,16 @@ class ProjectAtDB(BaseProjectModel): False, description="Defines if a study is available publicly" ) - @validator("project_type", pre=True) + @field_validator("project_type", mode="before") @classmethod def convert_sql_alchemy_enum(cls, v): if isinstance(v, Enum): return v.value return v - class Config: - orm_mode = True - use_enum_values = True - allow_population_by_field_name = True + model_config = ConfigDict( + from_attributes=True, use_enum_values=True, populate_by_name=True + ) class Project(BaseProjectModel): @@ -186,18 +178,7 @@ class Project(BaseProjectModel): alias="folderId", ) - class Config: - description = "Document that stores metadata, pipeline and UI setup of a study" - title = "osparc-simcore project" - extra = Extra.forbid - - @staticmethod - def schema_extra(schema: dict, 
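`projects.py` above keeps the v1 trick of sharing one validator function across several models; only the wrapper changes, with `allow_reuse=True` gone and `field_validator(..., mode="before")` applied to a plain function. A sketch (the helper name here is illustrative, not the one in `utils.common_validators`):

```python
from pydantic import BaseModel, field_validator


def empty_str_to_none(v):
    # shared pre-validator: treat "" as "not provided"
    return None if v == "" else v


class ProjectLike(BaseModel):
    thumbnail: str | None = None

    # v1: validator("thumbnail", allow_reuse=True, pre=True)(fn)
    _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")(
        empty_str_to_none
    )


assert ProjectLike(thumbnail="").thumbnail is None
```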
_model: "Project"): - # pylint: disable=unsubscriptable-object - - # Patch to allow jsonschema nullable - # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530 - state_pydantic_schema = deepcopy(schema["properties"]["state"]) - schema["properties"]["state"] = { - "anyOf": [{"type": "null"}, state_pydantic_schema] - } + model_config = ConfigDict( + title="osparc-simcore project", + extra="forbid", + ) diff --git a/packages/models-library/src/models_library/projects_access.py b/packages/models-library/src/models_library/projects_access.py index 1b800c6b0aed..29ca6c9f5921 100644 --- a/packages/models-library/src/models_library/projects_access.py +++ b/packages/models-library/src/models_library/projects_access.py @@ -3,9 +3,8 @@ """ from enum import Enum -from typing import Any, ClassVar -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from pydantic.types import PositiveInt from .basic_types import IDStr @@ -26,33 +25,22 @@ class AccessRights(BaseModel): read: bool = Field(..., description="has read access") write: bool = Field(..., description="has write access") delete: bool = Field(..., description="has deletion rights") - - class Config: - extra = Extra.forbid - - -class PositiveIntWithExclusiveMinimumRemoved(PositiveInt): - # As we are trying to match this Pydantic model to a historical json schema "project-v0.0.1" we need to remove this - # Pydantic does not support exclusiveMinimum boolean https://github.com/pydantic/pydantic/issues/4108 - @classmethod - def __modify_schema__(cls, field_schema): - field_schema.pop("exclusiveMinimum", None) + model_config = ConfigDict(extra="forbid") class Owner(BaseModel): - user_id: PositiveIntWithExclusiveMinimumRemoved = Field( - ..., description="Owner's user id" - ) + user_id: PositiveInt = Field(..., description="Owner's user id") first_name: FirstNameStr | None = Field(..., description="Owner's first name") last_name: LastNameStr | None = Field(..., description="Owner's last name") - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ # NOTE: None and empty string are both defining an undefined value {"user_id": 1, "first_name": None, "last_name": None}, {"user_id": 2, "first_name": "", "last_name": ""}, {"user_id": 3, "first_name": "John", "last_name": "Smith"}, ] - } + }, + ) diff --git a/packages/models-library/src/models_library/projects_comments.py b/packages/models-library/src/models_library/projects_comments.py index 234ec638a4a8..88937d83d78c 100644 --- a/packages/models-library/src/models_library/projects_comments.py +++ b/packages/models-library/src/models_library/projects_comments.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import TypeAlias -from pydantic import BaseModel, Extra, Field, PositiveInt +from pydantic import BaseModel, ConfigDict, Field, PositiveInt from .projects import ProjectID from .users import UserID @@ -33,12 +33,8 @@ class _ProjectsCommentsBase(BaseModel): class ProjectsCommentsDB(_ProjectsCommentsBase): - class Config: - extra = Extra.forbid - validation = False + model_config = ConfigDict(extra="forbid") class ProjectsCommentsAPI(_ProjectsCommentsBase): - class Config: - extra = Extra.forbid - validation = False + model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/projects_networks.py b/packages/models-library/src/models_library/projects_networks.py index 
e0775ccb5d5f..ee255dd80ff8 100644 --- a/packages/models-library/src/models_library/projects_networks.py +++ b/packages/models-library/src/models_library/projects_networks.py @@ -1,7 +1,7 @@ import re -from typing import Any, ClassVar, Final +from typing import Annotated, Final, TypeAlias -from pydantic import BaseModel, ConstrainedStr, Field +from pydantic import BaseModel, ConfigDict, Field, StringConstraints from .generics import DictModel from .projects import ProjectID @@ -12,12 +12,9 @@ PROJECT_NETWORK_PREFIX: Final[str] = "prj-ntwrk" -class DockerNetworkName(ConstrainedStr): - regex = SERVICE_NETWORK_RE +DockerNetworkName: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_NETWORK_RE)] - -class DockerNetworkAlias(ConstrainedStr): - regex = SERVICE_NETWORK_RE +DockerNetworkAlias: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_NETWORK_RE)] class ContainerAliases(DictModel[NodeIDStr, DockerNetworkAlias]): @@ -25,8 +22,8 @@ class ContainerAliases(DictModel[NodeIDStr, DockerNetworkAlias]): class NetworksWithAliases(DictModel[DockerNetworkName, ContainerAliases]): - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "network_one": { @@ -36,6 +33,7 @@ class Config: }, ] } + ) class ProjectsNetworks(BaseModel): @@ -47,10 +45,9 @@ class ProjectsNetworks(BaseModel): "is given a user defined alias by which it is identified on the network." ), ) - - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "example": { "project_uuid": "ec5cdfea-f24e-4aa1-83b8-6dccfdc8cf4d", "networks_with_aliases": { @@ -60,4 +57,5 @@ class Config: } }, } - } + }, + ) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 318f7149ab4e..3a6ea0523135 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -3,21 +3,22 @@ """ from copy import deepcopy -from typing import Any, ClassVar, TypeAlias, Union +from typing import Annotated, Any, TypeAlias, Union from pydantic import ( BaseModel, - ConstrainedStr, - Extra, + ConfigDict, Field, + HttpUrl, Json, StrictBool, StrictFloat, StrictInt, - validator, + StringConstraints, + field_validator, ) -from .basic_types import EnvVarKey, HttpUrlWithCustomMinLength, KeyIDStr +from .basic_types import EnvVarKey, KeyIDStr from .projects_access import AccessEnum from .projects_nodes_io import ( DatCoreFileLink, @@ -58,12 +59,15 @@ InputID: TypeAlias = KeyIDStr OutputID: TypeAlias = KeyIDStr -InputsDict: TypeAlias = dict[InputID, InputTypes] -OutputsDict: TypeAlias = dict[OutputID, OutputTypes] - +# union_mode="smart" by default for Pydantic>=2: https://docs.pydantic.dev/latest/concepts/unions/#union-modes +InputsDict: TypeAlias = dict[ + InputID, Annotated[InputTypes, Field(union_mode="left_to_right")] +] +OutputsDict: TypeAlias = dict[ + OutputID, Annotated[OutputTypes, Field(union_mode="left_to_right")] +] -class UnitStr(ConstrainedStr): - strip_whitespace = True +UnitStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True)] class NodeState(BaseModel): @@ -85,10 +89,9 @@ class NodeState(BaseModel): le=1.0, description="current progress of the task if available (None if not started or not a computational task)", ) - - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + 
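`DockerNetworkName` and `DockerNetworkAlias` above show the v2 replacement for subclassing `pydantic.ConstrainedStr`: a plain `Annotated` type alias with `StringConstraints`, usable in any model field or standalone via `TypeAdapter`. A sketch with an illustrative pattern (not the library's `SERVICE_NETWORK_RE`):

```python
from typing import Annotated, TypeAlias

from pydantic import StringConstraints, TypeAdapter, ValidationError

NetworkName: TypeAlias = Annotated[
    str, StringConstraints(pattern=r"^[a-z][a-z0-9_-]*$")  # illustrative pattern
]

adapter = TypeAdapter(NetworkName)
assert adapter.validate_python("backend_net") == "backend_net"
try:
    adapter.validate_python("1-bad-name")
except ValidationError:
    pass  # rejected: does not match the pattern
```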
model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "modified": True, @@ -106,7 +109,18 @@ class Config: "currentStatus": "SUCCESS", }, ] - } + }, + ) + + +def _patch_json_schema_extra(schema: dict) -> None: + # NOTE: exporting without this trick does not make runHash as nullable. + # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 + for prop_name in ["parent", "runHash"]: + if prop_name in schema.get("properties", {}): + prop = deepcopy(schema["properties"][prop_name]) + prop["nullable"] = True + schema["properties"][prop_name] = prop class Node(BaseModel): @@ -134,7 +148,7 @@ class Node(BaseModel): description="the node progress value (deprecated in DB, still used for API only)", deprecated=True, ) - thumbnail: HttpUrlWithCustomMinLength | None = Field( + thumbnail: Annotated[str, HttpUrl] | None = Field( default=None, description="url of the latest screenshot of the node", examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], @@ -208,7 +222,7 @@ class Node(BaseModel): ), ) - @validator("thumbnail", pre=True) + @field_validator("thumbnail", mode="before") @classmethod def convert_empty_str_to_none(cls, v): if isinstance(v, str) and v == "": @@ -221,7 +235,7 @@ def convert_old_enum_name(cls, v) -> RunningState: return RunningState.FAILED return RunningState(v) - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod def convert_from_enum(cls, v): if isinstance(v, str): @@ -230,16 +244,7 @@ def convert_from_enum(cls, v): return NodeState(currentStatus=running_state_value) return v - class Config: - extra = Extra.forbid - - # NOTE: exporting without this trick does not make runHash as nullable. - # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 - @staticmethod - def schema_extra(schema, _model: "Node"): - # SEE https://swagger.io/docs/specification/data-models/data-types/#Null - for prop_name in ["parent", "runHash"]: - if prop_name in schema.get("properties", {}): - prop = deepcopy(schema["properties"][prop_name]) - prop["nullable"] = True - schema["properties"][prop_name] = prop + model_config = ConfigDict( + extra="forbid", + json_schema_extra=_patch_json_schema_extra, + ) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index b2d884854896..3a79b6acf003 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -6,20 +6,21 @@ - Link to another port: PortLink """ -import re from pathlib import Path -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, TypeAlias from uuid import UUID -from models_library.basic_types import KeyIDStr +from models_library.basic_types import ConstrainedStr, KeyIDStr from pydantic import ( AnyUrl, BaseModel, - ConstrainedStr, - Extra, + BeforeValidator, + ConfigDict, Field, - parse_obj_as, - validator, + StringConstraints, + TypeAdapter, + ValidationInfo, + field_validator, ) from .basic_regex import ( @@ -31,10 +32,7 @@ NodeID = UUID - -class UUIDStr(ConstrainedStr): - regex: re.Pattern[str] | None = re.compile(UUID_RE) - +UUIDStr: TypeAlias = Annotated[str, StringConstraints(pattern=UUID_RE)] NodeIDStr = UUIDStr @@ -42,8 +40,9 @@ class UUIDStr(ConstrainedStr): LocationName = str -class SimcoreS3FileID(ConstrainedStr): - regex: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE) +SimcoreS3FileID: TypeAlias = 
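`_patch_json_schema_extra` above relies on v2 allowing `json_schema_extra` to be either a dict or a callable that mutates the generated schema in place, which is how the exported schema keeps `parent` and `runHash` nullable. A reduced sketch of the mechanism:

```python
from pydantic import BaseModel, ConfigDict


def _mark_nullable(schema: dict) -> None:
    # post-processes the generated JSON schema (mirrors the parent/runHash patch)
    for prop_name in ("parent",):
        if prop_name in schema.get("properties", {}):
            schema["properties"][prop_name]["nullable"] = True


class NodeLike(BaseModel):
    # illustrative model, not the library's Node
    parent: str | None = None

    model_config = ConfigDict(json_schema_extra=_mark_nullable)


assert NodeLike.model_json_schema()["properties"]["parent"]["nullable"] is True
```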
Annotated[ + str, StringConstraints(pattern=SIMCORE_S3_FILE_ID_RE) +] class SimcoreS3DirectoryID(ConstrainedStr): @@ -52,7 +51,7 @@ class SimcoreS3DirectoryID(ConstrainedStr): `{project_id}/{node_id}/simcore-dir-name/` """ - regex: re.Pattern[str] | None = re.compile(SIMCORE_S3_DIRECTORY_ID_RE) + pattern: str = SIMCORE_S3_DIRECTORY_ID_RE @staticmethod def _get_parent(s3_object: str, *, parent_index: int) -> str: @@ -72,8 +71,8 @@ def _get_parent(s3_object: str, *, parent_index: int) -> str: raise ValueError(msg) from err @classmethod - def validate(cls, value: str) -> str: - value = super().validate(value) + def _validate(cls, __input_value: str) -> str: + value = super()._validate(__input_value) value = value.rstrip("/") parent = cls._get_parent(value, parent_index=3) @@ -86,12 +85,10 @@ def validate(cls, value: str) -> str: @classmethod def from_simcore_s3_object(cls, s3_object: str) -> "SimcoreS3DirectoryID": parent_path: str = cls._get_parent(s3_object, parent_index=4) - return parse_obj_as(cls, f"{parent_path}/") - + return TypeAdapter(cls).validate_python(f"{parent_path}/") -class DatCoreFileID(ConstrainedStr): - regex: re.Pattern[str] | None = re.compile(DATCORE_FILE_ID_RE) +DatCoreFileID: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_FILE_ID_RE)] StorageFileID: TypeAlias = SimcoreS3FileID | DatCoreFileID @@ -108,10 +105,9 @@ class PortLink(BaseModel): ..., description="The port key in the node given by nodeUuid", ) - - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ # minimal { @@ -119,25 +115,28 @@ class Config: "output": "out_2", } ], - } + }, + ) class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. 
S3 pre-signed link, etc)""" - download_link: AnyUrl = Field(..., alias="downloadLink") + download_link: Annotated[ + str, BeforeValidator(lambda x: str(TypeAdapter(AnyUrl).validate_python(x))) + ] = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") - - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ # minimal { "downloadLink": "https://fakeimg.pl/250x100/", } ], - } + }, + ) ## CUSTOM STORAGE SERVICES ----------- @@ -147,16 +146,17 @@ class BaseFileLink(BaseModel): store: LocationID = Field( ..., description="The store identifier: 0 for simcore S3, 1 for datcore", + validate_default=True, ) path: StorageFileID = Field( ..., description="The path to the file in the storage provider domain", + union_mode="left_to_right", ) label: str | None = Field( - default=None, - description="The real file name", + default=None, description="The real file name", validate_default=True ) e_tag: str | None = Field( @@ -165,7 +165,7 @@ class BaseFileLink(BaseModel): alias="eTag", ) - @validator("store", pre=True) + @field_validator("store", mode="before") @classmethod def legacy_enforce_str_to_int(cls, v): # SEE example 'legacy: store as string' @@ -173,6 +173,8 @@ def legacy_enforce_str_to_int(cls, v): return int(v) return v + model_config = ConfigDict(populate_by_name=True) + class SimCoreFileLink(BaseFileLink): """I/O port type to hold a link to a file in simcore S3 storage""" @@ -182,7 +184,7 @@ class SimCoreFileLink(BaseFileLink): deprecated=True, ) - @validator("store", always=True) + @field_validator("store") @classmethod def check_discriminator(cls, v): """Used as discriminator to cast to this class""" @@ -191,16 +193,16 @@ def check_discriminator(cls, v): raise ValueError(msg) return 0 - @validator("label", always=True, pre=True) + @field_validator("label", mode="before") @classmethod - def pre_fill_label_with_filename_ext(cls, v, values): - if v is None and "path" in values: - return Path(values["path"]).name + def pre_fill_label_with_filename_ext(cls, v, info: ValidationInfo): + if v is None and "path" in info.data: + return Path(info.data["path"]).name return v - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "store": 0, @@ -225,7 +227,8 @@ class Config: "path": "94453a6a-c8d4-52b3-a22d-ccbf81f8d636/d4442ca4-23fd-5b6b-ba6d-0b75f711c109/y_1D.txt", }, ], - } + }, + ) class DatCoreFileLink(BaseFileLink): @@ -241,7 +244,7 @@ class DatCoreFileLink(BaseFileLink): description="Unique identifier to access the dataset on datcore (REQUIRED for datcore)", ) - @validator("store", always=True) + @field_validator("store") @classmethod def check_discriminator(cls, v): """Used as discriminator to cast to this class""" @@ -251,9 +254,9 @@ def check_discriminator(cls, v): raise ValueError(msg) return 1 - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { # minimal @@ -270,7 +273,8 @@ class Config: "label": "initial_WTstates", }, ], - } + }, + ) # Bundles all model links to a file vs PortLink diff --git a/packages/models-library/src/models_library/projects_nodes_ui.py b/packages/models-library/src/models_library/projects_nodes_ui.py index aa55332ccbae..e14f2b21a284 100644 --- 
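`DownloadLink.download_link` above works around a v2 behavior change: `AnyUrl` is no longer a `str` subclass, so code expecting a plain string validates through `AnyUrl` and coerces back with a `BeforeValidator`. A minimal sketch of that annotation:

```python
from typing import Annotated

from pydantic import AnyUrl, BaseModel, BeforeValidator, TypeAdapter

# validate as AnyUrl, but store the field as a plain str (v2's AnyUrl is not a str)
UrlStr = Annotated[
    str, BeforeValidator(lambda v: str(TypeAdapter(AnyUrl).validate_python(v)))
]


class Download(BaseModel):
    download_link: UrlStr


link = Download(download_link="https://fakeimg.pl/250x100/").download_link
assert isinstance(link, str) and link.startswith("https://")
```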
diff --git a/packages/models-library/src/models_library/projects_nodes_ui.py b/packages/models-library/src/models_library/projects_nodes_ui.py
index aa55332ccbae..e14f2b21a284 100644
--- a/packages/models-library/src/models_library/projects_nodes_ui.py
+++ b/packages/models-library/src/models_library/projects_nodes_ui.py
@@ -2,20 +2,18 @@
 Models node UI (legacy model, use instead projects.ui.py)
 """

-from pydantic import BaseModel, Extra, Field
-from pydantic.color import Color
+from pydantic import BaseModel, ConfigDict, Field
+from pydantic_extra_types.color import Color


 class Position(BaseModel):
-    x: int = Field(..., description="The x position", example=["12"])
-    y: int = Field(..., description="The y position", example=["15"])
+    x: int = Field(..., description="The x position", examples=[["12"]])
+    y: int = Field(..., description="The y position", examples=[["15"]])

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 class Marker(BaseModel):
     color: Color = Field(...)

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/projects_pipeline.py b/packages/models-library/src/models_library/projects_pipeline.py
index 2139d1820438..975d4726b4e4 100644
--- a/packages/models-library/src/models_library/projects_pipeline.py
+++ b/packages/models-library/src/models_library/projects_pipeline.py
@@ -1,9 +1,8 @@
 import datetime
-from typing import Any, ClassVar
 from uuid import UUID

 import arrow
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt

 from .clusters import ClusterID
 from .projects_nodes import NodeState
@@ -58,8 +57,8 @@ class ComputationTask(BaseModel):
         description="task last modification timestamp or None if the there is no task",
     )

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "id": "42838344-03de-4ce2-8d93-589a5dcdfd05",
@@ -89,9 +88,9 @@ class Config:
                     },
                     "iteration": None,
                     "cluster_id": None,
-                    "started": arrow.utcnow().shift(minutes=-50).datetime,
+                    "started": arrow.utcnow().shift(minutes=-50).datetime,  # type: ignore[dict-item]
                     "stopped": None,
-                    "submitted": arrow.utcnow().shift(hours=-1).datetime,
+                    "submitted": arrow.utcnow().shift(hours=-1).datetime,  # type: ignore[dict-item]
                 },
                 {
                     "id": "f81d7994-9ccc-4c95-8c32-aa70d6bbb1b0",
@@ -121,9 +120,10 @@ class Config:
                     },
                     "iteration": 2,
                     "cluster_id": 0,
-                    "started": arrow.utcnow().shift(minutes=-50).datetime,
-                    "stopped": arrow.utcnow().shift(minutes=-20).datetime,
-                    "submitted": arrow.utcnow().shift(hours=-1).datetime,
+                    "started": arrow.utcnow().shift(minutes=-50).datetime,  # type: ignore[dict-item]
+                    "stopped": arrow.utcnow().shift(minutes=-20).datetime,  # type: ignore[dict-item]
+                    "submitted": arrow.utcnow().shift(hours=-1).datetime,  # type: ignore[dict-item]
                 },
             ]
         }
+    )
diff --git a/packages/models-library/src/models_library/projects_state.py b/packages/models-library/src/models_library/projects_state.py
index 757704e14d33..9f067f8dc516 100644
--- a/packages/models-library/src/models_library/projects_state.py
+++ b/packages/models-library/src/models_library/projects_state.py
@@ -3,9 +3,8 @@
 """

 from enum import Enum, unique
-from typing import Any, ClassVar

-from pydantic import BaseModel, Extra, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator

 from .projects_access import Owner

@@ -58,14 +57,15 @@ class ProjectStatus(str, Enum):
 class ProjectLocked(BaseModel):
     value: bool = Field(..., description="True if the project is locked")
     owner: Owner | None = Field(
-        default=None, description="If locked, the user that owns the lock"
+        default=None,
+        description="If locked, the user that owns the lock",
+        validate_default=True,
     )
     status: ProjectStatus = Field(..., description="The status of the project")
-
-    class Config:
-        extra = Extra.forbid
-        use_enum_values = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        extra="forbid",
+        use_enum_values=True,
+        json_schema_extra={
             "examples": [
                 {"value": False, "status": ProjectStatus.CLOSED},
                 {
@@ -78,24 +78,25 @@ class Config:
                     },
                 },
             ]
-        }
+        },
+    )

-    @validator("owner", pre=True, always=True)
+    @field_validator("owner", mode="before")
     @classmethod
-    def check_not_null(cls, v, values):
-        if values["value"] is True and v is None:
+    def check_not_null(cls, v, info: ValidationInfo):
+        if info.data["value"] is True and v is None:
             msg = "value cannot be None when project is locked"
             raise ValueError(msg)
         return v

-    @validator("status", always=True)
+    @field_validator("status")
     @classmethod
-    def check_status_compatible(cls, v, values):
-        if values["value"] is False and v not in ["CLOSED", "OPENED"]:
-            msg = f"status is set to {v} and lock is set to {values['value']}!"
+    def check_status_compatible(cls, v, info: ValidationInfo):
+        if info.data["value"] is False and v not in ["CLOSED", "OPENED"]:
+            msg = f"status is set to {v} and lock is set to {info.data['value']}!"
             raise ValueError(msg)
-        if values["value"] is True and v == "CLOSED":
-            msg = f"status is set to {v} and lock is set to {values['value']}!"
+        if info.data["value"] is True and v == "CLOSED":
+            msg = f"status is set to {v} and lock is set to {info.data['value']}!"
             raise ValueError(msg)
         return v

@@ -105,13 +106,11 @@ class ProjectRunningState(BaseModel):
         ..., description="The running state of the project", examples=["STARTED"]
     )

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 class ProjectState(BaseModel):
     locked: ProjectLocked = Field(..., description="The project lock state")
     state: ProjectRunningState = Field(..., description="The project running state")

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")
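Note: v1 validators received a `values` dict; the v2 `field_validator` passes a `ValidationInfo` whose `.data` only holds fields declared (and validated) before the current one. A minimal sketch of the cross-field check above (model and field names are illustrative):

    from pydantic import BaseModel, ValidationInfo, field_validator

    class Lock(BaseModel):
        value: bool
        owner: str | None = None

        @field_validator("owner", mode="before")
        @classmethod
        def _owner_required_when_locked(cls, v, info: ValidationInfo):
            # `value` is declared before `owner`, so it is available in info.data
            if info.data.get("value") is True and v is None:
                raise ValueError("owner cannot be None when locked")
            return v
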
diff --git a/packages/models-library/src/models_library/projects_ui.py b/packages/models-library/src/models_library/projects_ui.py
index 154007a2a6d0..93aa68d628bf 100644
--- a/packages/models-library/src/models_library/projects_ui.py
+++ b/packages/models-library/src/models_library/projects_ui.py
@@ -2,10 +2,11 @@
 Models Front-end UI
 """

-from typing import Any, ClassVar, Literal, TypedDict
+from typing import Literal

-from pydantic import BaseModel, Extra, Field, validator
-from pydantic.color import Color
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+from pydantic_extra_types.color import Color
+from typing_extensions import TypedDict

 from .projects_nodes_io import NodeID, NodeIDStr
 from .projects_nodes_ui import Marker, Position
@@ -15,9 +16,7 @@ class WorkbenchUI(BaseModel):
     position: Position = Field(..., description="The node position in the workbench")
     marker: Marker | None = None

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 class _SlideshowRequired(TypedDict):
@@ -32,10 +31,9 @@ class Annotation(BaseModel):
     type: Literal["note", "rect", "text"] = Field(...)
     color: Color = Field(...)
     attributes: dict = Field(..., description="svg attributes")
-
-    class Config:
-        extra = Extra.forbid
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
             "examples": [
                 {
                     "type": "note",
@@ -60,7 +58,8 @@ class Config:
                     "attributes": {"x": 415, "y": 100, "text": "Hey!"},
                 },
             ]
-        }
+        },
+    )


 class StudyUI(BaseModel):
@@ -69,9 +68,8 @@ class StudyUI(BaseModel):
     current_node_id: NodeID | None = Field(default=None, alias="currentNodeId")
     annotations: dict[NodeIDStr, Annotation] | None = None

-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")

-    _empty_is_none = validator("*", allow_reuse=True, pre=True)(
+    _empty_is_none = field_validator("*", mode="before")(
         empty_str_to_none_pre_validator
     )
diff --git a/packages/models-library/src/models_library/rabbitmq_basic_types.py b/packages/models-library/src/models_library/rabbitmq_basic_types.py
index 022b66b9a9df..e8ae694b8be2 100644
--- a/packages/models-library/src/models_library/rabbitmq_basic_types.py
+++ b/packages/models-library/src/models_library/rabbitmq_basic_types.py
@@ -1,15 +1,15 @@
-import re
 from typing import Final

-from pydantic import ConstrainedStr, parse_obj_as
+from models_library.basic_types import ConstrainedStr
+from pydantic import TypeAdapter

 REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS: Final[str] = r"^[\w\-\.]*$"


 class RPCNamespace(ConstrainedStr):
+    pattern = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS
     min_length: int = 1
     max_length: int = 252
-    regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS)

     @classmethod
     def from_entries(cls, entries: dict[str, str]) -> "RPCNamespace":
@@ -18,10 +18,10 @@ def from_entries(cls, entries: dict[str, str]) -> "RPCNamespace":
         Keeping this to a predefined length
         """
         composed_string = "-".join(f"{k}_{v}" for k, v in sorted(entries.items()))
-        return parse_obj_as(cls, composed_string)
+        return TypeAdapter(cls).validate_python(composed_string)


 class RPCMethodName(ConstrainedStr):
+    pattern = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS
     min_length: int = 1
     max_length: int = 252
-    regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS)
diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py
index 69812689baae..dd8917586035 100644
--- a/packages/models-library/src/models_library/rabbitmq_messages.py
+++ b/packages/models-library/src/models_library/rabbitmq_messages.py
@@ -30,12 +30,12 @@ class RabbitEventMessageType(str, Enum):


 class RabbitMessageBase(BaseModel):
-    channel_name: str = Field(..., const=True)
+    channel_name: str

     @classmethod
     def get_channel_name(cls) -> str:
         # NOTE: this returns the channel type name
-        name: str = cls.__fields__["channel_name"].default
+        name: str = cls.model_fields["channel_name"].default
         return name

     @abstractmethod
@@ -46,7 +46,7 @@ def routing_key(self) -> str | None:
         """

     def body(self) -> bytes:
-        return self.json().encode()
+        return self.model_dump_json().encode()


 class ProjectMessageBase(BaseModel):
@@ -133,9 +133,7 @@ def routing_key(self) -> str | None:


 class _RabbitAutoscalingBaseMessage(RabbitMessageBase):
-    channel_name: Literal["io.simcore.autoscaling"] = Field(
-        default="io.simcore.autoscaling", const=True
-    )
+    channel_name: Literal["io.simcore.autoscaling"] = "io.simcore.autoscaling"
     origin: str = Field(
         ..., description="autoscaling app type, in case there would be more than one"
     )
@@ -178,9 +176,7 @@ class RabbitResourceTrackingMessageType(StrAutoEnum):


 class RabbitResourceTrackingBaseMessage(RabbitMessageBase):
-    channel_name: Literal["io.simcore.service.tracking"] = Field(
-        default="io.simcore.service.tracking", const=True
-    )
+    channel_name: Literal["io.simcore.service.tracking"] = "io.simcore.service.tracking"

     service_run_id: str = Field(
         ..., description="uniquely identitifies the service run"
@@ -196,7 +192,7 @@ def routing_key(self) -> str | None:

 class DynamicServiceRunningMessage(RabbitMessageBase):
     channel_name: Literal["io.simcore.service.dynamic-service-running"] = Field(
-        default="io.simcore.service.dynamic-service-running", const=True
+        default="io.simcore.service.dynamic-service-running"
     )

     project_id: ProjectID
@@ -213,9 +209,9 @@ def routing_key(self) -> str | None:

 class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage):
-    message_type: RabbitResourceTrackingMessageType = Field(
-        default=RabbitResourceTrackingMessageType.TRACKING_STARTED, const=True
-    )
+    message_type: Literal[
+        RabbitResourceTrackingMessageType.TRACKING_STARTED
+    ] = RabbitResourceTrackingMessageType.TRACKING_STARTED

     wallet_id: WalletID | None
     wallet_name: str | None
@@ -253,9 +249,9 @@ class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage):

 class RabbitResourceTrackingHeartbeatMessage(RabbitResourceTrackingBaseMessage):
-    message_type: RabbitResourceTrackingMessageType = Field(
-        default=RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT, const=True
-    )
+    message_type: Literal[
+        RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT
+    ] = RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT


 class SimcorePlatformStatus(StrAutoEnum):
@@ -264,9 +260,9 @@ class SimcorePlatformStatus(StrAutoEnum):

 class RabbitResourceTrackingStoppedMessage(RabbitResourceTrackingBaseMessage):
-    message_type: RabbitResourceTrackingMessageType = Field(
-        default=RabbitResourceTrackingMessageType.TRACKING_STOPPED, const=True
-    )
+    message_type: Literal[
+        RabbitResourceTrackingMessageType.TRACKING_STOPPED
+    ] = RabbitResourceTrackingMessageType.TRACKING_STOPPED

     simcore_platform_status: SimcorePlatformStatus = Field(
         ...,
@@ -282,9 +278,7 @@ class RabbitResourceTrackingStoppedMessage(RabbitResourceTrackingBaseMessage):

 class WalletCreditsMessage(RabbitMessageBase):
-    channel_name: Literal["io.simcore.service.wallets"] = Field(
-        default="io.simcore.service.wallets", const=True
-    )
+    channel_name: Literal["io.simcore.service.wallets"] = "io.simcore.service.wallets"
     created_at: datetime.datetime = Field(
         default_factory=lambda: arrow.utcnow().datetime,
         description="message creation datetime",
@@ -302,9 +296,9 @@ class CreditsLimit(IntEnum):

 class WalletCreditsLimitReachedMessage(RabbitMessageBase):
-    channel_name: Literal["io.simcore.service.wallets-credit-limit-reached"] = Field(
-        default="io.simcore.service.wallets-credit-limit-reached", const=True
-    )
+    channel_name: Literal[
+        "io.simcore.service.wallets-credit-limit-reached"
+    ] = "io.simcore.service.wallets-credit-limit-reached"
     created_at: datetime.datetime = Field(
         default_factory=lambda: arrow.utcnow().datetime,
         description="message creation datetime",
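Note: `Field(..., const=True)` was removed in pydantic v2; a `Literal` type with a matching default gives the same constant-field behaviour, as in the channel_name changes above. A minimal sketch (the channel name is illustrative):

    from typing import Literal

    from pydantic import BaseModel

    class PingMessage(BaseModel):
        # v1: channel_name: str = Field("io.simcore.ping", const=True)
        channel_name: Literal["io.simcore.ping"] = "io.simcore.ping"

    PingMessage()                        # ok, defaults to the constant
    # PingMessage(channel_name="other")  # would raise ValidationError
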
diff --git a/packages/models-library/src/models_library/resource_tracker.py b/packages/models-library/src/models_library/resource_tracker.py
index 13c92e161ed1..c3b42a087957 100644
--- a/packages/models-library/src/models_library/resource_tracker.py
+++ b/packages/models-library/src/models_library/resource_tracker.py
@@ -2,16 +2,16 @@
 from datetime import datetime, timezone
 from decimal import Decimal
 from enum import IntEnum, auto
-from typing import Any, ClassVar, NamedTuple, TypeAlias
+from typing import NamedTuple, TypeAlias

 from pydantic import (
     BaseModel,
     ByteSize,
-    Extra,
+    ConfigDict,
     Field,
     NonNegativeInt,
     PositiveInt,
-    validator,
+    field_validator,
 )

 from .products import ProductName
@@ -59,26 +59,28 @@ class PricingInfo(BaseModel):
     pricing_unit_id: PricingUnitId
     pricing_unit_cost_id: PricingUnitCostId

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {"pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1}
             ]
         }
+    )


 class HardwareInfo(BaseModel):
     aws_ec2_instances: list[str]

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {"aws_ec2_instances": ["c6a.4xlarge"]},
                 {"aws_ec2_instances": []},
             ]
         }
+    )

-    @validator("aws_ec2_instances")
+    @field_validator("aws_ec2_instances")
     @classmethod
     def warn_if_too_many_instances_are_present(cls, v: list[str]) -> list[str]:
         if len(v) > 1:
@@ -106,10 +108,9 @@ class StartedAt(BaseModel):
     from_: datetime | None = Field(None, alias="from")
     until: datetime | None = Field(None)

-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True)

-    @validator("from_", pre=True)
+    @field_validator("from_", mode="before")
     @classmethod
     def parse_from_filter(cls, v):
         """Parse the filters field."""
@@ -124,7 +125,7 @@ def parse_from_filter(cls, v):
             return from_
         return v

-    @validator("until", pre=True)
+    @field_validator("until", mode="before")
     @classmethod
     def parse_until_filter(cls, v):
         """Parse the filters field."""
@@ -153,9 +154,8 @@ class PricingPlanCreate(BaseModel):
     description: str
     classification: PricingPlanClassification
     pricing_plan_key: str
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "product_name": "osparc",
@@ -166,6 +166,7 @@ class Config:
                 }
             ]
         }
+    )


 class PricingPlanUpdate(BaseModel):
@@ -174,8 +175,8 @@ class PricingPlanUpdate(BaseModel):
     description: str
     is_active: bool

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "pricing_plan_id": 1,
@@ -185,6 +186,7 @@ class Config:
                 }
             ]
         }
+    )


 ## Pricing Units
@@ -202,10 +204,10 @@ class UnitExtraInfo(BaseModel):
     RAM: ByteSize
     VRAM: ByteSize

-    class Config:
-        allow_population_by_field_name = True
-        extra = Extra.allow
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        populate_by_name=True,
+        extra="allow",
+        json_schema_extra={
             "examples": [
                 {
                     "CPU": 32,
@@ -215,7 +217,8 @@ class Config:
                     "custom key": "custom value",
                 }
             ]
-        }
+        },
+    )


 class PricingUnitWithCostCreate(BaseModel):
@@ -227,13 +230,13 @@ class PricingUnitWithCostCreate(BaseModel):
     cost_per_unit: Decimal
     comment: str

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "pricing_plan_id": 1,
                     "unit_name": "My pricing plan",
-                    "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+                    "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0],  # type: ignore [index]
                     "default": True,
                     "specific_info": {"aws_ec2_instances": ["t3.medium"]},
                     "cost_per_unit": 10,
@@ -241,6 +244,7 @@ class Config:
                 }
             ]
         }
+    )


 class PricingUnitCostUpdate(BaseModel):
@@ -257,14 +261,14 @@ class PricingUnitWithCostUpdate(BaseModel):
     specific_info: SpecificInfo
     pricing_unit_cost_update: None | PricingUnitCostUpdate

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "pricing_plan_id": 1,
                     "pricing_unit_id": 1,
                     "unit_name": "My pricing plan",
-                    "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+                    "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0],  # type: ignore [index]
                     "default": True,
                     "specific_info": {"aws_ec2_instances": ["t3.medium"]},
                     "pricing_unit_cost_update": {
@@ -276,13 +280,14 @@ class Config:
                     "pricing_plan_id": 1,
                     "pricing_unit_id": 1,
                     "unit_name": "My pricing plan",
-                    "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+                    "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0],  # type: ignore [index]
                     "default": True,
                     "specific_info": {"aws_ec2_instances": ["t3.medium"]},
                     "pricing_unit_cost_update": None,
                 },
             ]
         }
+    )


 class ServicesAggregatedUsagesType(StrAutoEnum):
diff --git a/packages/models-library/src/models_library/rest_ordering.py b/packages/models-library/src/models_library/rest_ordering.py
index c8a791343ee3..7b1b6b39c39e 100644
--- a/packages/models-library/src/models_library/rest_ordering.py
+++ b/packages/models-library/src/models_library/rest_ordering.py
@@ -1,6 +1,6 @@
 from enum import Enum

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field

 from .basic_types import IDStr

@@ -16,5 +16,4 @@ class OrderBy(BaseModel):
     field: IDStr = Field()
     direction: OrderDirection = Field(default=OrderDirection.ASC)

-    class Config:
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid")
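Note: `Config.schema_extra` becomes the `json_schema_extra` key of `model_config`, so example payloads are now read from the config mapping, as the `UnitExtraInfo` lookups above show. A minimal sketch (the model name is illustrative):

    from pydantic import BaseModel, ConfigDict

    class PricingStub(BaseModel):
        pricing_plan_id: int

        model_config = ConfigDict(
            json_schema_extra={"examples": [{"pricing_plan_id": 1}]}
        )

    # v1: PricingStub.Config.schema_extra["examples"][0]
    example = PricingStub.model_config["json_schema_extra"]["examples"][0]
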
diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py
index 89c90cb1c2d3..28556d50303b 100644
--- a/packages/models-library/src/models_library/rest_pagination.py
+++ b/packages/models-library/src/models_library/rest_pagination.py
@@ -1,17 +1,17 @@
-from typing import Any, ClassVar, Final, Generic, TypeVar
+from typing import Annotated, Final, Generic, TypeAlias, TypeVar

+from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
 from pydantic import (
-    AnyHttpUrl,
     BaseModel,
-    ConstrainedInt,
-    Extra,
+    BeforeValidator,
+    ConfigDict,
     Field,
     NonNegativeInt,
     PositiveInt,
-    parse_obj_as,
-    validator,
+    TypeAdapter,
+    ValidationInfo,
+    field_validator,
 )
-from pydantic.generics import GenericModel

 from .utils.common_validators import none_to_empty_list_pre_validator

@@ -21,19 +21,22 @@
 MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE: Final[int] = 50


-class PageLimitInt(ConstrainedInt):
-    ge = 1
-    lt = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE
+PageLimitInt: TypeAlias = Annotated[
+    int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE)
+]

-
-DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = parse_obj_as(PageLimitInt, 20)
+DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = TypeAdapter(
+    PageLimitInt
+).validate_python(20)


 class PageQueryParameters(BaseModel):
     """Use as pagination options in query parameters"""

     limit: PageLimitInt = Field(
-        default=parse_obj_as(PageLimitInt, DEFAULT_NUMBER_OF_ITEMS_PER_PAGE),
+        default=TypeAdapter(PageLimitInt).validate_python(
+            DEFAULT_NUMBER_OF_ITEMS_PER_PAGE
+        ),
         description="maximum number of items to return (pagination)",
     )
     offset: NonNegativeInt = Field(
@@ -47,38 +50,36 @@ class PageMetaInfoLimitOffset(BaseModel):
     offset: NonNegativeInt = 0
     count: NonNegativeInt

-    @validator("offset")
+    @field_validator("offset")
     @classmethod
-    def _check_offset(cls, v, values):
-        if v > 0 and v >= values["total"]:
-            msg = f"offset {v} cannot be equal or bigger than total {values['total']}, please check"
+    def _check_offset(cls, v, info: ValidationInfo):
+        if v > 0 and v >= info.data["total"]:
+            msg = f"offset {v} cannot be equal or bigger than total {info.data['total']}, please check"
             raise ValueError(msg)
         return v

-    @validator("count")
+    @field_validator("count")
     @classmethod
-    def _check_count(cls, v, values):
-        if v > values["limit"]:
-            msg = f"count {v} bigger than limit {values['limit']}, please check"
+    def _check_count(cls, v, info: ValidationInfo):
+        if v > info.data["limit"]:
+            msg = f"count {v} bigger than limit {info.data['limit']}, please check"
             raise ValueError(msg)
-        if v > values["total"]:
-            msg = (
-                f"count {v} bigger than expected total {values['total']}, please check"
-            )
+        if v > info.data["total"]:
+            msg = f"count {v} bigger than expected total {info.data['total']}, please check"
             raise ValueError(msg)
-        if "offset" in values and (values["offset"] + v) > values["total"]:
-            msg = f"offset {values['offset']} + count {v} is bigger than allowed total {values['total']}, please check"
+        if "offset" in info.data and (info.data["offset"] + v) > info.data["total"]:
+            msg = f"offset {info.data['offset']} + count {v} is bigger than allowed total {info.data['total']}, please check"
             raise ValueError(msg)
         return v

-    class Config:
-        extra = Extra.forbid
-
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
             "examples": [
                 {"total": 7, "count": 4, "limit": 4, "offset": 0},
             ]
-        }
+        },
+    )


 RefT = TypeVar("RefT")
@@ -91,18 +92,26 @@ class PageRefs(BaseModel, Generic[RefT]):
     next: RefT | None
     last: RefT

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


-class PageLinks(PageRefs[AnyHttpUrl]):
+class PageLinks(
+    PageRefs[
+        Annotated[
+            str,
+            BeforeValidator(
+                lambda x: str(TypeAdapter(AnyHttpUrlLegacy).validate_python(x))
+            ),
+        ]
+    ]
+):
     ...


 ItemT = TypeVar("ItemT")


-class Page(GenericModel, Generic[ItemT]):
+class Page(BaseModel, Generic[ItemT]):
     """
     Paginated response model of ItemTs
     """
@@ -111,26 +120,25 @@ class Page(BaseModel, Generic[ItemT]):
     links: PageLinks = Field(alias="_links")
     data: list[ItemT]

-    _none_is_empty = validator("data", allow_reuse=True, pre=True)(
+    _none_is_empty = field_validator("data", mode="before")(
         none_to_empty_list_pre_validator
     )

-    @validator("data")
+    @field_validator("data")
     @classmethod
-    def _check_data_compatible_with_meta(cls, v, values):
-        if "meta" not in values:
+    def _check_data_compatible_with_meta(cls, v, info: ValidationInfo):
+        if "meta" not in info.data:
             # if the validation failed in meta this happens
             msg = "meta not in values"
             raise ValueError(msg)
-        if len(v) != values["meta"].count:
-            msg = f"container size [{len(v)}] must be equal to count [{values['meta'].count}]"
+        if len(v) != info.data["meta"].count:
+            msg = f"container size [{len(v)}] must be equal to count [{info.data['meta'].count}]"
             raise ValueError(msg)
         return v

-    class Config:
-        extra = Extra.forbid
-
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
             "examples": [
                 # first page Page[str]
                 {
@@ -157,4 +165,5 @@ class Config:
                     "data": ["data 5", "data 6", "data 7"],
                 },
             ]
-        }
+        },
+    )
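Note: `pydantic.generics.GenericModel` is gone in v2; generic models subclass `BaseModel` directly, and `parse_obj` becomes `model_validate`. A minimal sketch (names are illustrative, not from this codebase):

    from typing import Generic, TypeVar

    from pydantic import BaseModel

    ItemT = TypeVar("ItemT")

    class Envelope(BaseModel, Generic[ItemT]):
        # v1: class Envelope(GenericModel, Generic[ItemT])
        data: list[ItemT]

    page = Envelope[str].model_validate({"data": ["a", "b"]})
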
diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py
index ec9cedf0a544..41899acd8cf5 100644
--- a/packages/models-library/src/models_library/rest_pagination_utils.py
+++ b/packages/models-library/src/models_library/rest_pagination_utils.py
@@ -1,7 +1,8 @@
 from math import ceil
 from typing import Any, Protocol, TypedDict, Union, runtime_checkable

-from pydantic import AnyHttpUrl, parse_obj_as
+from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
+from pydantic import TypeAdapter

 from .rest_pagination import PageLinks, PageMetaInfoLimitOffset

@@ -38,7 +39,9 @@ def _replace_query(url: _URLType, query: dict[str, Any]) -> str:
         new_url = url.update_query(query)
     else:
         new_url = url.replace_query_params(**query)
-    return f"{new_url}"
+
+    new_url_str = f"{new_url}"
+    return f"{TypeAdapter(AnyHttpUrlLegacy).validate_python(new_url_str)}"


 class PageDict(TypedDict):
@@ -60,7 +63,7 @@ def paginate_data(
     Usage:
         obj: PageDict = paginate_data( ... )
-        model = Page[MyModelItem].parse_obj(obj)
+        model = Page[MyModelItem].model_validate(obj)

         raises ValidationError
     """
@@ -71,37 +74,21 @@ def paginate_data(
             total=total, count=len(chunk), limit=limit, offset=offset
         ),
         _links=PageLinks(
-            self=(
-                parse_obj_as(
-                    AnyHttpUrl,
-                    _replace_query(request_url, {"offset": offset, "limit": limit}),
-                )
-            ),
-            first=parse_obj_as(
-                AnyHttpUrl, _replace_query(request_url, {"offset": 0, "limit": limit})
-            ),
-            prev=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url, {"offset": max(offset - limit, 0), "limit": limit}
-                ),
+            self=_replace_query(request_url, {"offset": offset, "limit": limit}),
+            first=_replace_query(request_url, {"offset": 0, "limit": limit}),
+            prev=_replace_query(
+                request_url, {"offset": max(offset - limit, 0), "limit": limit}
             )
             if offset > 0
             else None,
-            next=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url,
-                    {"offset": min(offset + limit, last_page * limit), "limit": limit},
-                ),
+            next=_replace_query(
+                request_url,
+                {"offset": min(offset + limit, last_page * limit), "limit": limit},
             )
             if offset < (last_page * limit)
             else None,
-            last=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url, {"offset": last_page * limit, "limit": limit}
-                ),
+            last=_replace_query(
+                request_url, {"offset": last_page * limit, "limit": limit}
             ),
         ),
         data=chunk,
diff --git a/packages/models-library/src/models_library/rpc_pagination.py b/packages/models-library/src/models_library/rpc_pagination.py
index 34eeb9979906..92470b30d677 100644
--- a/packages/models-library/src/models_library/rpc_pagination.py
+++ b/packages/models-library/src/models_library/rpc_pagination.py
@@ -1,8 +1,8 @@
 # mypy: disable-error-code=truthy-function
 from math import ceil
-from typing import Any, ClassVar, Generic
+from typing import Any, Generic

-from pydantic import Extra, Field
+from pydantic import ConfigDict, Field

 from .rest_pagination import (
     DEFAULT_NUMBER_OF_ITEMS_PER_PAGE,
@@ -31,7 +31,7 @@ class PageRefsParams(PageRefs[PageQueryParameters]):
     @classmethod
     def create(cls, total: int, limit: int, offset: int) -> "PageRefsParams":
         last_page = ceil(total / limit) - 1
-        return cls.parse_obj(
+        return cls.model_validate(
             {
                 "self": {"offset": offset, "limit": limit},
                 "first": {"offset": 0, "limit": limit},
@@ -74,10 +74,9 @@ def create(
             data=chunk,
         )

-    class Config:
-        extra = Extra.forbid
-
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        extra="forbid",
+        json_schema_extra={
             "examples": [
                 # first page Page[str]
                 {
@@ -104,4 +103,5 @@ class Config:
                     "data": ["data 5", "data 6", "data 7"],
                 },
            ]
-        }
+        },
+    )
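Note: v2 URL types are no longer `str` subclasses, which is why `_replace_query` above validates and then formats back to a plain string. A minimal sketch of that round-trip with the standard `AnyHttpUrl` (the URL is illustrative):

    from pydantic import AnyHttpUrl, TypeAdapter

    raw = "https://example.com/items?offset=0&limit=20"
    # validate, then str() back so downstream models keep receiving strings
    url_str = str(TypeAdapter(AnyHttpUrl).validate_python(raw))
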
diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py
index 95bc13a0b094..851b1880cc31 100644
--- a/packages/models-library/src/models_library/service_settings_labels.py
+++ b/packages/models-library/src/models_library/service_settings_labels.py
@@ -3,19 +3,20 @@
 from enum import Enum
 from functools import cached_property
 from pathlib import Path
-from typing import Any, ClassVar, Literal, TypeAlias
+from typing import Any, Literal, TypeAlias

 from pydantic import (
     BaseModel,
     ByteSize,
-    Extra,
+    ConfigDict,
     Field,
     Json,
     PrivateAttr,
+    TypeAdapter,
     ValidationError,
-    parse_obj_as,
-    root_validator,
-    validator,
+    ValidationInfo,
+    field_validator,
+    model_validator,
 )

 from .callbacks_mapping import CallbacksMapping
@@ -24,11 +25,9 @@
 from .services_resources import DEFAULT_SINGLE_SERVICE_NAME
 from .utils.json_serialization import json_dumps

-
-class _BaseConfig:
-    arbitrary_types_allowed = True
-    extra = Extra.forbid
-    keep_untouched = (cached_property,)
+_BaseConfig = ConfigDict(
+    extra="forbid", arbitrary_types_allowed=True, ignored_types=(cached_property,)
+)


 class ContainerSpec(BaseModel):
@@ -40,18 +39,19 @@ class ContainerSpec(BaseModel):
         alias="Command",
         description="Used to override the container's command",
         # NOTE: currently constraint to our use cases. Might mitigate some security issues.
-        min_items=1,
-        max_items=2,
+        min_length=1,
+        max_length=2,
     )

-    class Config(_BaseConfig):
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = _BaseConfig | ConfigDict(
+        json_schema_extra={
             "examples": [
                 {"Command": ["executable"]},
                 {"Command": ["executable", "subcommand"]},
                 {"Command": ["ofs", "linear-regression"]},
             ]
-        }
+        },
+    )


 class SimcoreServiceSettingLabelEntry(BaseModel):
@@ -93,7 +93,7 @@ def get_destination_containers(self) -> list[str]:
         # as fields
         return self._destination_containers

-    @validator("setting_type", pre=True)
+    @field_validator("setting_type", mode="before")
     @classmethod
     def ensure_backwards_compatible_setting_type(cls, v):
         if v == "resources":
@@ -101,9 +101,9 @@ def ensure_backwards_compatible_setting_type(cls, v):
             return "Resources"
         return v

-    class Config(_BaseConfig):
-        allow_population_by_field_name = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = _BaseConfig | ConfigDict(
+        populate_by_name=True,
+        json_schema_extra={
             "examples": [
                 # constraints
                 {
@@ -157,7 +157,8 @@ class Config(_BaseConfig):
                 },
             },
         ]
-        }
+        },
+    )


 SimcoreServiceSettingsLabel = ListModel[SimcoreServiceSettingLabelEntry]
@@ -191,23 +192,23 @@ class PathMappingsLabel(BaseModel):
         ),
     )

-    @validator("volume_size_limits")
+    @field_validator("volume_size_limits")
     @classmethod
-    def validate_volume_limits(cls, v, values) -> str | None:
+    def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
         if v is None:
             return v

         for path_str, size_str in v.items():
             # checks that format is correct
             try:
-                parse_obj_as(ByteSize, size_str)
+                TypeAdapter(ByteSize).validate_python(size_str)
             except ValidationError as e:
                 msg = f"Provided size='{size_str}' contains invalid charactes: {e!s}"
                 raise ValueError(msg) from e

-        inputs_path: Path | None = values.get("inputs_path")
-        outputs_path: Path | None = values.get("outputs_path")
-        state_paths: list[Path] | None = values.get("state_paths")
+            inputs_path: Path | None = info.data.get("inputs_path")
+            outputs_path: Path | None = info.data.get("outputs_path")
+            state_paths: list[Path] | None = info.data.get("state_paths")
             path = Path(path_str)
             if not (
                 path in (inputs_path, outputs_path)
@@ -218,8 +219,8 @@ def validate_volume_limits(cls, v, values) -> str | None:
         output: str | None = v
         return output

-    class Config(_BaseConfig):
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = _BaseConfig | ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "outputs_path": "/tmp/outputs",  # noqa: S108 nosec
@@ -249,7 +250,8 @@ class Config(_BaseConfig):
                     },
                 },
             ]
-        }
+        },
+    )


 ComposeSpecLabelDict: TypeAlias = dict[str, Any]
@@ -292,6 +294,7 @@ class DynamicSidecarServiceLabels(BaseModel):
             "specified. Required by dynamic-sidecar when "
             "compose_spec is set."
         ),
+        validate_default=True,
     )

     user_preferences_path: Path | None = Field(
@@ -339,25 +342,29 @@ def needs_dynamic_sidecar(self) -> bool:
         """if paths mapping is present the service needs to be ran via dynamic-sidecar"""
         return self.paths_mapping is not None

-    @validator("container_http_entry", always=True)
+    @field_validator("container_http_entry")
     @classmethod
-    def compose_spec_requires_container_http_entry(cls, v, values) -> str | None:
+    def compose_spec_requires_container_http_entry(
+        cls, v, info: ValidationInfo
+    ) -> str | None:
         v = None if v == "" else v
-        if v is None and values.get("compose_spec") is not None:
+        if v is None and info.data.get("compose_spec") is not None:
             msg = "Field `container_http_entry` must be defined but is missing"
             raise ValueError(msg)
-        if v is not None and values.get("compose_spec") is None:
+        if v is not None and info.data.get("compose_spec") is None:
             msg = "`container_http_entry` not allowed if `compose_spec` is missing"
             raise ValueError(msg)
         return f"{v}" if v else v

-    @validator("containers_allowed_outgoing_permit_list")
+    @field_validator("containers_allowed_outgoing_permit_list")
     @classmethod
-    def _containers_allowed_outgoing_permit_list_in_compose_spec(cls, v, values):
+    def _containers_allowed_outgoing_permit_list_in_compose_spec(
+        cls, v, info: ValidationInfo
+    ):
         if v is None:
             return v

-        compose_spec: dict | None = values.get("compose_spec")
+        compose_spec: dict | None = info.data.get("compose_spec")
         if compose_spec is None:
             keys = set(v.keys())
             if len(keys) != 1 or DEFAULT_SINGLE_SERVICE_NAME not in keys:
@@ -372,13 +379,15 @@ def _containers_allowed_outgoing_permit_list_in_compose_spec(cls, v, values):

         return v

-    @validator("containers_allowed_outgoing_internet")
+    @field_validator("containers_allowed_outgoing_internet")
     @classmethod
-    def _containers_allowed_outgoing_internet_in_compose_spec(cls, v, values):
+    def _containers_allowed_outgoing_internet_in_compose_spec(
+        cls, v, info: ValidationInfo
+    ):
         if v is None:
-            return v
+            return None

-        compose_spec: dict | None = values.get("compose_spec")
+        compose_spec: dict | None = info.data.get("compose_spec")
         if compose_spec is None:
             if {DEFAULT_SINGLE_SERVICE_NAME} != v:
                 err_msg = (
@@ -393,10 +402,10 @@ def _containers_allowed_outgoing_internet_in_compose_spec(cls, v, values):
                 raise ValueError(err_msg)
         return v

-    @validator("callbacks_mapping")
+    @field_validator("callbacks_mapping")
     @classmethod
     def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
-        cls, v: CallbacksMapping, values
+        cls, v: CallbacksMapping, info: ValidationInfo
     ):
         if v is None:
             return {}
@@ -408,7 +417,7 @@ def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
         if len(defined_services) == 0:
             return v

-        compose_spec: dict | None = values.get("compose_spec")
+        compose_spec: dict | None = info.data.get("compose_spec")
         if compose_spec is None:
             if {DEFAULT_SINGLE_SERVICE_NAME} != defined_services:
                 err_msg = f"Expected only 1 entry '{DEFAULT_SINGLE_SERVICE_NAME}' not '{defined_services}'"
@@ -421,17 +430,17 @@ def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
             raise ValueError(err_msg)
         return v

-    @validator("user_preferences_path", pre=True)
+    @field_validator("user_preferences_path", mode="before")
     @classmethod
     def _deserialize_from_json(cls, v):
         return f"{v}".removeprefix('"').removesuffix('"')

-    @validator("user_preferences_path")
+    @field_validator("user_preferences_path")
     @classmethod
     def _user_preferences_path_no_included_in_other_volumes(
-        cls, v: CallbacksMapping, values
+        cls, v: CallbacksMapping, info: ValidationInfo
     ):
-        paths_mapping: PathMappingsLabel | None = values.get("paths_mapping", None)
+        paths_mapping: PathMappingsLabel | None = info.data.get("paths_mapping", None)
         if paths_mapping is None:
             return v

@@ -445,33 +454,24 @@ def _user_preferences_path_no_included_in_other_volumes(
             raise ValueError(msg)
         return v

-    @root_validator
-    @classmethod
-    def _not_allowed_in_both_specs(cls, values):
+    @model_validator(mode="after")
+    def _not_allowed_in_both_specs(self):
         match_keys = {
             "containers_allowed_outgoing_internet",
             "containers_allowed_outgoing_permit_list",
         }
-        if match_keys & set(values.keys()) != match_keys:
-            err_msg = (
-                f"Expected the following keys {match_keys} to be present {values=}"
-            )
+        if match_keys & set(self.model_fields) != match_keys:
+            err_msg = f"Expected the following keys {match_keys} to be present {self.model_fields=}"
             raise ValueError(err_msg)

-        containers_allowed_outgoing_internet = values[
-            "containers_allowed_outgoing_internet"
-        ]
-        containers_allowed_outgoing_permit_list = values[
-            "containers_allowed_outgoing_permit_list"
-        ]
         if (
-            containers_allowed_outgoing_internet is None
-            or containers_allowed_outgoing_permit_list is None
+            self.containers_allowed_outgoing_internet is None
+            or self.containers_allowed_outgoing_permit_list is None
         ):
-            return values
+            return self

-        common_containers = set(containers_allowed_outgoing_internet) & set(
-            containers_allowed_outgoing_permit_list.keys()
+        common_containers = set(self.containers_allowed_outgoing_internet) & set(
+            self.containers_allowed_outgoing_permit_list.keys()
         )
         if len(common_containers) > 0:
             err_msg = (
@@ -481,10 +481,9 @@ def _not_allowed_in_both_specs(cls, values):
             )
             raise ValueError(err_msg)

-        return values
+        return self

-    class Config(_BaseConfig):
-        ...
+    model_config = _BaseConfig


 class SimcoreServiceLabels(DynamicSidecarServiceLabels):
@@ -513,24 +512,32 @@ class SimcoreServiceLabels(DynamicSidecarServiceLabels):
         ),
     )

-    class Config(_BaseConfig):
-        extra = Extra.allow
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = _BaseConfig | ConfigDict(
+        extra="allow",
+        json_schema_extra={
             "examples": [
                 # WARNING: do not change order. Used in tests!
                 # legacy service
                 {
                     "simcore.service.settings": json_dumps(
-                        SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+                        SimcoreServiceSettingLabelEntry.model_config[
+                            "json_schema_extra"
+                        ][
+                            "examples"
+                        ]  # type: ignore[index]
                     )
                 },
                 # dynamic-service
                 {
                     "simcore.service.settings": json_dumps(
-                        SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+                        SimcoreServiceSettingLabelEntry.model_config[
+                            "json_schema_extra"
+                        ][
+                            "examples"
+                        ]  # type: ignore[index]
                     ),
                     "simcore.service.paths-mapping": json_dumps(
-                        PathMappingsLabel.Config.schema_extra["examples"][0]
+                        PathMappingsLabel.model_config["json_schema_extra"]["examples"][0]  # type: ignore [index]
                     ),
                     "simcore.service.restart-policy": RestartPolicy.NO_RESTART.value,
                     "simcore.service.callbacks-mapping": json_dumps(
@@ -549,10 +556,14 @@ class Config(_BaseConfig):
                 # dynamic-service with compose spec
                 {
                     "simcore.service.settings": json_dumps(
-                        SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+                        SimcoreServiceSettingLabelEntry.model_config[
+                            "json_schema_extra"
+                        ][
+                            "examples"
+                        ]  # type: ignore[index]
                     ),
                     "simcore.service.paths-mapping": json_dumps(
-                        PathMappingsLabel.Config.schema_extra["examples"][0]
+                        PathMappingsLabel.model_config["json_schema_extra"]["examples"][0],  # type: ignore[index]
                     ),
                     "simcore.service.compose-spec": json_dumps(
                         {
@@ -580,8 +591,9 @@ class Config(_BaseConfig):
                     "simcore.service.container-http-entrypoint": "rt-web",
                     "simcore.service.restart-policy": RestartPolicy.ON_INPUTS_DOWNLOADED.value,
                     "simcore.service.callbacks-mapping": json_dumps(
-                        CallbacksMapping.Config.schema_extra["examples"][3]
+                        CallbacksMapping.model_config["json_schema_extra"]["examples"][3]  # type: ignore [index]
                     ),
                 },
             ]
-        }
+        },
+    )
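Note: `root_validator` becomes `model_validator`; in mode="after" the hook receives the constructed instance (`self`) rather than a `values` dict, as in `_not_allowed_in_both_specs` above. A minimal sketch (field names shortened, illustrative):

    from pydantic import BaseModel, model_validator

    class OutgoingRules(BaseModel):
        allow_internet: set[str] | None = None
        permit_list: dict[str, dict] | None = None

        @model_validator(mode="after")
        def _no_container_in_both(self) -> "OutgoingRules":
            # cross-field check on the already-validated instance
            if self.allow_internet and self.permit_list:
                common = self.allow_internet & set(self.permit_list)
                if common:
                    raise ValueError(f"{common} cannot be in both specs")
            return self
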
diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py
index bcdf0604eec6..1f50b62f5037 100644
--- a/packages/models-library/src/models_library/service_settings_nat_rule.py
+++ b/packages/models-library/src/models_library/service_settings_nat_rule.py
@@ -1,14 +1,14 @@
 from collections.abc import Generator
-from typing import Any, ClassVar, Final
+from typing import Final

-from pydantic import BaseModel, Extra, Field, parse_obj_as, validator
+from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, ValidationInfo, field_validator

 from .basic_types import PortInt
 from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved

 # Cloudflare DNS server address
 DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1"  # NOSONAR
-DEFAULT_DNS_SERVER_PORT: Final[PortInt] = parse_obj_as(PortInt, 53)
+DEFAULT_DNS_SERVER_PORT: Final[PortInt] = TypeAdapter(PortInt).validate_python(53)


 class _PortRange(BaseModel):
@@ -17,14 +17,14 @@ class _PortRange(BaseModel):
     lower: PortInt | OsparcVariableIdentifier
     upper: PortInt | OsparcVariableIdentifier

-    @validator("upper")
+    @field_validator("upper")
     @classmethod
-    def lower_less_than_upper(cls, v, values) -> PortInt:
+    def lower_less_than_upper(cls, v, info: ValidationInfo) -> PortInt:
         if isinstance(v, OsparcVariableIdentifier):
             return v  # type: ignore # bypass validation if unresolved

         upper = v
-        lower: PortInt | OsparcVariableIdentifier | None = values.get("lower")
+        lower: PortInt | OsparcVariableIdentifier | None = info.data.get("lower")

         if lower and isinstance(lower, OsparcVariableIdentifier):
             return v  # type: ignore # bypass validation if unresolved
@@ -34,9 +34,7 @@ def lower_less_than_upper(cls, v, values) -> PortInt:
             raise ValueError(msg)
         return PortInt(v)

-    class Config:
-        arbitrary_types_allowed = True
-        validate_assignment = True
+    model_config = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)


 class DNSResolver(BaseModel):
@@ -45,16 +43,17 @@ class DNSResolver(BaseModel):
     )
     port: PortInt | OsparcVariableIdentifier

-    class Config:
-        arbitrary_types_allowed = True
-        validate_assignment = True
-        extra = Extra.allow
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        validate_assignment=True,
+        extra="allow",
+        json_schema_extra={
             "examples": [
                 {"address": "1.1.1.1", "port": 53},  # NOSONAR
                 {"address": "ns1.example.com", "port": 53},
             ]
-        }
+        },
+    )


 class NATRule(BaseModel):
@@ -69,6 +68,8 @@ class NATRule(BaseModel):
         description="specify a DNS resolver address and port",
     )

+    model_config = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)
+
     def iter_tcp_ports(self) -> Generator[PortInt, None, None]:
         for port in self.tcp_ports:
             if isinstance(port, _PortRange):
@@ -81,7 +82,3 @@ def iter_tcp_ports(self) -> Generator[PortInt, None, None]:
                 )
             else:
                 yield raise_if_unresolved(port)
-
-    class Config:
-        arbitrary_types_allowed = True
-        validate_assignment = True
diff --git a/packages/models-library/src/models_library/services_access.py b/packages/models-library/src/models_library/services_access.py
index 8bc6786c6955..84dbd7d17a0e 100644
--- a/packages/models-library/src/models_library/services_access.py
+++ b/packages/models-library/src/models_library/services_access.py
@@ -2,7 +2,7 @@
 """

-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field

 from .users import GroupID
 from .utils.change_case import snake_to_camel
@@ -22,10 +22,9 @@ class ServiceGroupAccessRightsV2(BaseModel):
     execute: bool = False
     write: bool = False

-    class Config:
-        alias_generator = snake_to_camel
-        allow_population_by_field_name = True
-        extra = Extra.forbid
+    model_config = ConfigDict(
+        alias_generator=snake_to_camel, populate_by_name=True, extra="forbid"
+    )


 class ServiceAccessRights(BaseModel):
diff --git a/packages/models-library/src/models_library/services_authoring.py b/packages/models-library/src/models_library/services_authoring.py
index 18673319f46b..05b5197994c8 100644
--- a/packages/models-library/src/models_library/services_authoring.py
+++ b/packages/models-library/src/models_library/services_authoring.py
@@ -1,6 +1,4 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel, Field, HttpUrl
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl

 from .emails import LowerCaseEmailStr

@@ -18,15 +16,15 @@ class Badge(BaseModel):
         ...,
         description="Link to the status",
     )
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": {
                 "name": "osparc.io",
                 "image": "https://img.shields.io/website-up-down-green-red/https/itisfoundation.github.io.svg?label=documentation",
                 "url": "https://itisfoundation.github.io/",
             }
         }
+    )


 class Author(BaseModel):
@@ -39,9 +37,8 @@ class Author(BaseModel):
         description="Email address",
     )
     affiliation: str | None = Field(None)
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "name": "Jim Knopf",
@@ -54,3 +51,4 @@ class Config:
                 },
             ]
         }
+    )
diff --git a/packages/models-library/src/models_library/services_base.py b/packages/models-library/src/models_library/services_base.py
index d80fc59df24c..5f92d6e46b6b 100644
--- a/packages/models-library/src/models_library/services_base.py
+++ b/packages/models-library/src/models_library/services_base.py
@@ -1,4 +1,5 @@
-from pydantic import BaseModel, Field, HttpUrl, validator
+from typing import Annotated
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator

 from .services_types import ServiceKey, ServiceVersion
 from .utils.common_validators import empty_str_to_none_pre_validator
@@ -16,22 +17,22 @@ class ServiceKeyVersion(BaseModel):
         description="service version number",
     )

-    class Config:
-        frozen = True
+    model_config = ConfigDict(frozen=True)


 class ServiceBaseDisplay(BaseModel):
     name: str = Field(
         ...,
         description="Display name: short, human readable name for the node",
-        example="Fast Counter",
+        examples=["Fast Counter"],
     )
-    thumbnail: HttpUrl | None = Field(
+    thumbnail: Annotated[str, HttpUrl] | None = Field(
         None,
         description="url to the thumbnail",
         examples=[
             "https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png"
         ],
+        validate_default=True,
    )
     description: str = Field(
         ...,
@@ -53,6 +54,6 @@ class ServiceBaseDisplay(BaseModel):
         " This name is not used for version comparison but is useful for communication and documentation purposes.",
     )

-    _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True, always=False)(
+    _empty_is_none = field_validator("thumbnail", mode="before")(
         empty_str_to_none_pre_validator
     )
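Note: reusable v1 validators declared with `validator(..., pre=True, allow_reuse=True)` map to `field_validator(..., mode="before")`; `allow_reuse` is simply dropped because v2 no longer needs it. A minimal sketch (the helper and field names are illustrative):

    from pydantic import BaseModel, field_validator

    def empty_str_to_none(value):
        # shared pre-validator: treat "" as missing
        return None if isinstance(value, str) and value.strip() == "" else value

    class Display(BaseModel):
        thumbnail: str | None = None

        _empty_is_none = field_validator("thumbnail", mode="before")(empty_str_to_none)
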
diff --git a/packages/models-library/src/models_library/services_creation.py b/packages/models-library/src/models_library/services_creation.py
index e2102efe0750..5abb8c9e4d2d 100644
--- a/packages/models-library/src/models_library/services_creation.py
+++ b/packages/models-library/src/models_library/services_creation.py
@@ -1,9 +1,9 @@
-from typing import Any, ClassVar
+from typing import Any

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict, TypeAdapter

-from .services import ServiceKey, ServiceVersion
 from .services_resources import ServiceResourcesDict
+from .services_types import ServiceKey, ServiceVersion
 from .wallets import WalletID

@@ -23,8 +23,8 @@ class CreateServiceMetricsAdditionalParams(BaseModel):
     service_resources: ServiceResourcesDict
     service_additional_metadata: dict[str, Any]

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": {
                 "wallet_id": 1,
                 "wallet_name": "a private wallet for me",
@@ -36,9 +36,13 @@ class Config:
                 "user_email": "test@test.com",
                 "project_name": "_!New Study",
                 "node_name": "the service of a lifetime _ *!",
-                "service_key": ServiceKey("simcore/services/dynamic/test"),
-                "service_version": ServiceVersion("0.0.1"),
+                "service_key": TypeAdapter(ServiceKey).validate_python(
+                    "simcore/services/dynamic/test"
+                ),
+                "service_version": TypeAdapter(ServiceVersion).validate_python("0.0.1"),
                 "service_resources": {},
                 "service_additional_metadata": {},
+                "pricing_unit_cost_id": None,
             }
         }
+    )
diff --git a/packages/models-library/src/models_library/services_history.py b/packages/models-library/src/models_library/services_history.py
index 70f4e513c15b..b38f5f2e783e 100644
--- a/packages/models-library/src/models_library/services_history.py
+++ b/packages/models-library/src/models_library/services_history.py
@@ -1,7 +1,7 @@
 from datetime import datetime
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field

 from .services_types import ServiceKey, ServiceVersion
 from .utils.change_case import snake_to_camel

@@ -21,9 +21,7 @@ class Compatibility(BaseModel):
         ..., description="Latest compatible service at this moment"
     )

-    class Config:
-        alias_generator = snake_to_camel
-        allow_population_by_field_name = True
+    model_config = ConfigDict(alias_generator=snake_to_camel, populate_by_name=True)


 class ServiceRelease(BaseModel):
@@ -46,10 +44,10 @@ class ServiceRelease(BaseModel):
         default=None, description="Compatibility with other releases at this moment"
     )

-    class Config:
-        alias_generator = snake_to_camel
-        allow_population_by_field_name = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        alias_generator=snake_to_camel,
+        populate_by_name=True,
+        json_schema_extra={
             "examples": [
                 # minimal
                 {
@@ -69,7 +67,8 @@ class Config:
                     },
                 },
             ]
-        }
+        },
+    )


 ReleaseHistory: TypeAlias = list[ServiceRelease]
diff --git a/packages/models-library/src/models_library/services_io.py b/packages/models-library/src/models_library/services_io.py
index 52c09fa241d7..db43ee6eb6c7 100644
--- a/packages/models-library/src/models_library/services_io.py
+++ b/packages/models-library/src/models_library/services_io.py
@@ -1,15 +1,15 @@
-import re
-from typing import Any, ClassVar
+from typing import Annotated, Any, TypeAlias

 from pydantic import (
     BaseModel,
-    ConstrainedStr,
-    Extra,
+    ConfigDict,
     Field,
     StrictBool,
     StrictFloat,
     StrictInt,
-    validator,
+    StringConstraints,
+    ValidationInfo,
+    field_validator,
 )

 from .services_constants import ANY_FILETYPE
@@ -22,12 +22,7 @@
     jsonschema_validate_schema,
 )

-
-class PropertyTypeStr(ConstrainedStr):
-    regex = re.compile(PROPERTY_TYPE_RE)
-
-    class Config:
-        frozen = True
+PropertyTypeStr: TypeAlias = Annotated[str, StringConstraints(pattern=PROPERTY_TYPE_RE)]


 class BaseServiceIOModel(BaseModel):
@@ -45,11 +40,11 @@ class BaseServiceIOModel(BaseModel):
         description="DEPRECATED: new display order is taken from the item position. This will be removed.",
     )

-    label: str = Field(..., description="short name for the property", example="Age")
+    label: str = Field(..., description="short name for the property", examples=["Age"])
     description: str = Field(
         ...,
         description="description of the property",
-        example="Age in seconds since 1970",
+        examples=["Age in seconds since 1970"],
     )

     # mathematical and physics descriptors
@@ -92,18 +87,20 @@ class BaseServiceIOModel(BaseModel):
         deprecated=True,  # add x_unit in content_schema instead
     )

-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")

-    @validator("content_schema")
+    @field_validator("content_schema")
     @classmethod
-    def _check_type_is_set_to_schema(cls, v, values):
-        if v is not None and (ptype := values["property_type"]) != "ref_contentSchema":
+    def _check_type_is_set_to_schema(cls, v, info: ValidationInfo):
+        if (
+            v is not None
+            and (ptype := info.data["property_type"]) != "ref_contentSchema"
+        ):
             msg = f"content_schema is defined but set the wrong type. Expected type=ref_contentSchema but got ={ptype}."
             raise ValueError(msg)
         return v

-    @validator("content_schema")
+    @field_validator("content_schema")
     @classmethod
     def _check_valid_json_schema(cls, v):
         if v is not None:
@@ -151,8 +148,8 @@ class ServiceInput(BaseServiceIOModel):
         description="custom widget to use instead of the default one determined from the data-type",
     )

-    class Config(BaseServiceIOModel.Config):
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 # file-wo-widget:
                 {
@@ -206,13 +203,14 @@ class Config(BaseServiceIOModel.Config):
                 },
             ],
-        }
+        },
+    )

     @classmethod
     def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput":
         """Creates input port model from a json-schema"""
         data = cls._from_json_schema_base_implementation(port_schema)
-        return cls.parse_obj(data)
+        return cls.model_validate(data)


 class ServiceOutput(BaseServiceIOModel):
@@ -222,8 +220,8 @@ class ServiceOutput(BaseServiceIOModel):
         deprecated=True,
     )

-    class Config(BaseServiceIOModel.Config):
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "displayOrder": 2,
@@ -251,10 +249,11 @@ class Config(BaseServiceIOModel.Config):
                     "type": ANY_FILETYPE,
                 },
             ]
-        }
+        },
+    )

     @classmethod
     def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput":
         """Creates output port model from a json-schema"""
         data = cls._from_json_schema_base_implementation(port_schema)
-        return cls.parse_obj(data)
+        return cls.model_validate(data)
diff --git a/packages/models-library/src/models_library/services_metadata_editable.py b/packages/models-library/src/models_library/services_metadata_editable.py
index 18d66483f1c3..4ad106225c09 100644
--- a/packages/models-library/src/models_library/services_metadata_editable.py
+++ b/packages/models-library/src/models_library/services_metadata_editable.py
@@ -1,8 +1,8 @@
 # mypy: disable-error-code=truthy-function
 from datetime import datetime
-from typing import Any, ClassVar
+from typing import Annotated, Any

-from pydantic import Field, HttpUrl
+from pydantic import ConfigDict, Field, HttpUrl

 from .services_base import ServiceBaseDisplay
 from .services_constants import LATEST_INTEGRATION_VERSION
@@ -19,7 +19,7 @@ class ServiceMetaDataEditable(ServiceBaseDisplay):
     # Overrides ServiceBaseDisplay fields to Optional for a partial update
     name: str | None  # type: ignore[assignment]
-    thumbnail: HttpUrl | None
+    thumbnail: Annotated[str, HttpUrl] | None
     description: str | None  # type: ignore[assignment]
     description_ui: bool = False
     version_display: str | None = None
@@ -35,8 +35,8 @@ class ServiceMetaDataEditable(ServiceBaseDisplay):
     classifiers: list[str] | None
     quality: dict[str, Any] = {}

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": {
                 "key": "simcore/services/dynamic/sim4life",
                 "version": "1.0.9",
@@ -62,3 +62,4 @@ class Config:
             },
         }
     }
+    )
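Note: the classmethod constructors above switch from `cls.parse_obj(data)` to `cls.model_validate(data)`, the v2 entry point for validating untyped mappings. A minimal sketch (model and key names are illustrative):

    from typing import Any

    from pydantic import BaseModel

    class PortStub(BaseModel):
        label: str

    def from_json_schema(port_schema: dict[str, Any]) -> PortStub:
        data = {"label": port_schema.get("title", "untitled")}
        return PortStub.model_validate(data)  # v1: PortStub.parse_obj(data)
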
.basic_types import SemanticVersionStr from .boot_options import BootOption, BootOptions @@ -76,12 +76,8 @@ } }, "boot-options": { - "example_service_defined_boot_mode": BootOption.Config.schema_extra["examples"][ - 0 - ], - "example_service_defined_theme_selection": BootOption.Config.schema_extra[ - "examples" - ][1], + "example_service_defined_boot_mode": BootOption.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] + "example_service_defined_theme_selection": BootOption.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] }, "min-visible-inputs": 2, } @@ -120,7 +116,7 @@ class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBaseDisplay): badges: list[Badge] | None = Field(None, deprecated=True) - authors: list[Author] = Field(..., min_items=1) + authors: list[Author] = Field(..., min_length=1) contact: LowerCaseEmailStr = Field( ..., description="email to correspond to the authors about the node", @@ -160,22 +156,21 @@ class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBaseDisplay): description="Image manifest digest. Note that this is NOT injected as an image label", ) - class Config: - description = "Description of a simcore node 'class' with input and output" - extra = Extra.forbid - frozen = False # overrides config from ServiceKeyVersion. - allow_population_by_field_name = True - - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + frozen=False, + populate_by_name=True, + json_schema_extra={ "examples": [ - _EXAMPLE, - _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, + _EXAMPLE, # type: ignore[list-item] + _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, # type: ignore[list-item] # latest { - **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, + **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, # type: ignore[dict-item] "version_display": "Matterhorn Release", "description_ui": True, "release_date": "2024-05-31T13:45:30", }, ] - } + }, + ) diff --git a/packages/models-library/src/models_library/services_resources.py b/packages/models-library/src/models_library/services_resources.py index 7fe4f268f8c5..7a2b65456a4f 100644 --- a/packages/models-library/src/models_library/services_resources.py +++ b/packages/models-library/src/models_library/services_resources.py @@ -1,35 +1,32 @@ -import logging from enum import auto -from typing import Any, ClassVar, Final, TypeAlias +from typing import Any, Final, TypeAlias from pydantic import ( BaseModel, ByteSize, + ConfigDict, Field, StrictFloat, StrictInt, - parse_obj_as, - root_validator, + TypeAdapter, + model_validator, ) from .docker import DockerGenericTag from .utils.enums import StrAutoEnum from .utils.fastapi_encoders import jsonable_encoder -_logger = logging.getLogger(__name__) - - ResourceName = str # NOTE: replace hard coded `container` with function which can # extract the name from the `service_key` or `registry_address/service_key` -DEFAULT_SINGLE_SERVICE_NAME: Final[DockerGenericTag] = parse_obj_as( - DockerGenericTag, "container" -) +DEFAULT_SINGLE_SERVICE_NAME: Final[DockerGenericTag] = TypeAdapter( + DockerGenericTag +).validate_python("container") -MEMORY_50MB: Final[int] = parse_obj_as(ByteSize, "50mib") -MEMORY_250MB: Final[int] = parse_obj_as(ByteSize, "250mib") -MEMORY_1GB: Final[int] = parse_obj_as(ByteSize, "1gib") +MEMORY_50MB: Final[int] = TypeAdapter(ByteSize).validate_python("50mib") +MEMORY_250MB: Final[int] = TypeAdapter(ByteSize).validate_python("250mib") +MEMORY_1GB: Final[int] = TypeAdapter(ByteSize).validate_python("1gib") GIGA: 
Final[float] = 1e9 CPU_10_PERCENT: Final[int] = int(0.1 * GIGA) @@ -40,7 +37,7 @@ class ResourceValue(BaseModel): limit: StrictInt | StrictFloat | str reservation: StrictInt | StrictFloat | str - @root_validator() + @model_validator(mode="before") @classmethod def _ensure_limits_are_equal_or_above_reservations(cls, values): if isinstance(values["reservation"], str): @@ -59,8 +56,7 @@ def set_reservation_same_as_limit(self) -> None: def set_value(self, value: StrictInt | StrictFloat | str) -> None: self.limit = self.reservation = value - class Config: - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) ResourcesDict = dict[ResourceName, ResourceValue] @@ -92,8 +88,8 @@ def set_reservation_same_as_limit(self) -> None: for resource in self.resources.values(): resource.set_reservation_same_as_limit() - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "image": "simcore/service/dynamic/pretty-intense:1.0.0", "resources": { @@ -108,6 +104,7 @@ class Config: }, } } + ) ServiceResourcesDict: TypeAlias = dict[DockerGenericTag, ImageResources] @@ -122,8 +119,7 @@ def create_from_single_service( ) -> ServiceResourcesDict: if boot_modes is None: boot_modes = [BootMode.CPU] - return parse_obj_as( - ServiceResourcesDict, + return TypeAdapter(ServiceResourcesDict).validate_python( { DEFAULT_SINGLE_SERVICE_NAME: { "image": image, @@ -140,8 +136,8 @@ def create_jsonable( output: dict[DockerGenericTag, Any] = jsonable_encoder(service_resources) return output - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ # no compose spec (majority of services) { @@ -150,8 +146,10 @@ class Config: "resources": { "CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": { - "limit": parse_obj_as(ByteSize, "2Gib"), - "reservation": parse_obj_as(ByteSize, "2Gib"), + "limit": TypeAdapter(ByteSize).validate_python("2Gib"), + "reservation": TypeAdapter(ByteSize).validate_python( + "2Gib" + ), }, }, "boot_modes": [BootMode.CPU], @@ -181,8 +179,10 @@ class Config: "resources": { "CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": { - "limit": parse_obj_as(ByteSize, "2Gib"), - "reservation": parse_obj_as(ByteSize, "2Gib"), + "limit": TypeAdapter(ByteSize).validate_python("2Gib"), + "reservation": TypeAdapter(ByteSize).validate_python( + "2Gib" + ), }, }, "boot_modes": [BootMode.CPU], @@ -195,8 +195,10 @@ class Config: "resources": { "CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": { - "limit": parse_obj_as(ByteSize, "2Gib"), - "reservation": parse_obj_as(ByteSize, "2Gib"), + "limit": TypeAdapter(ByteSize).validate_python("2Gib"), + "reservation": TypeAdapter(ByteSize).validate_python( + "2Gib" + ), }, }, "boot_modes": [BootMode.CPU], @@ -206,8 +208,10 @@ class Config: "resources": { "CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": { - "limit": parse_obj_as(ByteSize, "2Gib"), - "reservation": parse_obj_as(ByteSize, "2Gib"), + "limit": TypeAdapter(ByteSize).validate_python("2Gib"), + "reservation": TypeAdapter(ByteSize).validate_python( + "2Gib" + ), }, }, "boot_modes": [BootMode.CPU], @@ -215,3 +219,4 @@ class Config: }, ] } + ) diff --git a/packages/models-library/src/models_library/services_types.py b/packages/models-library/src/models_library/services_types.py index 366d8bc00c20..e882c8dae094 100644 --- a/packages/models-library/src/models_library/services_types.py +++ b/packages/models-library/src/models_library/services_types.py @@ -1,8 +1,9 @@ -import re 
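The two recurring moves in the services_resources.py hunks above are parse_obj_as(T, v) -> TypeAdapter(T).validate_python(v) and @root_validator() -> @model_validator(mode="before"). A minimal, self-contained sketch of both patterns, assuming plain pydantic v2 (Quota and its fields are illustrative stand-ins, not repo code):

from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, model_validator

# v1: parse_obj_as(ByteSize, "250mib")
MEMORY_250MB: int = TypeAdapter(ByteSize).validate_python("250mib")


class Quota(BaseModel):
    limit: int
    reservation: int

    # v1: class Config: validate_assignment = True
    model_config = ConfigDict(validate_assignment=True)

    # v1: @root_validator(); mode="before" receives the raw input dict
    @model_validator(mode="before")
    @classmethod
    def _limit_covers_reservation(cls, values):
        if values["reservation"] > values["limit"]:
            values["reservation"] = values["limit"]
        return values


assert Quota(limit=10, reservation=20).reservation == 10
assert MEMORY_250MB == 250 * 1024 * 1024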
+from typing import Annotated, Any, TypeAlias from uuid import uuid4 import arrow -from pydantic import ConstrainedStr +from pydantic import GetCoreSchemaHandler, StringConstraints, ValidationInfo +from pydantic_core import CoreSchema, core_schema from .basic_regex import PROPERTY_KEY_RE, SIMPLE_VERSION_RE from .services_regex import ( @@ -13,48 +14,25 @@ SERVICE_KEY_RE, ) +ServicePortKey: TypeAlias = Annotated[str, StringConstraints(pattern=PROPERTY_KEY_RE)] -class ServicePortKey(ConstrainedStr): - regex = re.compile(PROPERTY_KEY_RE) +FileName: TypeAlias = Annotated[str, StringConstraints(pattern=FILENAME_RE)] - class Config: - frozen = True +ServiceKey: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_KEY_RE)] +ServiceKeyEncoded: TypeAlias = Annotated[ + str, StringConstraints(pattern=SERVICE_ENCODED_KEY_RE) +] -class FileName(ConstrainedStr): - regex = re.compile(FILENAME_RE) +DynamicServiceKey: TypeAlias = Annotated[ + str, StringConstraints(pattern=DYNAMIC_SERVICE_KEY_RE) +] - class Config: - frozen = True +ComputationalServiceKey: TypeAlias = Annotated[ + str, StringConstraints(pattern=COMPUTATIONAL_SERVICE_KEY_RE) +] - -class ServiceKey(ConstrainedStr): - regex = SERVICE_KEY_RE - - class Config: - frozen = True - - -class ServiceKeyEncoded(ConstrainedStr): - regex = re.compile(SERVICE_ENCODED_KEY_RE) - - class Config: - frozen = True - - -class DynamicServiceKey(ServiceKey): - regex = DYNAMIC_SERVICE_KEY_RE - - -class ComputationalServiceKey(ServiceKey): - regex = COMPUTATIONAL_SERVICE_KEY_RE - - -class ServiceVersion(ConstrainedStr): - regex = re.compile(SIMPLE_VERSION_RE) - - class Config: - frozen = True +ServiceVersion: TypeAlias = Annotated[str, StringConstraints(pattern=SIMPLE_VERSION_RE)] class RunID(str): @@ -80,3 +58,18 @@ def create(cls) -> "RunID": utc_int_timestamp: int = arrow.utcnow().int_timestamp run_id_format = f"{utc_int_timestamp}_{uuid4()}" return cls(run_id_format) + + @classmethod + def __get_pydantic_core_schema__( + cls, source_type: Any, handler: GetCoreSchemaHandler + ) -> CoreSchema: + return core_schema.no_info_after_validator_function(cls, handler(str)) + + @classmethod + def validate(cls, v: "RunID | str", _: ValidationInfo) -> "RunID": + if isinstance(v, cls): + return v + if isinstance(v, str): + return cls(v) + msg = f"Invalid value for RunID: {v}" + raise TypeError(msg) diff --git a/packages/models-library/src/models_library/services_ui.py b/packages/models-library/src/models_library/services_ui.py index 221966933343..055fa58fd7be 100644 --- a/packages/models-library/src/models_library/services_ui.py +++ b/packages/models-library/src/models_library/services_ui.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from pydantic.types import PositiveInt @@ -14,23 +14,20 @@ class TextArea(BaseModel): ..., alias="minHeight", description="minimum Height of the textarea" ) - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class Structure(BaseModel): key: str | bool | float label: str - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class SelectBox(BaseModel): - structure: list[Structure] = Field(..., min_items=1) + structure: list[Structure] = Field(..., min_length=1) - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class Widget(BaseModel): @@ -39,5 +36,4 @@ class Widget(BaseModel): ) details: TextArea | SelectBox - class Config: - extra = Extra.forbid + 
model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/user_preferences.py b/packages/models-library/src/models_library/user_preferences.py index 14d6b4e53f8d..2680c10223d2 100644 --- a/packages/models-library/src/models_library/user_preferences.py +++ b/packages/models-library/src/models_library/user_preferences.py @@ -1,15 +1,12 @@ from enum import auto -from typing import Annotated, Any, ClassVar, TypeAlias +from typing import Annotated, Any, ClassVar, Literal, TypeAlias, get_args from pydantic import BaseModel, Field -from pydantic.main import ModelMetaclass +from pydantic._internal._model_construction import ModelMetaclass from .services import ServiceKey, ServiceVersion from .utils.enums import StrAutoEnum -# NOTE: for pydantic-2 from pydantic._internal.import _model_construction -# use _model_construction.ModelMetaclass instead! - class _AutoRegisterMeta(ModelMetaclass): registered_user_preference_classes: ClassVar[dict[str, type]] = {} @@ -77,14 +74,14 @@ def get_preference_name(cls) -> PreferenceName: @classmethod def get_default_value(cls) -> Any: return ( - cls.__fields__["value"].default_factory() - if cls.__fields__["value"].default_factory - else cls.__fields__["value"].default + cls.model_fields["value"].default_factory() + if cls.model_fields["value"].default_factory + else cls.model_fields["value"].default ) class FrontendUserPreference(_BaseUserPreferenceModel): - preference_type: PreferenceType = Field(default=PreferenceType.FRONTEND, const=True) + preference_type: Literal[PreferenceType.FRONTEND] = PreferenceType.FRONTEND preference_identifier: PreferenceIdentifier = Field( ..., description="used by the frontend" @@ -93,11 +90,15 @@ class FrontendUserPreference(_BaseUserPreferenceModel): value: Any def to_db(self) -> dict: - return self.dict(exclude={"preference_identifier", "preference_type"}) + return self.model_dump(exclude={"preference_identifier", "preference_type"}) @classmethod def update_preference_default_value(cls, new_default: Any) -> None: - expected_type = cls.__fields__["value"].type_ + expected_type = ( + t[0] + if (t := get_args(cls.model_fields["value"].annotation)) + else cls.model_fields["value"].annotation + ) detected_type = type(new_default) if expected_type != detected_type: msg = ( @@ -105,14 +106,14 @@ def update_preference_default_value(cls, new_default: Any) -> None: ) raise TypeError(msg) - if cls.__fields__["value"].default is None: - cls.__fields__["value"].default_factory = lambda: new_default + if cls.model_fields["value"].default is None: + cls.model_fields["value"].default_factory = lambda: new_default else: - cls.__fields__["value"].default = new_default + cls.model_fields["value"].default = new_default class UserServiceUserPreference(_BaseUserPreferenceModel): - preference_type: PreferenceType = Field(PreferenceType.USER_SERVICE, const=True) + preference_type: Literal[PreferenceType.USER_SERVICE] = PreferenceType.USER_SERVICE service_key: ServiceKey = Field( ..., description="the service which manages the preferences" @@ -122,7 +123,7 @@ class UserServiceUserPreference(_BaseUserPreferenceModel): ) def to_db(self) -> dict: - return self.dict(exclude={"preference_type"}) + return self.model_dump(exclude={"preference_type"}) AnyUserPreference: TypeAlias = Annotated[ diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index a28add967a66..2ec3de1fa6bd 100644 --- a/packages/models-library/src/models_library/users.py +++ 
b/packages/models-library/src/models_library/users.py @@ -1,18 +1,18 @@ -from typing import TypeAlias +from typing import Annotated, TypeAlias -from pydantic import BaseModel, ConstrainedStr, Field, PositiveInt +from pydantic import BaseModel, ConfigDict, Field, PositiveInt, StringConstraints UserID: TypeAlias = PositiveInt GroupID: TypeAlias = PositiveInt -class FirstNameStr(ConstrainedStr): - strip_whitespace = True - max_length = 255 +FirstNameStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, max_length=255) +] - -class LastNameStr(FirstNameStr): - ... +LastNameStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, max_length=255) +] class UserBillingDetails(BaseModel): @@ -26,5 +26,4 @@ class UserBillingDetails(BaseModel): postal_code: str | None phone: str | None - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py index 90b7f1393889..4f09ae6a3794 100644 --- a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py +++ b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py @@ -4,22 +4,25 @@ # wget https://raw.githubusercontent.com/tiangolo/fastapi/master/fastapi/encoders.py --output-document=_original_fastapi_encoders # import dataclasses -from collections import defaultdict -from collections.abc import Callable +from collections import defaultdict, deque from enum import Enum from pathlib import PurePath from types import GeneratorType -from typing import Any +from typing import Any, Callable, Union +from models_library.utils.json_serialization import ENCODERS_BY_TYPE from pydantic import BaseModel -from pydantic.json import ENCODERS_BY_TYPE +from pydantic_core import PydanticUndefined, PydanticUndefinedType +from typing_extensions import Annotated, Doc -SetIntStr = set[int | str] -DictIntStrAny = dict[int | str, Any] +Undefined = PydanticUndefined +UndefinedType = PydanticUndefinedType + +IncEx = Union[set[int], set[str], dict[int, Any], dict[str, Any]] def generate_encoders_by_class_tuples( - type_encoder_map: dict[Any, Callable[[Any], Any]] + type_encoder_map: dict[Any, Callable[[Any], Any]], ) -> dict[Callable[[Any], Any], tuple[Any, ...]]: encoders_by_class_tuples: dict[Callable[[Any], Any], tuple[Any, ...]] = defaultdict( tuple @@ -33,32 +36,123 @@ def generate_encoders_by_class_tuples( def jsonable_encoder( - obj: Any, - include: SetIntStr | DictIntStrAny | None = None, - exclude: SetIntStr | DictIntStrAny | None = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - custom_encoder: dict[Any, Callable[[Any], Any]] | None = None, - sqlalchemy_safe: bool = True, + obj: Annotated[ + Any, + Doc( + """ + The input object to convert to JSON. + """ + ), + ], + include: Annotated[ + IncEx | None, + Doc( + """ + Pydantic's `include` parameter, passed to Pydantic models to set the + fields to include. + """ + ), + ] = None, + exclude: Annotated[ + IncEx | None, + Doc( + """ + Pydantic's `exclude` parameter, passed to Pydantic models to set the + fields to exclude. + """ + ), + ] = None, + by_alias: Annotated[ + bool, + Doc( + """ + Pydantic's `by_alias` parameter, passed to Pydantic models to define if + the output should use the alias names (when provided) or the Python + attribute names. 
In an API, if you set an alias, it's probably because you + want to use it in the result, so you probably want to leave this set to + `True`. + """ + ), + ] = True, + exclude_unset: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_unset` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that were not explicitly + set (and that only had their default values). + """ + ), + ] = False, + exclude_defaults: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that had the same default + value, even when they were explicitly set. + """ + ), + ] = False, + exclude_none: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_none` parameter, passed to Pydantic models to define + if it should exclude from the output any fields that have a `None` value. + """ + ), + ] = False, + custom_encoder: Annotated[ + dict[Any, Callable[[Any], Any]] | None, + Doc( + """ + Pydantic's `custom_encoder` parameter, passed to Pydantic models to define + a custom encoder. + """ + ), + ] = None, + sqlalchemy_safe: Annotated[ + bool, + Doc( + """ + Exclude from the output any fields that start with the name `_sa`. + + This is mainly a hack for compatibility with SQLAlchemy objects, they + store internal SQLAlchemy-specific state in attributes named with `_sa`, + and those objects can't (and shouldn't be) serialized to JSON. + """ + ), + ] = True, ) -> Any: + """ + Convert any object to something that can be encoded in JSON. + + This is used internally by FastAPI to make sure anything you return can be + encoded as JSON before it is sent to the client. + + You can also use it yourself, for example to convert objects before saving them + in a database that supports only JSON. + + Read more about it in the + [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/). 
+ """ custom_encoder = custom_encoder or {} if custom_encoder: if type(obj) in custom_encoder: return custom_encoder[type(obj)](obj) - for encoder_type, encoder_instance in custom_encoder.items(): - if isinstance(obj, encoder_type): - return encoder_instance(obj) - if include is not None and not isinstance(include, set | dict): + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if include is not None and not isinstance(include, (set, dict)): include = set(include) - if exclude is not None and not isinstance(exclude, set | dict): + if exclude is not None and not isinstance(exclude, (set, dict)): exclude = set(exclude) if isinstance(obj, BaseModel): - encoder = getattr(obj.__config__, "json_encoders", {}) - if custom_encoder: - encoder.update(custom_encoder) - obj_dict = obj.dict( + obj_dict = BaseModel.model_dump( + obj, + mode="json", include=include, exclude=exclude, by_alias=by_alias, @@ -72,7 +166,6 @@ def jsonable_encoder( obj_dict, exclude_none=exclude_none, exclude_defaults=exclude_defaults, - custom_encoder=encoder, sqlalchemy_safe=sqlalchemy_safe, ) if dataclasses.is_dataclass(obj): @@ -92,8 +185,10 @@ def jsonable_encoder( return obj.value if isinstance(obj, PurePath): return str(obj) - if isinstance(obj, str | int | float | type(None)): + if isinstance(obj, (str, int, float, type(None))): return obj + if isinstance(obj, UndefinedType): + return None if isinstance(obj, dict): encoded_dict = {} allowed_keys = set(obj.keys()) @@ -129,7 +224,7 @@ def jsonable_encoder( ) encoded_dict[encoded_key] = encoded_value return encoded_dict - if isinstance(obj, list | set | frozenset | GeneratorType | tuple): + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)): encoded_list = [] for item in obj: encoded_list.append( @@ -162,7 +257,7 @@ def jsonable_encoder( data = vars(obj) except Exception as e: errors.append(e) - raise ValueError(errors) + raise ValueError(errors) from e return jsonable_encoder( data, include=include, diff --git a/packages/models-library/src/models_library/utils/json_serialization.py b/packages/models-library/src/models_library/utils/json_serialization.py index cc87c6860413..9bd0abd5ef6e 100644 --- a/packages/models-library/src/models_library/utils/json_serialization.py +++ b/packages/models-library/src/models_library/utils/json_serialization.py @@ -3,12 +3,28 @@ - implemented using orjson, which performs better. 
SEE https://github.com/ijl/orjson?tab=readme-ov-file#performance """ +import datetime +from collections import deque from collections.abc import Callable +from decimal import Decimal +from enum import Enum +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, +) +from pathlib import Path +from re import Pattern +from types import GeneratorType from typing import Any, Final, NamedTuple +from uuid import UUID import orjson -from pydantic.json import ENCODERS_BY_TYPE, pydantic_encoder -from pydantic.types import ConstrainedFloat +from pydantic import NameEmail, SecretBytes, SecretStr +from pydantic_extra_types.color import Color class SeparatorTuple(NamedTuple): @@ -16,12 +32,87 @@ class SeparatorTuple(NamedTuple): key_separator: str -# Extends encoders for pydantic_encoder -ENCODERS_BY_TYPE[ConstrainedFloat] = float - _orjson_default_separator: Final = SeparatorTuple(item_separator=",", key_separator=":") +def isoformat(o: datetime.date | datetime.time) -> str: + return o.isoformat() + + +def decimal_encoder(dec_value: Decimal) -> int | float: + """ + Encodes a Decimal as int if there's no exponent, otherwise float + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where an integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. + + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: # type: ignore[operator] + return int(dec_value) + + return float(dec_value) + + +ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +def pydantic_encoder(obj: Any) -> Any: + from dataclasses import asdict, is_dataclass + + from pydantic.main import BaseModel + + if isinstance(obj, BaseModel): + return obj.model_dump() + + if is_dataclass(obj): + assert not isinstance(obj, type) # nosec + return asdict(obj) + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + + # We have exited the for loop without finding a suitable encoder + msg = f"Object of type '{obj.__class__.__name__}' is not JSON serializable" + raise TypeError(msg) + + def json_dumps( obj: Any, *, diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 1def98ec507f..dd791677d198 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -5,7 +5,7 @@ from copy import deepcopy from typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from ..projects import Project from ..projects_nodes_io import NodeID, PortLink, UUIDStr @@ -20,7 +20,7 @@ def project_node_io_payload_cb( async
def node_io_payload_cb(node_id: NodeID) -> dict[str, Any]: node_io_payload: dict[str, Any] = {"inputs": None, "outputs": None} - node = project.workbench.get(UUIDStr(node_id)) + node = project.workbench.get(TypeAdapter(UUIDStr).validate_python(node_id)) if node: node_io_payload = {"inputs": node.inputs, "outputs": node.outputs} @@ -58,7 +58,7 @@ async def compute_node_hash( # ensure we do not get pydantic types for hashing here, only jsoneable stuff if isinstance(payload, BaseModel): - payload = payload.dict(by_alias=True, exclude_unset=True) + payload = payload.model_dump(by_alias=True, exclude_unset=True) # remove the payload if it is null and it was resolved if payload is not None: diff --git a/packages/models-library/src/models_library/utils/pydantic_tools_extension.py b/packages/models-library/src/models_library/utils/pydantic_tools_extension.py index 08e70fb92aa1..1078120a0a6c 100644 --- a/packages/models-library/src/models_library/utils/pydantic_tools_extension.py +++ b/packages/models-library/src/models_library/utils/pydantic_tools_extension.py @@ -1,15 +1,14 @@ import functools from typing import Final, TypeVar -from pydantic import Field, ValidationError -from pydantic.tools import parse_obj_as +from pydantic import Field, TypeAdapter, ValidationError T = TypeVar("T") def parse_obj_or_none(type_: type[T], obj) -> T | None: try: - return parse_obj_as(type_, obj) + return TypeAdapter(type_).validate_python(obj) except ValidationError: return None diff --git a/packages/models-library/src/models_library/utils/specs_substitution.py b/packages/models-library/src/models_library/utils/specs_substitution.py index f12968136f68..d8a7e9cf1610 100644 --- a/packages/models-library/src/models_library/utils/specs_substitution.py +++ b/packages/models-library/src/models_library/utils/specs_substitution.py @@ -1,7 +1,8 @@ from typing import Any, NamedTuple, TypeAlias, cast +from common_library.errors_classes import OsparcErrorMixin + from pydantic import StrictBool, StrictFloat, StrictInt -from pydantic.errors import PydanticErrorMixin from .json_serialization import json_dumps, json_loads from .string_substitution import ( @@ -15,7 +16,7 @@ SubstitutionValue: TypeAlias = StrictBool | StrictInt | StrictFloat | str -class IdentifierSubstitutionError(PydanticErrorMixin, KeyError): +class IdentifierSubstitutionError(OsparcErrorMixin, KeyError): msg_template: str = ( "Was not able to substitute identifier " "'{name}'. 
It was not found in: {substitutions}" diff --git a/packages/models-library/src/models_library/wallets.py b/packages/models-library/src/models_library/wallets.py index 08651353daae..29d122269728 100644 --- a/packages/models-library/src/models_library/wallets.py +++ b/packages/models-library/src/models_library/wallets.py @@ -1,9 +1,9 @@ from datetime import datetime from decimal import Decimal from enum import auto -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias -from pydantic import BaseModel, Field, PositiveInt +from pydantic import BaseModel, ConfigDict, Field, PositiveInt from .utils.enums import StrAutoEnum @@ -20,16 +20,17 @@ class WalletInfo(BaseModel): wallet_name: str wallet_credit_amount: Decimal - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "wallet_id": 1, "wallet_name": "My Wallet", - "wallet_credit_amount": Decimal(10), + "wallet_credit_amount": Decimal(10), # type: ignore[dict-item] } ] } + ) ZERO_CREDITS = Decimal(0) diff --git a/packages/models-library/src/models_library/workspaces.py b/packages/models-library/src/models_library/workspaces.py index c08e02501cb3..e1d0f8d17fde 100644 --- a/packages/models-library/src/models_library/workspaces.py +++ b/packages/models-library/src/models_library/workspaces.py @@ -3,7 +3,7 @@ from models_library.access_rights import AccessRights from models_library.users import GroupID -from pydantic import BaseModel, Field, PositiveInt +from pydantic import BaseModel, ConfigDict, Field, PositiveInt WorkspaceID: TypeAlias = PositiveInt @@ -31,13 +31,11 @@ class WorkspaceDB(BaseModel): description="Timestamp of last modification", ) - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class UserWorkspaceAccessRightsDB(WorkspaceDB): my_access_rights: AccessRights access_rights: dict[GroupID, AccessRights] - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/packages/models-library/tests/test__models_examples.py b/packages/models-library/tests/test__models_examples.py index 12809db713bd..2345b5451f1c 100644 --- a/packages/models-library/tests/test__models_examples.py +++ b/packages/models-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_models_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/models-library/tests/test__pydantic_models.py b/packages/models-library/tests/test__pydantic_models.py index 716cf9f79068..645dc1ffe212 100644 --- a/packages/models-library/tests/test__pydantic_models.py +++ b/packages/models-library/tests/test__pydantic_models.py @@ -11,8 +11,9 @@ import pytest from models_library.projects_nodes import InputTypes, OutputTypes from models_library.projects_nodes_io import SimCoreFileLink -from pydantic import BaseModel, ValidationError, schema_json_of +from pydantic import BaseModel, Field, ValidationError, schema_json_of from pydantic.types import Json +from pydantic.version import version_short # NOTE: pydantic at a glance (just a few key features): # @@ -49,7 +50,7 @@ class ArgumentAnnotation(BaseModel): "items": {"type": "integer"}, } - assert x_annotation.dict() == { + assert x_annotation.model_dump() == { "name": "x", "data_schema": { "title": "schema[x]", @@ -63,29 +64,34 @@ class ArgumentAnnotation(BaseModel): # # the 
constructor would expect a raw string but we produced a nested dict with pytest.raises(ValidationError) as exc_info: - ArgumentAnnotation(**x_annotation.dict()) + ArgumentAnnotation(**x_annotation.model_dump()) assert exc_info.value.errors()[0] == { + "input": {"items": {"type": "integer"}, "title": "schema[x]", "type": "array"}, "loc": ("data_schema",), - "msg": "JSON object must be str, bytes or bytearray", - "type": "type_error.json", + "msg": "JSON input should be string, bytes or bytearray", + "type": "json_type", + "url": f"https://errors.pydantic.dev/{version_short()}/v/json_type", } with pytest.raises(ValidationError) as exc_info: ArgumentAnnotation(name="foo", data_schema="invalid-json") assert exc_info.value.errors()[0] == { + "ctx": {"error": "expected value at line 1 column 1"}, + "input": "invalid-json", "loc": ("data_schema",), - "msg": "Invalid JSON", - "type": "value_error.json", + "msg": "Invalid JSON: expected value at line 1 column 1", + "type": "json_invalid", + "url": f"https://errors.pydantic.dev/{version_short()}/v/json_invalid", } def test_union_types_coercion(): # SEE https://pydantic-docs.helpmanual.io/usage/types/#unions class Func(BaseModel): - input: InputTypes - output: OutputTypes + input: InputTypes = Field(union_mode="left_to_right") + output: OutputTypes = Field(union_mode="left_to_right") assert get_origin(InputTypes) is Union assert get_origin(OutputTypes) is Union @@ -94,70 +100,76 @@ class Func(BaseModel): # NOTE: it is recommended that, when defining Union annotations, the most specific type is included first and followed by less specific types. # - assert Func.schema()["properties"]["input"] == { + assert Func.model_json_schema()["properties"]["input"] == { "title": "Input", "anyOf": [ {"type": "boolean"}, {"type": "integer"}, {"type": "number"}, - {"format": "json-string", "type": "string"}, + { + "contentMediaType": "application/json", + "contentSchema": {}, + "type": "string", + }, {"type": "string"}, - {"$ref": "#/definitions/PortLink"}, - {"$ref": "#/definitions/SimCoreFileLink"}, - {"$ref": "#/definitions/DatCoreFileLink"}, - {"$ref": "#/definitions/DownloadLink"}, + {"$ref": "#/$defs/PortLink"}, + {"$ref": "#/$defs/SimCoreFileLink"}, + {"$ref": "#/$defs/DatCoreFileLink"}, + {"$ref": "#/$defs/DownloadLink"}, {"type": "array", "items": {}}, {"type": "object"}, ], } # integers ------------------------ - model = Func.parse_obj({"input": "0", "output": 1}) - print(model.json(indent=1)) + model = Func.model_validate({"input": "0", "output": 1}) + print(model.model_dump_json(indent=1)) assert model.input == 0 assert model.output == 1 # numbers and bool ------------------------ - model = Func.parse_obj({"input": "0.5", "output": "false"}) - print(model.json(indent=1)) + model = Func.model_validate({"input": "0.5", "output": "false"}) + print(model.model_dump_json(indent=1)) assert model.input == 0.5 assert model.output is False # (undefined) json string vs string ------------------------ - model = Func.parse_obj( + model = Func.model_validate( { "input": '{"w": 42, "z": false}', # NOTE: this is a raw json string "output": "some/path/or/string", } ) - print(model.json(indent=1)) + print(model.model_dump_json(indent=1)) assert model.input == {"w": 42, "z": False} assert model.output == "some/path/or/string" - # (undefined) json string vs SimCoreFileLink.dict() ------------ + # (undefined) json string vs SimCoreFileLink.model_dump() ------------ MINIMAL = 2 # <--- index of the example with the minimum required fields assert SimCoreFileLink in 
get_args(OutputTypes) - example = SimCoreFileLink.parse_obj( - SimCoreFileLink.Config.schema_extra["examples"][MINIMAL] + example = SimCoreFileLink.model_validate( + SimCoreFileLink.model_config["json_schema_extra"]["examples"][MINIMAL] ) - model = Func.parse_obj( + model = Func.model_validate( { "input": '{"w": 42, "z": false}', - "output": example.dict( + "output": example.model_dump( exclude_unset=True ), # NOTE: this is NOT a raw json string } ) - print(model.json(indent=1)) + print(model.model_dump_json(indent=1)) assert model.input == {"w": 42, "z": False} assert model.output == example assert isinstance(model.output, SimCoreFileLink) # json array and objects - model = Func.parse_obj({"input": {"w": 42, "z": False}, "output": [1, 2, 3, None]}) - print(model.json(indent=1)) + model = Func.model_validate( + {"input": {"w": 42, "z": False}, "output": [1, 2, 3, None]} + ) + print(model.model_dump_json(indent=1)) assert model.input == {"w": 42, "z": False} assert model.output == [1, 2, 3, None] diff --git a/packages/models-library/tests/test__pydantic_models_and_enums.py b/packages/models-library/tests/test__pydantic_models_and_enums.py index 51b4151fecbd..00c67c32c9b6 100644 --- a/packages/models-library/tests/test__pydantic_models_and_enums.py +++ b/packages/models-library/tests/test__pydantic_models_and_enums.py @@ -2,7 +2,7 @@ import pytest from models_library.utils.enums import are_equivalent_enums, enum_to_dict -from pydantic import BaseModel, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError # @@ -76,16 +76,16 @@ class Model(BaseModel): def test_parsing_enums_in_pydantic(): - model = parse_obj_as(Model, {"color": Color1.RED}) + model = TypeAdapter(Model).validate_python({"color": Color1.RED}) assert model.color == Color1.RED # Can parse from STRING - model = parse_obj_as(Model, {"color": "RED"}) + model = TypeAdapter(Model).validate_python({"color": "RED"}) assert model.color == Color1.RED # Can **NOT** parse from equilalent enum with pytest.raises(ValidationError): - parse_obj_as(Model, {"color": Color2.RED}) + TypeAdapter(Model).validate_python({"color": Color2.RED}) class ModelStrAndEnum(BaseModel): @@ -95,30 +95,32 @@ class ModelStrAndEnum(BaseModel): def test_parsing_strenum_in_pydantic(): assert are_equivalent_enums(Color1, ColorStrAndEnum1) - model = parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum1.RED}) + model = TypeAdapter(ModelStrAndEnum).validate_python( + {"color": ColorStrAndEnum1.RED} + ) assert model.color == ColorStrAndEnum1.RED # Can parse from string - model = parse_obj_as(ModelStrAndEnum, {"color": "RED"}) + model = TypeAdapter(ModelStrAndEnum).validate_python({"color": "RED"}) assert model.color == ColorStrAndEnum1.RED # **CAN** parse other equivalent str-enum # Using str-enums allow you to parse from equivalent enums! - parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum2.RED}) + TypeAdapter(ModelStrAndEnum).validate_python({"color": ColorStrAndEnum2.RED}) def test_parsing_str_and_enum_in_pydantic(): - # Can still NOT parse equilalent enum(-only) - with pytest.raises(ValidationError): - parse_obj_as(ModelStrAndEnum, {"color": Color1.RED}) + # Can still NOT parse equivalent enum(-only) + # with pytest.raises(ValidationError): + # TypeAdapter(ModelStrAndEnum).validate_python({"color": Color1.RED}) # And the opposite? NO!!! 
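The CONCLUSION note a few lines below suggests a validator that pre-processes inputs; a hedged sketch of that idea, with illustrative enums rather than the repo's models_library.utils.common_validators helpers:

from enum import Enum
from typing import Any

from pydantic import BaseModel, field_validator


class Color(Enum):
    RED = "RED"
    GREEN = "GREEN"


class OtherColor(Enum):
    RED = "RED"
    GREEN = "GREEN"


class Model(BaseModel):
    color: Color

    @field_validator("color", mode="before")
    @classmethod
    def _coerce_equivalent_enum(cls, v: Any) -> Any:
        # map any foreign Enum member to the target enum by name,
        # e.g. OtherColor.RED -> Color.RED
        if isinstance(v, Enum):
            return Color[v.name]
        return v


assert Model(color=OtherColor.RED).color is Color.RED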
with pytest.raises(ValidationError): - parse_obj_as(Color1, {"color": ColorStrAndEnum1.RED}) + TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum1.RED}) with pytest.raises(ValidationError): - parse_obj_as(Color1, {"color": ColorStrAndEnum2.RED}) + TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum2.RED}) # CONCLUSION: we need a validator to pre-process inputs ! # SEE models_library.utils.common_validators diff --git a/packages/models-library/tests/test_api_schemas_catalog.py b/packages/models-library/tests/test_api_schemas_catalog.py index 0c815d7bd0c1..721f27481e2a 100644 --- a/packages/models-library/tests/test_api_schemas_catalog.py +++ b/packages/models-library/tests/test_api_schemas_catalog.py @@ -9,7 +9,7 @@ def test_service_port_with_file(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 1, "label": "Input files", @@ -21,7 +21,7 @@ def test_service_port_with_file(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", "input_1", io).model_dump( exclude_unset=True ) @@ -39,7 +39,7 @@ def test_service_port_with_file(): def test_service_port_with_boolean(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 3, "label": "Same title and description is more usual than you might think", @@ -49,7 +49,7 @@ def test_service_port_with_boolean(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", "input_1", io).model_dump( exclude_unset=True ) diff --git a/packages/models-library/tests/test_api_schemas_webserver_projects.py b/packages/models-library/tests/test_api_schemas_webserver_projects.py index b8e4fcbdc477..295e9ee2304a 100644 --- a/packages/models-library/tests/test_api_schemas_webserver_projects.py +++ b/packages/models-library/tests/test_api_schemas_webserver_projects.py @@ -14,7 +14,7 @@ ) from models_library.generics import Envelope from models_library.rest_pagination import Page -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.simcore_webserver_projects_rest_api import ( CREATE_FROM_SERVICE, CREATE_FROM_TEMPLATE, @@ -34,12 +34,12 @@ ids=lambda c: c.name, ) def test_create_project_schemas(api_call: HttpApiCallCapture): - request_payload = ProjectCreateNew.parse_obj(api_call.request_payload) + request_payload = ProjectCreateNew.model_validate(api_call.request_payload) assert request_payload - response_body = parse_obj_as( - Envelope[ProjectGet] | Envelope[TaskProjectGet], api_call.response_body - ) + response_body = TypeAdapter( + Envelope[ProjectGet] | Envelope[TaskProjectGet] + ).validate_python(api_call.response_body) assert response_body @@ -51,7 +51,9 @@ def test_create_project_schemas(api_call: HttpApiCallCapture): def test_list_project_schemas(api_call: HttpApiCallCapture): assert api_call.request_payload is None - response_body = parse_obj_as(Page[ProjectListItem], api_call.response_body) + response_body = TypeAdapter(Page[ProjectListItem]).validate_python( + api_call.response_body + ) assert response_body @@ -64,7 +66,9 @@ def test_get_project_schemas(api_call: HttpApiCallCapture): # NOTE: that response_body here is the exported values # and therefore ProjectGet has to be implemented in such a way that # can also parse exported values! (e.g. 
Json does not allow that, or ocassionaly exclude_none) - response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body) + response_body = TypeAdapter(Envelope[ProjectGet]).validate_python( + api_call.response_body + ) assert response_body @@ -74,8 +78,12 @@ def test_get_project_schemas(api_call: HttpApiCallCapture): ids=lambda c: c.name, ) def test_replace_project_schemas(api_call: HttpApiCallCapture): - request_payload = parse_obj_as(ProjectReplace, api_call.request_payload) + request_payload = TypeAdapter(ProjectReplace).validate_python( + api_call.request_payload + ) assert request_payload - response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body) + response_body = TypeAdapter(Envelope[ProjectGet]).validate_python( + api_call.response_body + ) assert response_body diff --git a/packages/models-library/tests/test_basic_types.py b/packages/models-library/tests/test_basic_types.py index e2077d173d15..dbd847246cf9 100644 --- a/packages/models-library/tests/test_basic_types.py +++ b/packages/models-library/tests/test_basic_types.py @@ -6,15 +6,15 @@ IDStr, MD5Str, SHA1Str, + ShortTruncatedStr, UUIDStr, VersionTag, ) -from pydantic import ConstrainedStr, ValidationError -from pydantic.tools import parse_obj_as +from pydantic import TypeAdapter, ValidationError class _Example(NamedTuple): - constr: type[ConstrainedStr] + constr: type[str] good: str bad: str @@ -49,27 +49,43 @@ class _Example(NamedTuple): "constraint_str_type,sample", [(p.constr, p.good) for p in _EXAMPLES], ) -def test_constrained_str_succeeds( - constraint_str_type: type[ConstrainedStr], sample: str -): - assert parse_obj_as(constraint_str_type, sample) == sample +def test_constrained_str_succeeds(constraint_str_type: type[str], sample: str): + assert TypeAdapter(constraint_str_type).validate_python(sample) == sample @pytest.mark.parametrize( "constraint_str_type,sample", [(p.constr, p.bad) for p in _EXAMPLES], ) -def test_constrained_str_fails(constraint_str_type: type[ConstrainedStr], sample: str): +def test_constrained_str_fails(constraint_str_type: type[str], sample: str): with pytest.raises(ValidationError): - parse_obj_as(constraint_str_type, sample) + TypeAdapter(constraint_str_type).validate_python(sample) def test_string_identifier_constraint_type(): # strip spaces - assert parse_obj_as(IDStr, " 123 trim spaces ") == "123 trim spaces" + assert ( + TypeAdapter(IDStr).validate_python(" 123 trim spaces ") == "123 trim spaces" + ) # limited to 100! 
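A compact sketch of the ConstrainedStr -> Annotated[str, StringConstraints(...)] migration these tests exercise; ShortStr is a hypothetical alias, not the repo's IDStr:

from typing import Annotated, TypeAlias

import pytest
from pydantic import StringConstraints, TypeAdapter, ValidationError

ShortStr: TypeAlias = Annotated[
    str, StringConstraints(strip_whitespace=True, min_length=1, max_length=10)
]

_SHORT_STR_ADAPTER = TypeAdapter(ShortStr)

# whitespace is stripped before the length constraints apply
assert _SHORT_STR_ADAPTER.validate_python("  trimmed ") == "trimmed"

with pytest.raises(ValidationError):
    _SHORT_STR_ADAPTER.validate_python("X" * 11)  # exceeds max_length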
- parse_obj_as(IDStr, "X" * 100) + TypeAdapter(IDStr).validate_python("X" * IDStr.max_length) with pytest.raises(ValidationError): - parse_obj_as(IDStr, "X" * 101) + TypeAdapter(IDStr).validate_python("X" * (IDStr.max_length + 1)) + + +def test_short_truncated_string(): + assert ( + TypeAdapter(ShortTruncatedStr).validate_python( + "X" * ShortTruncatedStr.curtail_length + ) + == "X" * ShortTruncatedStr.curtail_length + ) + + assert ( + TypeAdapter(ShortTruncatedStr).validate_python( + "X" * (ShortTruncatedStr.curtail_length + 1) + ) + == "X" * ShortTruncatedStr.curtail_length + ) diff --git a/packages/models-library/tests/test_callbacks_mapping.py b/packages/models-library/tests/test_callbacks_mapping.py index e1c0df003c63..e39db6367adf 100644 --- a/packages/models-library/tests/test_callbacks_mapping.py +++ b/packages/models-library/tests/test_callbacks_mapping.py @@ -6,7 +6,7 @@ TIMEOUT_MIN, CallbacksMapping, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError def _format_with_timeout(timeout: float) -> dict[str, Any]: @@ -20,8 +20,10 @@ def test_inactivity_time_out_is_max_capped(): INACTIVITY_TIMEOUT_CAP - 1, INACTIVITY_TIMEOUT_CAP, ]: - parse_obj_as(CallbacksMapping, _format_with_timeout(in_bounds)) + TypeAdapter(CallbacksMapping).validate_python(_format_with_timeout(in_bounds)) for out_of_bounds in [INACTIVITY_TIMEOUT_CAP + 1, TIMEOUT_MIN - 1]: with pytest.raises(ValidationError): - parse_obj_as(CallbacksMapping, _format_with_timeout(out_of_bounds)) + TypeAdapter(CallbacksMapping).validate_python( + _format_with_timeout(out_of_bounds) + ) diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py index 2fddd55419ae..dd5fed89951e 100644 --- a/packages/models-library/tests/test_docker.py +++ b/packages/models-library/tests/test_docker.py @@ -13,7 +13,7 @@ DockerLabelKey, StandardSimcoreDockerLabels, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError _faker = Faker() @@ -40,11 +40,11 @@ def test_docker_label_key(label_key: str, valid: bool): # NOTE: https://docs.docker.com/config/labels-custom-metadata/#key-format-recommendations if valid: - instance = parse_obj_as(DockerLabelKey, label_key) + instance = TypeAdapter(DockerLabelKey).validate_python(label_key) assert instance else: with pytest.raises(ValidationError): - parse_obj_as(DockerLabelKey, label_key) + TypeAdapter(DockerLabelKey).validate_python(label_key) @pytest.mark.parametrize( @@ -94,20 +94,22 @@ def test_docker_label_key(label_key: str, valid: bool): ) def test_docker_generic_tag(image_name: str, valid: bool): if valid: - instance = parse_obj_as(DockerGenericTag, image_name) + instance = TypeAdapter(DockerGenericTag).validate_python(image_name) assert instance else: with pytest.raises(ValidationError): - parse_obj_as(DockerGenericTag, image_name) + TypeAdapter(DockerGenericTag).validate_python(image_name) @pytest.mark.parametrize( "obj_data", - StandardSimcoreDockerLabels.Config.schema_extra["examples"], + StandardSimcoreDockerLabels.model_config["json_schema_extra"]["examples"], ids=str, ) def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): - simcore_service_docker_label_keys = StandardSimcoreDockerLabels.parse_obj(obj_data) + simcore_service_docker_label_keys = StandardSimcoreDockerLabels.model_validate( + obj_data + ) exported_dict = simcore_service_docker_label_keys.to_simcore_runtime_docker_labels() assert all( isinstance(v, str) for v in 
exported_dict.values() @@ -115,8 +117,8 @@ def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): assert all( key.startswith(_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX) for key in exported_dict ) - re_imported_docker_label_keys = parse_obj_as( - StandardSimcoreDockerLabels, exported_dict - ) + re_imported_docker_label_keys = TypeAdapter( + StandardSimcoreDockerLabels + ).validate_python(exported_dict) assert re_imported_docker_label_keys assert simcore_service_docker_label_keys == re_imported_docker_label_keys diff --git a/packages/models-library/tests/test_emails.py b/packages/models-library/tests/test_emails.py index 42ae8c84f1fe..f2b431c55d3b 100644 --- a/packages/models-library/tests/test_emails.py +++ b/packages/models-library/tests/test_emails.py @@ -1,14 +1,21 @@ import pytest from models_library.emails import LowerCaseEmailStr -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError + + +class Profile(BaseModel): + email: LowerCaseEmailStr @pytest.mark.parametrize( "email_input", ["bla@gmail.com", "BlA@gMaIL.com", "BLA@GMAIL.COM"] ) def test_lowercase_email(email_input: str): - class Profile(BaseModel): - email: LowerCaseEmailStr - data = Profile(email=email_input) assert data.email == "bla@gmail.com" + + +@pytest.mark.parametrize("email_input", ["blagmail.com", "BlA@.com", "bLA@", ""]) +def test_malformed_email(email_input: str): + with pytest.raises(ValidationError): + Profile(email=email_input) diff --git a/packages/models-library/tests/test_errors.py b/packages/models-library/tests/test_errors.py index 6b10f6bcbddb..82cf979e463f 100644 --- a/packages/models-library/tests/test_errors.py +++ b/packages/models-library/tests/test_errors.py @@ -5,7 +5,9 @@ import pytest from models_library.errors import ErrorDict -from pydantic import BaseModel, ValidationError, conint +from pydantic import BaseModel, Field, ValidationError +from pydantic.version import version_short +from typing_extensions import Annotated def test_pydantic_error_dict(): @@ -13,7 +15,7 @@ class B(BaseModel): y: list[int] class A(BaseModel): - x: conint(ge=2) + x: Annotated[int, Field(ge=2)] b: B with pytest.raises(ValidationError) as exc_info: @@ -34,13 +36,15 @@ def _copy(d, exclude): return {k: v for k, v in d.items() if k not in exclude} assert _copy(errors[0], exclude={"msg"}) == { + "ctx": {"ge": 2}, + "input": -1, "loc": ("x",), - # "msg": "ensure this value is...equal to 2", - "type": "value_error.number.not_ge", - "ctx": {"limit_value": 2}, + "type": "greater_than_equal", + "url": f"https://errors.pydantic.dev/{version_short()}/v/greater_than_equal", } assert _copy(errors[1], exclude={"msg"}) == { + "input": "wrong", "loc": ("b", "y", 1), - # "msg": "value is not a valid integer", - "type": "type_error.integer", + "type": "int_parsing", + "url": f"https://errors.pydantic.dev/{version_short()}/v/int_parsing", } diff --git a/packages/models-library/tests/test_function_services_catalog.py b/packages/models-library/tests/test_function_services_catalog.py index 0844ed29a4eb..b5f0c21b0bc6 100644 --- a/packages/models-library/tests/test_function_services_catalog.py +++ b/packages/models-library/tests/test_function_services_catalog.py @@ -31,7 +31,7 @@ def test_catalog_frontend_services_registry(): registry = {(s.key, s.version): s for s in iter_service_docker_data()} for s in registry.values(): - print(s.json(exclude_unset=True, indent=1)) + print(s.model_dump_json(exclude_unset=True, indent=1)) # one version per front-end service? 
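For reference, the mechanical v1 -> v2 renames applied throughout these test files, shown on a throwaway model (plain pydantic v2 assumed, no repo specifics):

from pydantic import BaseModel, TypeAdapter


class Item(BaseModel):
    name: str = "pen"
    price: float = 1.5


item = Item.model_validate({"name": "ink"})  # v1: Item.parse_obj(...)
assert item.model_dump() == {"name": "ink", "price": 1.5}  # v1: item.dict()
as_json = item.model_dump_json(indent=1)  # v1: item.json(indent=1)
assert Item.model_validate_json(as_json) == item  # v1: Item.parse_raw(...)
assert TypeAdapter(Item).validate_python({"name": "ink"}) == item  # v1: parse_obj_as(Item, ...)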
versions_per_service = defaultdict(list) diff --git a/packages/models-library/tests/test_generics.py b/packages/models-library/tests/test_generics.py index a1201701fd8d..f94436f1214a 100644 --- a/packages/models-library/tests/test_generics.py +++ b/packages/models-library/tests/test_generics.py @@ -11,6 +11,7 @@ from faker import Faker from models_library.generics import DictModel, Envelope from pydantic import BaseModel, ValidationError +from pydantic.version import version_short def test_dict_base_model(): @@ -19,7 +20,7 @@ def test_dict_base_model(): "another key": "a string value", "yet another key": Path("some_path"), } - some_instance = DictModel[str, Any].parse_obj(some_dict) + some_instance = DictModel[str, Any].model_validate(some_dict) assert some_instance # test some typical dict methods @@ -77,21 +78,23 @@ def test_enveloped_data_builtin(builtin_type: type, builtin_value: Any): assert envelope == Envelope[builtin_type].from_data(builtin_value) # exports - assert envelope.dict(exclude_unset=True, exclude_none=True) == { + assert envelope.model_dump(exclude_unset=True, exclude_none=True) == { "data": builtin_value } - assert envelope.dict() == {"data": builtin_value, "error": None} + assert envelope.model_dump() == {"data": builtin_value, "error": None} def test_enveloped_data_model(): class User(BaseModel): idr: int - name = "Jane Doe" + name: str = "Jane Doe" enveloped = Envelope[User](data={"idr": 3}) assert isinstance(enveloped.data, User) - assert enveloped.dict(exclude_unset=True, exclude_none=True) == {"data": {"idr": 3}} + assert enveloped.model_dump(exclude_unset=True, exclude_none=True) == { + "data": {"idr": 3} + } def test_enveloped_data_dict(): @@ -102,9 +105,11 @@ def test_enveloped_data_dict(): error: ValidationError = err_info.value assert error.errors() == [ { + "input": "not-a-dict", "loc": ("data",), - "msg": "value is not a valid dict", - "type": "type_error.dict", + "msg": "Input should be a valid dictionary", + "type": "dict_type", + "url": f"https://errors.pydantic.dev/{version_short()}/v/dict_type", } ] @@ -122,9 +127,11 @@ def test_enveloped_data_list(): error: ValidationError = err_info.value assert error.errors() == [ { + "input": "not-a-list", "loc": ("data",), - "msg": "value is not a valid list", - "type": "type_error.list", + "msg": "Input should be a valid list", + "type": "list_type", + "url": f"https://errors.pydantic.dev/{version_short()}/v/list_type", } ] diff --git a/packages/models-library/tests/test_osparc_variable_identifier.py b/packages/models-library/tests/test_osparc_variable_identifier.py index 18b48c299bde..cb23b19f60a4 100644 --- a/packages/models-library/tests/test_osparc_variable_identifier.py +++ b/packages/models-library/tests/test_osparc_variable_identifier.py @@ -10,7 +10,7 @@ raise_if_unresolved_osparc_variable_identifier_found, replace_osparc_variable_identifier, ) -from pydantic import BaseModel, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError VALID_IDENTIFIERS: list[str] = [ "$OSPARC_VARIABLE_One121_", @@ -41,6 +41,11 @@ ] +_OSPARC_VARIABLE_IDENTIFIER_ADAPTER: TypeAdapter[ + OsparcVariableIdentifier +] = TypeAdapter(OsparcVariableIdentifier) + + @pytest.fixture(params=VALID_IDENTIFIERS) def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: return request.param @@ -50,13 +55,15 @@ def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: def identifier( osparc_variable_identifier_str: str, ) -> OsparcVariableIdentifier: - return 
parse_obj_as(OsparcVariableIdentifier, osparc_variable_identifier_str) + return _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + osparc_variable_identifier_str + ) @pytest.mark.parametrize("invalid_var_name", INVALID_IDENTIFIERS) def test_osparc_variable_identifier_does_not_validate(invalid_var_name: str): with pytest.raises(ValidationError): - parse_obj_as(OsparcVariableIdentifier, invalid_var_name) + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python(invalid_var_name) def test_raise_if_unresolved(identifier: OsparcVariableIdentifier): @@ -76,13 +83,19 @@ class Example(BaseModel): @pytest.mark.parametrize( "object_template", [ - parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"), - [parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")], - (parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"),), - {parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, - {"test": parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"), + [_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")], + (_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"),), + {_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")}, + { + "test": _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + "$OSPARC_VARIABLE_1" + ) + }, Example( - nested_objects=parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1") + nested_objects=_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + "$OSPARC_VARIABLE_1" + ) ), ], ) @@ -147,6 +160,8 @@ def test_osparc_variable_name_and_default_value( expected_osparc_variable_name: str, expected_default_value: str | None, ): - osparc_variable_identifer = parse_obj_as(OsparcVariableIdentifier, str_identifier) + osparc_variable_identifer = _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + str_identifier + ) assert osparc_variable_identifer.name == expected_osparc_variable_name assert osparc_variable_identifer.default_value == expected_default_value diff --git a/packages/models-library/tests/test_project_networks.py b/packages/models-library/tests/test_project_networks.py index c91f0503a8ec..a929ac2a0aab 100644 --- a/packages/models-library/tests/test_project_networks.py +++ b/packages/models-library/tests/test_project_networks.py @@ -7,7 +7,7 @@ DockerNetworkName, NetworksWithAliases, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.mark.parametrize( @@ -19,7 +19,7 @@ ], ) def test_networks_with_aliases_ok(valid_example: dict) -> None: - assert NetworksWithAliases.parse_obj(valid_example) + assert NetworksWithAliases.model_validate(valid_example) @pytest.mark.parametrize( @@ -39,26 +39,26 @@ def test_networks_with_aliases_ok(valid_example: dict) -> None: ) def test_networks_with_aliases_fail(invalid_example: dict) -> None: with pytest.raises(ValidationError): - assert NetworksWithAliases.parse_obj(invalid_example) + assert NetworksWithAliases.model_validate(invalid_example) @pytest.mark.parametrize("network_name", ["a", "ok", "a_", "A_", "a1", "a-"]) def test_projects_networks_validation(network_name: str) -> None: - assert parse_obj_as(DockerNetworkName, network_name) == network_name - assert parse_obj_as(DockerNetworkAlias, network_name) == network_name + assert TypeAdapter(DockerNetworkName).validate_python(network_name) == network_name + assert TypeAdapter(DockerNetworkAlias).validate_python(network_name) == network_name 
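The module-level _OSPARC_VARIABLE_IDENTIFIER_ADAPTER above reflects that constructing a TypeAdapter builds its core-schema validator up front; creating the adapter once and reusing it avoids rebuilding that validator on every call. A sketch under the same assumption (AliasAdapter and its pattern are illustrative, not repo code):

from typing import Annotated

from pydantic import StringConstraints, TypeAdapter

# built once at import time, like the adapter in the test module above
AliasAdapter = TypeAdapter(
    Annotated[str, StringConstraints(pattern=r"^[a-zA-Z]([a-zA-Z0-9_-]{0,63})?$")]
)


def validate_aliases(raw_names: list[str]) -> list[str]:
    # reuse the prebuilt adapter instead of constructing TypeAdapter(...) per item
    return [AliasAdapter.validate_python(name) for name in raw_names]


assert validate_aliases(["a", "ok-net", "A_1"]) == ["a", "ok-net", "A_1"]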
@pytest.mark.parametrize("network_name", ["", "1", "-", "_"]) def test_projects_networks_validation_fails(network_name: str) -> None: with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkName, network_name) + TypeAdapter(DockerNetworkName).validate_python(network_name) with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkAlias, network_name) + TypeAdapter(DockerNetworkAlias).validate_python(network_name) def test_class_constructors_fail() -> None: with pytest.raises(ValidationError): - NetworksWithAliases.parse_obj( + NetworksWithAliases.model_validate( { "ok-netowrk_naeme": { UUID( diff --git a/packages/models-library/tests/test_project_nodes.py b/packages/models-library/tests/test_project_nodes.py index 2edefd1533d4..96f427a19cb4 100644 --- a/packages/models-library/tests/test_project_nodes.py +++ b/packages/models-library/tests/test_project_nodes.py @@ -31,7 +31,7 @@ def test_create_minimal_node(minimal_node_data_sample: dict[str, Any]): assert node.parent is None assert node.progress is None - assert node.dict(exclude_unset=True) == minimal_node_data_sample + assert node.model_dump(exclude_unset=True) == minimal_node_data_sample def test_create_minimal_node_with_new_data_type( @@ -69,4 +69,4 @@ def test_backwards_compatibility_node_data(minimal_node_data_sample: dict[str, A assert node.state.modified is True assert node.state.dependencies == set() - assert node.dict(exclude_unset=True) != old_node_data + assert node.model_dump(exclude_unset=True) != old_node_data diff --git a/packages/models-library/tests/test_project_nodes_io.py b/packages/models-library/tests/test_project_nodes_io.py index 992c4d1f6048..9a191c7d674b 100644 --- a/packages/models-library/tests/test_project_nodes_io.py +++ b/packages/models-library/tests/test_project_nodes_io.py @@ -12,7 +12,7 @@ SimCoreFileLink, SimcoreS3DirectoryID, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.fixture() @@ -96,9 +96,15 @@ def test_store_discriminator(): }, } - datacore_node = Node.parse_obj(workbench["89f95b67-a2a3-4215-a794-2356684deb61"]) - rawgraph_node = Node.parse_obj(workbench["88119776-e869-4df2-a529-4aae9d9fa35c"]) - simcore_node = Node.parse_obj(workbench["75c1707c-ec1c-49ac-a7bf-af6af9088f38"]) + datacore_node = Node.model_validate( + workbench["89f95b67-a2a3-4215-a794-2356684deb61"] + ) + rawgraph_node = Node.model_validate( + workbench["88119776-e869-4df2-a529-4aae9d9fa35c"] + ) + simcore_node = Node.model_validate( + workbench["75c1707c-ec1c-49ac-a7bf-af6af9088f38"] + ) # must cast to the right subclass within project_nodes.py's InputTypes and OutputTypes unions assert datacore_node.outputs @@ -114,11 +120,13 @@ def test_store_discriminator(): def test_simcore_s3_directory_id(): # the only allowed path is the following - result = parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/ok-simcore-dir/") + result = TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/ok-simcore-dir/" + ) assert result == f"{UUID_0}/{UUID_0}/ok-simcore-dir/" # re-parsing must work the same thing works - assert parse_obj_as(SimcoreS3DirectoryID, result) + assert TypeAdapter(SimcoreS3DirectoryID).validate_python(result) # all below are not allowed for invalid_path in ( @@ -126,10 +134,12 @@ def test_simcore_s3_directory_id(): f"{UUID_0}/{UUID_0}/a-dir/a-file", ): with pytest.raises(ValidationError): - parse_obj_as(SimcoreS3DirectoryID, invalid_path) + TypeAdapter(SimcoreS3DirectoryID).validate_python(invalid_path) with 
pytest.raises(ValidationError, match="Not allowed subdirectory found in"): - parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/a-dir/a-subdir/") + TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/a-dir/a-subdir/" + ) @pytest.mark.parametrize( diff --git a/packages/models-library/tests/test_projects.py b/packages/models-library/tests/test_projects.py index 8b646345c2d9..5cbb0e135735 100644 --- a/packages/models-library/tests/test_projects.py +++ b/packages/models-library/tests/test_projects.py @@ -28,7 +28,7 @@ def minimal_project(faker: Faker) -> dict[str, Any]: def test_project_minimal_model(minimal_project: dict[str, Any]): - project = Project.parse_obj(minimal_project) + project = Project.model_validate(minimal_project) assert project assert project.thumbnail is None @@ -37,7 +37,7 @@ def test_project_minimal_model(minimal_project: dict[str, Any]): def test_project_with_thumbnail_as_empty_string(minimal_project: dict[str, Any]): thumbnail_empty_string = deepcopy(minimal_project) thumbnail_empty_string.update({"thumbnail": ""}) - project = Project.parse_obj(thumbnail_empty_string) + project = Project.model_validate(thumbnail_empty_string) assert project assert project.thumbnail is None diff --git a/packages/models-library/tests/test_projects_state.py b/packages/models-library/tests/test_projects_state.py index 2895d71f3a1b..236d65a55387 100644 --- a/packages/models-library/tests/test_projects_state.py +++ b/packages/models-library/tests/test_projects_state.py @@ -5,7 +5,7 @@ def test_project_locked_with_missing_owner_raises(): with pytest.raises(ValueError): ProjectLocked(value=True, status=ProjectStatus.OPENED) - ProjectLocked.parse_obj({"value": False, "status": ProjectStatus.OPENED}) + ProjectLocked.model_validate({"value": False, "status": ProjectStatus.OPENED}) @pytest.mark.parametrize( @@ -19,4 +19,4 @@ def test_project_locked_with_missing_owner_raises(): ) def test_project_locked_with_allowed_values(lock: bool, status: ProjectStatus): with pytest.raises(ValueError): - ProjectLocked.parse_obj({"value": lock, "status": status}) + ProjectLocked.model_validate({"value": lock, "status": status}) diff --git a/packages/models-library/tests/test_rabbit_messages.py b/packages/models-library/tests/test_rabbit_messages.py index 8c95af75e679..519d54c43e84 100644 --- a/packages/models-library/tests/test_rabbit_messages.py +++ b/packages/models-library/tests/test_rabbit_messages.py @@ -8,7 +8,7 @@ ProgressRabbitMessageProject, ProgressType, ) -from pydantic import parse_raw_as +from pydantic import TypeAdapter faker = Faker() @@ -19,29 +19,28 @@ pytest.param( ProgressRabbitMessageNode( project_id=faker.uuid4(cast_to=None), - user_id=faker.uuid4(cast_to=None), + user_id=faker.pyint(min_value=1), node_id=faker.uuid4(cast_to=None), progress_type=ProgressType.SERVICE_OUTPUTS_PULLING, report=ProgressReport(actual_value=0.4, total=1), - ).json(), + ).model_dump_json(), ProgressRabbitMessageNode, id="node_progress", ), pytest.param( ProgressRabbitMessageProject( project_id=faker.uuid4(cast_to=None), - user_id=faker.uuid4(cast_to=None), + user_id=faker.pyint(min_value=1), progress_type=ProgressType.PROJECT_CLOSING, report=ProgressReport(actual_value=0.4, total=1), - ).json(), + ).model_dump_json(), ProgressRabbitMessageProject, id="project_progress", ), ], ) async def test_raw_message_parsing(raw_data: str, class_type: type): - result = parse_raw_as( - Union[ProgressRabbitMessageNode, ProgressRabbitMessageProject], - raw_data, - ) + result = TypeAdapter( + 
Union[ProgressRabbitMessageNode, ProgressRabbitMessageProject] + ).validate_json(raw_data) assert type(result) == class_type diff --git a/packages/models-library/tests/test_rest_pagination.py b/packages/models-library/tests/test_rest_pagination.py index a9da9db2f1be..d0f5f9f7d92c 100644 --- a/packages/models-library/tests/test_rest_pagination.py +++ b/packages/models-library/tests/test_rest_pagination.py @@ -7,7 +7,7 @@ @pytest.mark.parametrize("cls_model", [Page[str], PageMetaInfoLimitOffset]) def test_page_response_limit_offset_models(cls_model: BaseModel): - examples = cls_model.Config.schema_extra["examples"] + examples = cls_model.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -35,14 +35,14 @@ def test_invalid_count(count: int, offset: int): def test_data_size_does_not_fit_count(): - example = deepcopy(Page[str].Config.schema_extra["examples"][0]) + example = deepcopy(Page[str].model_config["json_schema_extra"]["examples"][0]) example["_meta"]["count"] = len(example["data"]) - 1 with pytest.raises(ValueError): Page[str](**example) def test_empty_data_is_converted_to_list(): - example = deepcopy(Page[str].Config.schema_extra["examples"][0]) + example = deepcopy(Page[str].model_config["json_schema_extra"]["examples"][0]) example["data"] = None example["_meta"]["count"] = 0 model_instance = Page[str](**example) diff --git a/packages/models-library/tests/test_rest_pagination_utils.py b/packages/models-library/tests/test_rest_pagination_utils.py index f9887a1bf714..acaf6bc9d5ca 100644 --- a/packages/models-library/tests/test_rest_pagination_utils.py +++ b/packages/models-library/tests/test_rest_pagination_utils.py @@ -41,7 +41,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, count=len(data_chunk), limit=limit, offset=offset @@ -75,7 +75,7 @@ def test_paginating_data(base_url): offset += len(data_chunk) assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, @@ -83,7 +83,7 @@ def test_paginating_data(base_url): offset=offset, ) - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, @@ -127,7 +127,7 @@ def test_paginating_data(base_url): assert offset == last_chunk_offset assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, @@ -136,7 +136,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( diff --git a/packages/models-library/tests/test_rpc_pagination.py b/packages/models-library/tests/test_rpc_pagination.py index 787aba4daa9e..26931b9032ec 100644 --- a/packages/models-library/tests/test_rpc_pagination.py +++ b/packages/models-library/tests/test_rpc_pagination.py @@ -4,10 +4,12 @@ from models_library.rpc_pagination 
import PageRpc -@pytest.mark.parametrize("example", PageRpc.Config.schema_extra["examples"]) +@pytest.mark.parametrize( + "example", PageRpc.model_config["json_schema_extra"]["examples"] +) def test_create_page_rpc(example: dict[str, Any]): - expected = PageRpc.parse_obj(example) + expected = PageRpc.model_validate(example) assert PageRpc[str].create( expected.data, diff --git a/packages/models-library/tests/test_service_resources.py b/packages/models-library/tests/test_service_resources.py index c119a33e898f..2bc0ccf74831 100644 --- a/packages/models-library/tests/test_service_resources.py +++ b/packages/models-library/tests/test_service_resources.py @@ -13,7 +13,7 @@ ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter @pytest.mark.parametrize( @@ -27,19 +27,19 @@ ), ) def test_compose_image(example: str) -> None: - parse_obj_as(DockerGenericTag, example) + TypeAdapter(DockerGenericTag).validate_python(example) @pytest.fixture def resources_dict() -> ResourcesDict: - return parse_obj_as( - ResourcesDict, ImageResources.Config.schema_extra["example"]["resources"] + return TypeAdapter(ResourcesDict).validate_python( + ImageResources.model_config["json_schema_extra"]["example"]["resources"] ) @pytest.fixture def compose_image() -> DockerGenericTag: - return parse_obj_as(DockerGenericTag, "image:latest") + return TypeAdapter(DockerGenericTag).validate_python("image:latest") def _ensure_resource_value_is_an_object(data: ResourcesDict) -> None: @@ -56,21 +56,21 @@ def test_resources_dict_parsed_as_expected(resources_dict: ResourcesDict) -> Non def test_image_resources_parsed_as_expected() -> None: - result: ImageResources = ImageResources.parse_obj( - ImageResources.Config.schema_extra["example"] + result: ImageResources = ImageResources.model_validate( + ImageResources.model_config["json_schema_extra"]["example"] ) _ensure_resource_value_is_an_object(result.resources) assert type(result) == ImageResources - result: ImageResources = parse_obj_as( - ImageResources, ImageResources.Config.schema_extra["example"] + result: ImageResources = TypeAdapter(ImageResources).validate_python( + ImageResources.model_config["json_schema_extra"]["example"] ) assert type(result) == ImageResources _ensure_resource_value_is_an_object(result.resources) @pytest.mark.parametrize( - "example", ServiceResourcesDictHelpers.Config.schema_extra["examples"] + "example", ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"] ) def test_service_resource_parsed_as_expected( example: dict[DockerGenericTag, Any], compose_image: DockerGenericTag @@ -84,27 +84,27 @@ def _assert_service_resources_dict( for image_resources in service_resources_dict.values(): _ensure_resource_value_is_an_object(image_resources.resources) - service_resources_dict: ServiceResourcesDict = parse_obj_as( - ServiceResourcesDict, example - ) + service_resources_dict: ServiceResourcesDict = TypeAdapter( + ServiceResourcesDict + ).validate_python(example) _assert_service_resources_dict(service_resources_dict) for image_resources in example.values(): service_resources_dict_from_single_service = ( ServiceResourcesDictHelpers.create_from_single_service( image=compose_image, - resources=ImageResources.parse_obj(image_resources).resources, + resources=ImageResources.model_validate(image_resources).resources, ) ) _assert_service_resources_dict(service_resources_dict_from_single_service) @pytest.mark.parametrize( - "example", 
ServiceResourcesDictHelpers.Config.schema_extra["examples"] + "example", ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"] ) def test_create_jsonable_dict(example: dict[DockerGenericTag, Any]) -> None: - service_resources_dict: ServiceResourcesDict = parse_obj_as( - ServiceResourcesDict, example - ) + service_resources_dict: ServiceResourcesDict = TypeAdapter( + ServiceResourcesDict + ).validate_python(example) result = ServiceResourcesDictHelpers.create_jsonable(service_resources_dict) assert example == result diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index a564c1be88f7..287e3d5614b9 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -31,7 +31,7 @@ ) from models_library.services_resources import DEFAULT_SINGLE_SERVICE_NAME from models_library.utils.string_substitution import TextTemplate -from pydantic import BaseModel, ValidationError, parse_obj_as, parse_raw_as +from pydantic import BaseModel, TypeAdapter, ValidationError from pydantic.json import pydantic_encoder @@ -43,17 +43,17 @@ class _Parametrization(NamedTuple): SIMCORE_SERVICE_EXAMPLES = { "legacy": _Parametrization( - example=SimcoreServiceLabels.Config.schema_extra["examples"][0], + example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][0], items=1, uses_dynamic_sidecar=False, ), "dynamic-service": _Parametrization( - example=SimcoreServiceLabels.Config.schema_extra["examples"][1], + example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1], items=5, uses_dynamic_sidecar=True, ), "dynamic-service-with-compose-spec": _Parametrization( - example=SimcoreServiceLabels.Config.schema_extra["examples"][2], + example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2], items=6, uses_dynamic_sidecar=True, ), @@ -66,20 +66,20 @@ class _Parametrization(NamedTuple): ids=list(SIMCORE_SERVICE_EXAMPLES.keys()), ) def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar: bool): - simcore_service_labels = SimcoreServiceLabels.parse_obj(example) + simcore_service_labels = SimcoreServiceLabels.model_validate(example) assert simcore_service_labels - assert len(simcore_service_labels.dict(exclude_unset=True)) == items + assert len(simcore_service_labels.model_dump(exclude_unset=True)) == items assert simcore_service_labels.needs_dynamic_sidecar == uses_dynamic_sidecar def test_service_settings(): - simcore_settings_settings_label = SimcoreServiceSettingsLabel.parse_obj( - SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"] + simcore_settings_settings_label = SimcoreServiceSettingsLabel.model_validate( + SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"] ) assert simcore_settings_settings_label assert len(simcore_settings_settings_label) == len( - SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"] + SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"] ) assert simcore_settings_settings_label[0] @@ -95,7 +95,7 @@ def test_correctly_detect_dynamic_sidecar_boot( ): for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = parse_obj_as(model_cls, example) + model_instance = TypeAdapter(model_cls).validate_python(example) assert model_instance.callbacks_mapping is not None assert model_instance.needs_dynamic_sidecar == ( 
"simcore.service.paths-mapping" in example @@ -104,7 +104,7 @@ def test_correctly_detect_dynamic_sidecar_boot( def test_raises_error_if_http_entrypoint_is_missing(): simcore_service_labels: dict[str, Any] = deepcopy( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) del simcore_service_labels["simcore.service.container-http-entrypoint"] @@ -113,22 +113,27 @@ def test_raises_error_if_http_entrypoint_is_missing(): def test_path_mappings_none_state_paths(): - sample_data = deepcopy(PathMappingsLabel.Config.schema_extra["examples"][0]) + sample_data = deepcopy( + PathMappingsLabel.model_config["json_schema_extra"]["examples"][0] + ) sample_data["state_paths"] = None with pytest.raises(ValidationError): PathMappingsLabel(**sample_data) def test_path_mappings_json_encoding(): - for example in PathMappingsLabel.Config.schema_extra["examples"]: - path_mappings = PathMappingsLabel.parse_obj(example) + for example in PathMappingsLabel.model_config["json_schema_extra"]["examples"]: + path_mappings = PathMappingsLabel.model_validate(example) print(path_mappings) - assert PathMappingsLabel.parse_raw(path_mappings.json()) == path_mappings + assert ( + PathMappingsLabel.model_validate_json(path_mappings.model_dump_json()) + == path_mappings + ) def test_simcore_services_labels_compose_spec_null_container_http_entry_provided(): sample_data: dict[str, Any] = deepcopy( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) assert sample_data["simcore.service.container-http-entrypoint"] @@ -140,7 +145,7 @@ def test_simcore_services_labels_compose_spec_null_container_http_entry_provided def test_raises_error_wrong_restart_policy(): simcore_service_labels: dict[str, Any] = deepcopy( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) simcore_service_labels["simcore.service.restart-policy"] = "__not_a_valid_policy__" @@ -150,7 +155,7 @@ def test_raises_error_wrong_restart_policy(): def test_path_mappings_label_unsupported_size_constraints(): with pytest.raises(ValidationError) as exec_into: - PathMappingsLabel.parse_obj( + PathMappingsLabel.model_validate( { "outputs_path": "/ok_input_path", "inputs_path": "/ok_output_path", @@ -163,7 +168,7 @@ def test_path_mappings_label_unsupported_size_constraints(): def test_path_mappings_label_defining_constraing_on_missing_path(): with pytest.raises(ValidationError) as exec_into: - PathMappingsLabel.parse_obj( + PathMappingsLabel.model_validate( { "outputs_path": "/ok_input_path", "inputs_path": "/ok_output_path", @@ -177,10 +182,10 @@ def test_path_mappings_label_defining_constraing_on_missing_path(): ) -PORT_1: Final[PortInt] = parse_obj_as(PortInt, 1) -PORT_3: Final[PortInt] = parse_obj_as(PortInt, 3) -PORT_20: Final[PortInt] = parse_obj_as(PortInt, 20) -PORT_99: Final[PortInt] = parse_obj_as(PortInt, 99) +PORT_1: Final[PortInt] = TypeAdapter(PortInt).validate_python(1) +PORT_3: Final[PortInt] = TypeAdapter(PortInt).validate_python(3) +PORT_20: Final[PortInt] = TypeAdapter(PortInt).validate_python(20) +PORT_99: Final[PortInt] = TypeAdapter(PortInt).validate_python(99) def test_port_range(): @@ -257,7 +262,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_with_compos "simcore.service.container-http-entrypoint": container_name_1, } - instance = DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + 
instance = DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( instance.containers_allowed_outgoing_permit_list[container_name_1][0] == expected_host_permit_list_policy @@ -286,7 +291,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_without_com ) }, ): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert TypeAdapter(DynamicSidecarServiceLabels).validate_json(json.dumps(dict_data)) def test_container_allow_internet_no_compose_spec_not_ok(): @@ -294,7 +299,7 @@ def test_container_allow_internet_no_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert "Expected only 1 entry 'container' not '{'hoho'}" in f"{exec_info.value}" @@ -307,7 +312,7 @@ def test_container_allow_internet_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert f"container='hoho' not found in {compose_spec=}" in f"{exec_info.value}" @@ -326,7 +331,7 @@ def test_container_outgoing_permit_list_no_compose_spec_not_ok(): ), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Expected only one entry '{DEFAULT_SINGLE_SERVICE_NAME}' not 'container_name'" in f"{exec_info.value}" @@ -350,7 +355,7 @@ def test_container_outgoing_permit_list_compose_spec_not_ok(): "simcore.service.compose-spec": json.dumps(compose_spec), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Trying to permit list container='container_name' which was not found in {compose_spec=}" in f"{exec_info.value}" @@ -373,7 +378,7 @@ def test_not_allowed_in_both_permit_list_and_outgoing_internet(): } with pytest.raises(ValidationError) as exec_info: - DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Not allowed common_containers={{'{container_name}'}} detected" @@ -515,30 +520,27 @@ def test_can_parse_labels_with_osparc_identifiers( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): # can load OSPARC_VARIABLE_ identifiers!! 
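Several of the label tests above port v1's `parse_raw(json.dumps(data))` to `model_validate_json(json.dumps(data))`. A hedged sketch of the equivalence on a toy model, not the repo's `DynamicSidecarServiceLabels`; where the payload is already a dict, plain `model_validate` skips the dumps round-trip:

    import json
    from pydantic import BaseModel

    class Labels(BaseModel):
        compose_spec: str | None = None
        entrypoint: str

    payload = {"entrypoint": "container-0"}
    from_json = Labels.model_validate_json(json.dumps(payload))  # v1: Labels.parse_raw(...)
    from_dict = Labels.model_validate(payload)                   # v1: Labels.parse_obj(...)
    assert from_json == from_dict
    assert from_json.model_dump_json() == '{"compose_spec":null,"entrypoint":"container-0"}'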
- service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) assert service_meta.containers_allowed_outgoing_permit_list nat_rule: NATRule = service_meta.containers_allowed_outgoing_permit_list[ "s4l-core" ][0] - assert nat_rule.hostname == parse_obj_as( - OsparcVariableIdentifier, + assert nat_rule.hostname == TypeAdapter(OsparcVariableIdentifier).validate_python( "${OSPARC_VARIABLE_VENDOR_SECRET_LICENSE_SERVER_HOSTNAME}", ) assert nat_rule.tcp_ports == [ - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_1", ), - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_2", ), 3, ] service_meta = replace_osparc_variable_identifier(service_meta, vendor_environments) - service_meta_str = service_meta.json() + service_meta_str = service_meta.model_dump_json() not_replaced_vars = {"OSPARC_VARIABLE_OS_TYPE_LINUX"} @@ -547,7 +549,7 @@ def test_can_parse_labels_with_osparc_identifiers( continue assert osparc_variable_name not in service_meta_str - service_meta_str = service_meta.json( + service_meta_str = service_meta.model_dump_json( include={"containers_allowed_outgoing_permit_list"} ) @@ -563,7 +565,7 @@ def test_resolving_some_service_labels_at_load_time( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): print(json.dumps(service_labels, indent=1)) - service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) # NOTE: replacing all OsparcVariableIdentifier instances nested inside objects # this also does a partial replacement if there is no entry inside the vendor_environments @@ -580,7 +582,7 @@ def test_resolving_some_service_labels_at_load_time( ) assert template.is_valid() resolved_label: str = template.safe_substitute(vendor_environments) - to_restore = parse_raw_as(pydantic_model, resolved_label) + to_restore = TypeAdapter(pydantic_model).validate_json(resolved_label) setattr(service_meta, attribute_name, to_restore) print(json.dumps(service_labels, indent=1)) @@ -588,10 +590,10 @@ def test_resolving_some_service_labels_at_load_time( # NOTE: that this model needs all values to be resolved before parsing them # otherwise it might fail!! 
The question is whether these values can be resolved at this point # NOTE: vendor values are in the database and therefore are available at this point - labels = SimcoreServiceLabels.parse_obj(service_labels) + labels = SimcoreServiceLabels.model_validate(service_labels) - print("After", labels.json(indent=1)) - formatted_json = service_meta.json(indent=1) + print("After", labels.model_dump_json(indent=1)) + formatted_json = service_meta.model_dump_json(indent=1) print("After", formatted_json) for entry in vendor_environments: print(entry) @@ -601,11 +603,11 @@ def test_resolving_some_service_labels_at_load_time( def test_user_preferences_path_is_part_of_exiting_volume(): labels_data = { "simcore.service.paths-mapping": json.dumps( - PathMappingsLabel.Config.schema_extra["examples"][0] + PathMappingsLabel.model_config["json_schema_extra"]["examples"][0] ), "simcore.service.user-preferences-path": json.dumps( "/tmp/outputs" # noqa: S108 ), } with pytest.raises(ValidationError, match="user_preferences_path=/tmp/outputs"): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(labels_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(labels_data)) diff --git a/packages/models-library/tests/test_service_settings_nat_rule.py b/packages/models-library/tests/test_service_settings_nat_rule.py index 66319e9435c0..c6f9f05497cb 100644 --- a/packages/models-library/tests/test_service_settings_nat_rule.py +++ b/packages/models-library/tests/test_service_settings_nat_rule.py @@ -9,7 +9,7 @@ replace_osparc_variable_identifier, ) from models_library.service_settings_nat_rule import NATRule -from pydantic import parse_obj_as +from pydantic import TypeAdapter SUPPORTED_TEMPLATES: set[str] = { "$OSPARC_VARIABLE_%s", @@ -79,7 +79,7 @@ def _all_combinations_from_dict(data: dict[Any, Any]) -> list[dict[Any, Any]]: def test_nat_rule_with_osparc_variable_identifier( nat_rule_dict: dict[str, Any], osparc_variables: dict[str, Any] ): - nat_rule = parse_obj_as(NATRule, nat_rule_dict) + nat_rule = TypeAdapter(NATRule).validate_python(nat_rule_dict) with pytest.raises(UnresolvedOsparcVariableIdentifierError): list(nat_rule.iter_tcp_ports()) @@ -87,7 +87,7 @@ def test_nat_rule_with_osparc_variable_identifier( # NOTE: values are mostly replaced in place unless it's used as first level replace_osparc_variable_identifier(nat_rule, osparc_variables) - nat_rule_str = nat_rule.json() + nat_rule_str = nat_rule.model_dump_json() for osparc_variable_name in osparc_variables: assert osparc_variable_name not in nat_rule_str @@ -108,7 +108,9 @@ def test_nat_rule_with_osparc_variable_identifier( ], ) def test_______(replace_with_value: Any): - a_var = parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_some_var") + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python( + "$OSPARC_VARIABLE_some_var" + ) assert isinstance(a_var, OsparcVariableIdentifier) replaced_var = replace_osparc_variable_identifier( @@ -151,7 +153,7 @@ def test_replace_an_instance_of_osparc_variable_identifier( except TypeError: formatted_template = var_template - a_var = parse_obj_as(OsparcVariableIdentifier, formatted_template) + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python(formatted_template) assert isinstance(a_var, OsparcVariableIdentifier) replace_with_identifier_default = identifier_has_default and replace_with_default diff --git a/packages/models-library/tests/test_services.py b/packages/models-library/tests/test_services.py index 54906743db10..c7b7562eaa63 100644 --- 
a/packages/models-library/tests/test_services.py +++ b/packages/models-library/tests/test_services.py @@ -182,7 +182,7 @@ def _find_pattern_entry(obj: dict[str, Any], key: str) -> Any: def test_boot_option_wrong_default() -> None: - for example in [deepcopy(x) for x in BootOption.Config.schema_extra["examples"]]: + for example in [deepcopy(x) for x in BootOption.model_config["json_schema_extra"]["examples"]]: with pytest.raises(ValueError): example["default"] = "__undefined__" assert BootOption(**example) @@ -201,11 +201,12 @@ def test_service_docker_data_labels_convesion(): # we want labels to look like io.simcore.a_label_property convension_breaking_fields: set[tuple[str, str]] = set() - fiedls_with_aliases: list[tuple[str, str]] = [ - (x.name, x.alias) for x in ServiceMetaDataPublished.__fields__.values() + fields_with_aliases: list[tuple[str, str]] = [ + (name, info.alias) for name, info in ServiceMetaDataPublished.model_fields.items() + if info.alias is not None ] - for name, alias in fiedls_with_aliases: + for name, alias in fields_with_aliases: if alias in FIELD_NAME_EXCEPTIONS: continue # check dashes and uppercase diff --git a/packages/models-library/tests/test_services_io.py b/packages/models-library/tests/test_services_io.py index acfb02a05b1e..e056647665f0 100644 --- a/packages/models-library/tests/test_services_io.py +++ b/packages/models-library/tests/test_services_io.py @@ -15,7 +15,7 @@ def test_service_port_units(tests_data_dir: Path): data = yaml.safe_load((tests_data_dir / "metadata-sleeper-2.0.2.yaml").read_text()) print(ServiceMetaDataPublished.schema_json(indent=2)) - service_meta = ServiceMetaDataPublished.parse_obj(data) + service_meta = ServiceMetaDataPublished.model_validate(data) assert service_meta.inputs for input_nameid, input_meta in service_meta.inputs.items(): diff --git a/packages/models-library/tests/test_sidecar_volumes.py b/packages/models-library/tests/test_sidecar_volumes.py index e9c545542889..402899726bc4 100644 --- a/packages/models-library/tests/test_sidecar_volumes.py +++ b/packages/models-library/tests/test_sidecar_volumes.py @@ -14,4 +14,4 @@ def test_volume_state_equality_does_not_use_last_changed(status: VolumeStatus): # at the moment of the creation of the object. 
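The `test_services.py` hunk above also captures a behavioral difference worth noting: v1's `__fields__.values()` exposed an `.alias` that defaulted to the field name, while v2's `model_fields` maps names to `FieldInfo` objects whose `alias` is `None` unless one was declared, hence the new `if info.alias is not None` filter. An illustrative sketch on a toy model, not the repo's `ServiceMetaDataPublished`:

    from pydantic import BaseModel, Field

    class ServiceMeta(BaseModel):
        integration_version: str = Field(alias="integration-version")
        name: str  # no alias declared: info.alias is None in v2

    aliases = [
        (name, info.alias)
        for name, info in ServiceMeta.model_fields.items()
        if info.alias is not None
    ]
    assert aliases == [("integration_version", "integration-version")]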
assert VolumeState(status=status) == VolumeState(status=status) schema_property_count = len(VolumeState.schema()["properties"]) - assert len(VolumeState(status=status).dict()) == schema_property_count + assert len(VolumeState(status=status).model_dump()) == schema_property_count diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index 272e73cf6e50..edac734f0c73 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -15,20 +15,24 @@ _AutoRegisterMeta, _BaseUserPreferenceModel, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter _SERVICE_KEY_AND_VERSION_SAMPLES: list[tuple[ServiceKey, ServiceVersion]] = [ ( - parse_obj_as(ServiceKey, "simcore/services/comp/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python("simcore/services/comp/something-1231"), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/dynamic/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/frontend/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/frontend/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ] @@ -54,7 +58,7 @@ def test_base_user_preference_model(value: Any, preference_type: PreferenceType) base_data = _get_base_user_preferences_data( preference_type=preference_type, value=value ) - assert parse_obj_as(_BaseUserPreferenceModel, base_data) + assert TypeAdapter(_BaseUserPreferenceModel).validate_python(base_data) def test_frontend_preferences(value: Any): @@ -64,7 +68,7 @@ def test_frontend_preferences(value: Any): base_data.update({"preference_identifier": "pref-name"}) # check serialization - frontend_preference = parse_obj_as(FrontendUserPreference, base_data) + frontend_preference = TypeAdapter(FrontendUserPreference).validate_python(base_data) assert set(frontend_preference.to_db().keys()) == {"value"} @@ -80,7 +84,7 @@ def test_user_service_preferences(value: Any, mock_file_path: Path): "file_path": mock_file_path, } ) - instance = parse_obj_as(UserServiceUserPreference, base_data) + instance = TypeAdapter(UserServiceUserPreference).validate_python(base_data) assert set(instance.to_db().keys()) == { "value", "service_key", @@ -96,7 +100,7 @@ def unregister_defined_classes() -> Iterator[None]: def test__frontend__user_preference(value: Any, unregister_defined_classes: None): - pref1 = FrontendUserPreference.parse_obj( + pref1 = FrontendUserPreference.model_validate( {"preference_identifier": "pref_id", "value": value} ) assert isinstance(pref1, FrontendUserPreference) @@ -112,7 +116,7 @@ def test__user_service__user_preference( mock_file_path: Path, unregister_defined_classes: None, ): - pref1 = UserServiceUserPreference.parse_obj( + pref1 = UserServiceUserPreference.model_validate( { "value": value, "service_key": service_key, @@ -123,8 +127,8 @@ def test__user_service__user_preference( # NOTE: these will be stored as bytes, # check bytes serialization/deserialization - pref1_as_bytes = pref1.json().encode() - new_instance = UserServiceUserPreference.parse_raw(pref1_as_bytes) + pref1_as_bytes = pref1.model_dump_json().encode() + 
new_instance = UserServiceUserPreference.model_validate_json(pref1_as_bytes) assert new_instance == pref1 diff --git a/packages/models-library/tests/test_utils_common_validators.py b/packages/models-library/tests/test_utils_common_validators.py index d7fe367ab5dd..db9df708b0f4 100644 --- a/packages/models-library/tests/test_utils_common_validators.py +++ b/packages/models-library/tests/test_utils_common_validators.py @@ -7,7 +7,7 @@ none_to_empty_str_pre_validator, null_or_none_str_to_none_validator, ) -from pydantic import BaseModel, ValidationError, validator +from pydantic import BaseModel, ValidationError, field_validator def test_enums_pre_validator(): @@ -20,13 +20,14 @@ class Model(BaseModel): class ModelWithPreValidator(BaseModel): color: Enum1 - _from_equivalent_enums = validator("color", allow_reuse=True, pre=True)( + _from_equivalent_enums = field_validator("color", mode="before")( create_enums_pre_validator(Enum1) ) # with Enum1 model = Model(color=Enum1.RED) - assert ModelWithPreValidator(color=Enum1.RED) == model + # See: https://docs.pydantic.dev/latest/migration/#changes-to-pydanticbasemodel + assert ModelWithPreValidator(color=Enum1.RED).model_dump() == model.model_dump() # with Enum2 class Enum2(Enum): @@ -35,55 +36,56 @@ class Enum2(Enum): with pytest.raises(ValidationError): Model(color=Enum2.RED) - assert ModelWithPreValidator(color=Enum2.RED) == model + # See: https://docs.pydantic.dev/latest/migration/#changes-to-pydanticbasemodel + assert ModelWithPreValidator(color=Enum2.RED).model_dump() == model.model_dump() def test_empty_str_to_none_pre_validator(): class Model(BaseModel): nullable_message: str | None - _empty_is_none = validator("nullable_message", allow_reuse=True, pre=True)( + _empty_is_none = field_validator("nullable_message", mode="before")( empty_str_to_none_pre_validator ) - model = Model.parse_obj({"nullable_message": None}) - assert model == Model.parse_obj({"nullable_message": ""}) + model = Model.model_validate({"nullable_message": None}) + assert model == Model.model_validate({"nullable_message": ""}) def test_none_to_empty_str_pre_validator(): class Model(BaseModel): message: str - _none_is_empty = validator("message", allow_reuse=True, pre=True)( + _none_is_empty = field_validator("message", mode="before")( none_to_empty_str_pre_validator ) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": None}) def test_null_or_none_str_to_none_validator(): class Model(BaseModel): message: str | None - _null_or_none_str_to_none_validator = validator( - "message", allow_reuse=True, pre=True - )(null_or_none_str_to_none_validator) + _null_or_none_str_to_none_validator = field_validator("message", mode="before")( + null_or_none_str_to_none_validator + ) - model = Model.parse_obj({"message": "none"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "none"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "null"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "null"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NoNe"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NoNe"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NuLl"}) - 
assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NuLl"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": None}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": None}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": ""}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": ""}) diff --git a/packages/models-library/tests/test_utils_nodes.py b/packages/models-library/tests/test_utils_nodes.py index b4634770a97e..a41595ec5680 100644 --- a/packages/models-library/tests/test_utils_nodes.py +++ b/packages/models-library/tests/test_utils_nodes.py @@ -14,7 +14,6 @@ SimcoreS3FileID, ) from models_library.utils.nodes import compute_node_hash -from pydantic import AnyUrl, parse_obj_as ANOTHER_NODE_ID = uuid4() ANOTHER_NODE_OUTPUT_KEY = "the_output_link" @@ -39,9 +38,7 @@ "input_bool": True, "input_string": "string", "input_downloadlink": DownloadLink( - downloadLink=parse_obj_as( - AnyUrl, "http://httpbin.org/image/jpeg" - ) + downloadLink="http://httpbin.org/image/jpeg" ), "input_simcorelink": SimCoreFileLink( store=0, diff --git a/packages/models-library/tests/test_utils_pydantic_tools_extension.py b/packages/models-library/tests/test_utils_pydantic_tools_extension.py index 95a50099c028..0bf8abdbb412 100644 --- a/packages/models-library/tests/test_utils_pydantic_tools_extension.py +++ b/packages/models-library/tests/test_utils_pydantic_tools_extension.py @@ -14,15 +14,20 @@ class MyModel(BaseModel): def test_schema(): - assert MyModel.schema() == { + assert MyModel.model_json_schema() == { "title": "MyModel", "type": "object", "properties": { "a": {"title": "A", "type": "integer"}, - "b": {"title": "B", "type": "integer"}, + "b": {"anyOf": [{"type": "integer"}, {"type": "null"}], "title": "B"}, "c": {"title": "C", "default": 42, "type": "integer"}, - "d": {"title": "D", "type": "integer"}, + "d": { + "anyOf": [{"type": "integer"}, {"type": "null"}], + "default": None, + "title": "D", + }, "e": { + "default": None, "title": "E", "type": "integer", "description": "optional non-nullable", @@ -34,8 +39,8 @@ def test_schema(): def test_only_required(): model = MyModel(a=1, b=2) - assert model.dict() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} - assert model.dict(exclude_unset=True) == {"a": 1, "b": 2} + assert model.model_dump() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} + assert model.model_dump(exclude_unset=True) == {"a": 1, "b": 2} def test_parse_obj_or_none(): diff --git a/packages/models-library/tests/test_utils_service_io.py b/packages/models-library/tests/test_utils_service_io.py index 2bf58a288699..7ef8d4070a00 100644 --- a/packages/models-library/tests/test_utils_service_io.py +++ b/packages/models-library/tests/test_utils_service_io.py @@ -17,13 +17,13 @@ from models_library.services import ServiceInput, ServiceOutput, ServicePortKey from models_library.utils.json_schema import jsonschema_validate_schema from models_library.utils.services_io import get_service_io_json_schema -from pydantic import parse_obj_as +from pydantic import TypeAdapter example_inputs_labels = [ - e for e in ServiceInput.Config.schema_extra["examples"] if e["label"] + e for e in ServiceInput.model_config["json_schema_extra"]["examples"] if e["label"] ] example_outputs_labels = [ - e for e in 
ServiceOutput.Config.schema_extra["examples"] if e["label"] + e for e in ServiceOutput.model_config["json_schema_extra"]["examples"] if e["label"] ] @@ -31,16 +31,16 @@ def service_port(request: pytest.FixtureRequest) -> ServiceInput | ServiceOutput: try: index = example_inputs_labels.index(request.param) - example = ServiceInput.Config.schema_extra["examples"][index] - return ServiceInput.parse_obj(example) + example = ServiceInput.model_config["json_schema_extra"]["examples"][index] + return ServiceInput.model_validate(example) except ValueError: index = example_outputs_labels.index(request.param) - example = ServiceOutput.Config.schema_extra["examples"][index] - return ServiceOutput.parse_obj(example) + example = ServiceOutput.model_config["json_schema_extra"]["examples"][index] + return ServiceOutput.model_validate(example) def test_get_schema_from_port(service_port: ServiceInput | ServiceOutput): - print(service_port.json(indent=2)) + print(service_port.model_dump_json(indent=2)) # get schema = get_service_io_json_schema(service_port) @@ -73,8 +73,12 @@ def test_against_service_metadata_configs(metadata_path: Path): meta = json.loads(metadata_path.read_text()) - inputs = parse_obj_as(dict[ServicePortKey, ServiceInput], meta["inputs"]) - outputs = parse_obj_as(dict[ServicePortKey, ServiceOutput], meta["outputs"]) + inputs = TypeAdapter(dict[ServicePortKey, ServiceInput]).validate_python( + meta["inputs"] + ) + outputs = TypeAdapter(dict[ServicePortKey, ServiceOutput]).validate_python( + meta["outputs"] + ) for port in itertools.chain(inputs.values(), outputs.values()): schema = get_service_io_json_schema(port) diff --git a/packages/models-library/tests/test_utils_specs_substitution.py b/packages/models-library/tests/test_utils_specs_substitution.py index 0670e56e271f..c523271bd2a3 100644 --- a/packages/models-library/tests/test_utils_specs_substitution.py +++ b/packages/models-library/tests/test_utils_specs_substitution.py @@ -12,7 +12,7 @@ SpecsSubstitutionsResolver, SubstitutionValue, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter @pytest.fixture() @@ -49,7 +49,7 @@ def available_osparc_variables( "SERVICE_VERSION": service_version, "DISPLAY": "True", } - return parse_obj_as(dict[str, SubstitutionValue], environs) + return TypeAdapter(dict[str, SubstitutionValue]).validate_python(environs) @pytest.mark.parametrize( diff --git a/packages/notifications-library/requirements/_base.in b/packages/notifications-library/requirements/_base.in index 4879a9d6fb63..047005b4a39b 100644 --- a/packages/notifications-library/requirements/_base.in +++ b/packages/notifications-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'notifications-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index abc242615c5a..08807d95984e 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -4,6 +4,8 @@ aiosmtplib==3.0.2 # via -r requirements/_base.in alembic==1.13.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # 
via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in async-timeout==4.0.3 @@ -63,7 +65,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.18 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -72,10 +74,22 @@ pydantic==1.10.18 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings referencing==0.35.1 # via # jsonschema @@ -108,6 +122,7 @@ typing-extensions==4.12.2 # via # alembic # pydantic + # pydantic-core # typer yarl==1.12.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 55a7d9b8ee82..bd0edc5133ee 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -66,7 +66,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/notifications-library/requirements/ci.txt b/packages/notifications-library/requirements/ci.txt index 4bb71fd35df6..105d6a514b3e 100644 --- a/packages/notifications-library/requirements/ci.txt +++ b/packages/notifications-library/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../common-library/ simcore-models-library @ ../models-library/ simcore-postgres-database @ ../postgres-database/ pytest-simcore @ ../pytest-simcore/ diff --git a/packages/notifications-library/requirements/dev.txt b/packages/notifications-library/requirements/dev.txt index 723de7630806..0a0100513484 100644 --- a/packages/notifications-library/requirements/dev.txt +++ b/packages/notifications-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../models-library/ --editable ../postgres-database/ --editable ../pytest-simcore/ diff --git a/packages/notifications-library/src/notifications_library/errors.py b/packages/notifications-library/src/notifications_library/errors.py index 2ffaa461a02c..21edbbb0dc75 100644 --- a/packages/notifications-library/src/notifications_library/errors.py +++ b/packages/notifications-library/src/notifications_library/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import 
OsparcErrorMixin class NotifierError(OsparcErrorMixin, Exception): diff --git a/packages/notifications-library/tests/email/test_email_events.py b/packages/notifications-library/tests/email/test_email_events.py index 5e3786ab234a..995da5faf4ea 100644 --- a/packages/notifications-library/tests/email/test_email_events.py +++ b/packages/notifications-library/tests/email/test_email_events.py @@ -66,8 +66,8 @@ def ipinfo(faker: Faker) -> dict[str, Any]: @pytest.fixture def request_form(faker: Faker) -> dict[str, Any]: return AccountRequestInfo( - **AccountRequestInfo.Config.schema_extra["example"] - ).dict() + **AccountRequestInfo.model_config["json_schema_extra"]["example"] + ).model_dump() @pytest.fixture diff --git a/packages/notifications-library/tests/with_db/conftest.py b/packages/notifications-library/tests/with_db/conftest.py index bdd3d0f3d091..750f3cc24a49 100644 --- a/packages/notifications-library/tests/with_db/conftest.py +++ b/packages/notifications-library/tests/with_db/conftest.py @@ -14,7 +14,7 @@ from models_library.products import ProductName from models_library.users import GroupID, UserID from notifications_library._templates import get_default_named_templates -from pydantic import validate_arguments +from pydantic import validate_call from simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products @@ -165,7 +165,7 @@ def set_template_to_product( sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any] ): # NOTE: needs all fixture products in db - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( @@ -179,7 +179,7 @@ async def _(template_name: IDStr, product_name: ProductName) -> None: @pytest.fixture def unset_template_to_product(sqlalchemy_async_engine: AsyncEngine): - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index 48679f446635..0294edf9114f 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt --constraint ./constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in alembic pydantic diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index bded83bb4f90..bf02ea152a86 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -1,5 +1,7 @@ alembic==1.13.3 # via -r requirements/_base.in +annotated-types==0.7.0 + # via pydantic async-timeout==4.0.3 # via asyncpg asyncpg==0.29.0 @@ -18,10 +20,12 @@ multidict==6.1.0 # via yarl psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.18 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in +pydantic-core==2.23.3 + # via pydantic sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index 10a240f68660..25346140a412 100644 --- 
a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -13,7 +13,8 @@ --requirement _tools.txt # installs this repo's packages -pytest-simcore @ ../../packages/pytest-simcore/ +simcore-common-library @ ../common-library/ +pytest-simcore @ ../pytest-simcore/ # current module simcore-postgres-database @ . diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index 8136f1a48b58..095f8383b2ae 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -13,7 +13,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../../packages/pytest-simcore/ +--editable ../common-library/ +--editable ../pytest-simcore/ + # current module --editable . diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 39749b7fdbfc..c8aa9962d43b 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -5,8 +5,8 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy -from pydantic import BaseModel -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin +from pydantic import BaseModel, ConfigDict from sqlalchemy.dialects.postgresql import insert as pg_insert from .errors import ForeignKeyViolation @@ -18,7 +18,7 @@ # -class BaseProjectsMetadataError(PydanticErrorMixin, RuntimeError): +class BaseProjectsMetadataError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project metadata unexpected error" @@ -53,10 +53,7 @@ class ProjectMetadata(BaseModel): parent_node_id: uuid.UUID | None root_parent_project_uuid: uuid.UUID | None root_parent_node_id: uuid.UUID | None - - class Config: - frozen = True - orm_mode = True + model_config = ConfigDict(frozen=True, from_attributes=True) # diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 09cb8a561f4e..cb47141b1ab9 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -5,8 +5,8 @@ import sqlalchemy from aiopg.sa.connection import SAConnection -from pydantic import BaseModel, Field -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin +from pydantic import BaseModel, ConfigDict, Field from sqlalchemy.dialects.postgresql import insert as pg_insert from .errors import ForeignKeyViolation, UniqueViolation @@ -17,7 +17,7 @@ # # Errors # -class BaseProjectNodesError(PydanticErrorMixin, RuntimeError): +class BaseProjectNodesError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project nodes unexpected error" @@ -43,18 +43,16 @@ class ProjectNodeCreate(BaseModel): @classmethod def get_field_names(cls, *, exclude: set[str]) -> set[str]: - return {name for name in cls.__fields__ if name not in exclude} + return {name for name in cls.model_fields.keys() if name not in exclude} - class Config: - frozen = True + model_config = ConfigDict(frozen=True) class ProjectNode(ProjectNodeCreate): created: datetime.datetime 
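The `utils_projects_*` hunks here replace v1's nested `class Config` with v2's `model_config = ConfigDict(...)`, where `orm_mode` is renamed `from_attributes`. Note that v2 merges `model_config` down the class hierarchy, so a child that only sets `from_attributes=True` still inherits `frozen=True` from its parent, preserving what `Config(ProjectNodeCreate.Config)` did in v1. A sketch under those assumptions, with toy models:

    from pydantic import BaseModel, ConfigDict

    class NodeCreate(BaseModel):
        model_config = ConfigDict(frozen=True)  # v1: class Config: frozen = True
        required_resources: dict = {}

    class Node(NodeCreate):
        # merged with the parent's config, so Node instances stay frozen
        model_config = ConfigDict(from_attributes=True)  # v1: orm_mode = True

    class FakeRow:  # stands in for an ORM/result-proxy row
        required_resources = {"CPU": 1}

    node = Node.model_validate(FakeRow())  # v1: Node.from_orm(row)
    assert node.required_resources == {"CPU": 1}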
modified: datetime.datetime - class Config(ProjectNodeCreate.Config): - orm_mode = True + model_config = ConfigDict(from_attributes=True) @dataclass(frozen=True, kw_only=True) @@ -85,7 +83,7 @@ async def add( [ { "project_uuid": f"{self.project_uuid}", - **node.dict(), + **node.model_dump(), } for node in nodes ] diff --git a/packages/postgres-database/tests/test_models_payments_transactions.py b/packages/postgres-database/tests/test_models_payments_transactions.py index 9d4f748e094a..6dde13b1abed 100644 --- a/packages/postgres-database/tests/test_models_payments_transactions.py +++ b/packages/postgres-database/tests/test_models_payments_transactions.py @@ -6,6 +6,7 @@ import decimal from collections.abc import Callable +from typing import Any import pytest import sqlalchemy as sa @@ -43,14 +44,25 @@ async def test_numerics_precission_and_scale(connection: SAConnection): assert float(got) == expected +def _remove_not_required(data: dict[str, Any]) -> dict[str, Any]: + for to_remove in ( + "completed_at", + "invoice_url", + "invoice_pdf_url", + "state", + "state_message", + "stripe_invoice_id", + ): + data.pop(to_remove) + return data + + @pytest.fixture def init_transaction(connection: SAConnection): async def _init(payment_id: str): # get payment_id from payment-gateway - values = random_payment_transaction(payment_id=payment_id) - # remove states - values.pop("state") - values.pop("completed_at") + values = _remove_not_required(random_payment_transaction(payment_id=payment_id)) + # init successful: set timestamp values["initiated_at"] = utcnow() @@ -180,10 +192,8 @@ def create_fake_user_transactions(connection: SAConnection, user_id: int) -> Cal async def _go(expected_total=5): payment_ids = [] for _ in range(expected_total): - values = random_payment_transaction(user_id=user_id) - # remove states - values.pop("state") - values.pop("completed_at") + values = _remove_not_required(random_payment_transaction(user_id=user_id)) + payment_id = await insert_init_payment_transaction(connection, **values) assert payment_id payment_ids.append(payment_id) diff --git a/packages/postgres-database/tests/test_utils_projects_nodes.py b/packages/postgres-database/tests/test_utils_projects_nodes.py index 33e5b86b7cb2..21c130bcc7d9 100644 --- a/packages/postgres-database/tests/test_utils_projects_nodes.py +++ b/packages/postgres-database/tests/test_utils_projects_nodes.py @@ -412,9 +412,9 @@ async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same assert len(project1_nodes) == 1 project2_nodes = await project2_repo.add(connection, nodes=[shared_node]) assert len(project2_nodes) == 1 - assert project1_nodes[0].dict( + assert project1_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) - ) == project2_nodes[0].dict( + ) == project2_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) ) with pytest.raises(ProjectNodesNonUniqueNodeFoundError): diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py index 48fb2d1283e7..e6afeac8e7b5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py @@ -32,7 +32,7 @@ async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]: config=Config(signature_version="s3v4"), ) assert isinstance(session_client, ClientCreatorContext) - client = typing.cast(S3Client, await 
exit_stack.enter_async_context(session_client)) + client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] yield client diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 077fb25d51a5..74f007973c5d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -11,7 +11,7 @@ from faker import Faker from models_library.utils.fastapi_encoders import jsonable_encoder from moto.server import ThreadedMotoServer -from pydantic import AnyHttpUrl, SecretStr, parse_obj_as +from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from settings_library.basic_types import IDStr from settings_library.ec2 import EC2Settings @@ -75,7 +75,7 @@ def mocked_ec2_server_envs( mocked_ec2_server_settings: EC2Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_ec2_server_settings.dict() + changed_envs: EnvVarsDict = mocked_ec2_server_settings.model_dump() return setenvs_from_dict(monkeypatch, {**changed_envs}) @@ -101,10 +101,7 @@ def mocked_ssm_server_settings( ) -> SSMSettings: return SSMSettings( SSM_ACCESS_KEY_ID=SecretStr("xxx"), - SSM_ENDPOINT=parse_obj_as( - AnyHttpUrl, - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + SSM_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 SSM_SECRET_ACCESS_KEY=SecretStr("xxx"), ) @@ -124,10 +121,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=parse_obj_as( - AnyHttpUrl, - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 S3_SECRET_KEY=IDStr("xxx"), S3_BUCKET_NAME=IDStr(f"pytest{faker.pystr().lower()}"), S3_REGION=IDStr("us-east-1"), @@ -139,5 +133,7 @@ def mocked_s3_server_envs( mocked_s3_server_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_s3_server_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = mocked_s3_server_settings.model_dump( + mode="json", exclude_unset=True + ) return setenvs_from_dict(monkeypatch, {**changed_envs}) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py index 5780937a2c03..91cd5e2d4284 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py @@ -106,9 +106,9 @@ def external_registry_settings( if external_envfile_dict: config = { field: external_envfile_dict.get(field, None) - for field in RegistrySettings.__fields__ + for field in RegistrySettings.model_fields } - return RegistrySettings.parse_obj(config) + return RegistrySettings.model_validate(config) return None diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py index 9d675c45e114..3f4058b72e97 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py @@ -24,7 +24,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import EmailStr, HttpUrl, parse_obj_as +from pydantic import EmailStr, HttpUrl, TypeAdapter from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, ) @@ -34,27 +34,27 @@ @pytest.fixture def wallet_id(faker: Faker) -> WalletID: - return parse_obj_as(WalletID, faker.pyint()) + return TypeAdapter(WalletID).validate_python(faker.pyint()) @pytest.fixture def wallet_name(faker: Faker) -> IDStr: - return parse_obj_as(IDStr, f"wallet-{faker.word()}") + return TypeAdapter(IDStr).validate_python(f"wallet-{faker.word()}") @pytest.fixture -def invoice_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_url(faker: Faker) -> str: + return faker.image_url() @pytest.fixture -def invoice_pdf_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_pdf_url(faker: Faker) -> str: + return faker.image_url() @pytest.fixture def stripe_invoice_id(faker: Faker) -> StripeInvoiceID: - return parse_obj_as(StripeInvoiceID, f"in_{faker.word()}") + return TypeAdapter(StripeInvoiceID).validate_python(f"in_{faker.word()}") @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py index f82636b6633c..e55c1e489f09 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py @@ -14,7 +14,7 @@ import pytest from faker import Faker from models_library.products import ProductName, StripePriceID, StripeTaxRateID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from .helpers.faker_factories import random_product @@ -51,8 +51,7 @@ def product_name() -> ProductName: def support_email( request: pytest.FixtureRequest, product_name: ProductName ) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-support-email", default=None) or f"support@{product_name}.info", ) @@ -60,8 +59,7 @@ def support_email( @pytest.fixture def bcc_email(request: pytest.FixtureRequest, product_name: ProductName) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-bcc-email", default=None) or f"finance@{product_name}-department.info", ) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py index 4a027a42e2d5..09f8a8b75e84 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py @@ -16,7 +16,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.faker_factories import random_project _MESSAGE = ( @@ -38,15 +38,14 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def project_id(faker: Faker, request: pytest.FixtureRequest) -> ProjectID: - return parse_obj_as( - ProjectID, + return TypeAdapter(ProjectID).validate_python( 
request.config.getoption("--faker-project-id", default=None) or faker.uuid4(), ) @pytest.fixture def node_id(faker: Faker) -> NodeID: - return parse_obj_as(NodeID, faker.uuid4()) + return TypeAdapter(NodeID).validate_python(faker.uuid4()) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py index 6ba011db47cf..4e59b6db93a4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py @@ -14,7 +14,7 @@ from faker import Faker from models_library.basic_types import IDStr from models_library.users import UserID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from .helpers.faker_factories import DEFAULT_TEST_PASSWORD, random_user @@ -61,8 +61,7 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def user_id(faker: Faker, request: pytest.FixtureRequest) -> UserID: - return parse_obj_as( - UserID, + return TypeAdapter(UserID).validate_python( request.config.getoption("--faker-user-id", default=None) or faker.pyint(), ) @@ -74,8 +73,7 @@ def is_external_user_email(request: pytest.FixtureRequest) -> bool: @pytest.fixture def user_email(faker: Faker, request: pytest.FixtureRequest) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption(_FAKE_USER_EMAIL_OPTION, default=None) or faker.email(), ) @@ -93,7 +91,7 @@ def user_last_name(faker: Faker) -> str: @pytest.fixture def user_name(user_email: str) -> IDStr: - return parse_obj_as(IDStr, user_email.split("@")[0]) + return TypeAdapter(IDStr).validate_python(user_email.split("@")[0]) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index 096b392fce66..d4418a5ef816 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -209,7 +209,7 @@ def random_product( registration_email_template: str | None = None, fake: Faker = DEFAULT_FAKER, **overrides, -): +) -> dict[str, Any]: """ Foreign keys are: @@ -301,6 +301,10 @@ def random_payment_transaction( "initiated_at": utcnow(), "state": PaymentTransactionState.PENDING, "completed_at": None, + "invoice_url": None, + "stripe_invoice_id": None, + "invoice_pdf_url": None, + "state_message": None, } # state is not added on purpose assert set(data.keys()).issubset({c.name for c in payments_transactions.columns}) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py index fd5afaa183ac..177b1330e360 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py @@ -5,7 +5,7 @@ import httpx import jsonref -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.catalog import CatalogSettings from settings_library.director_v2 import DirectorV2Settings from settings_library.storage import StorageSettings @@ -87,7 +87,7 @@ def _get_params( raise VerbNotInPathError(msg) if (params := verb_spec.get("parameters")) is None: continue - all_params += parse_obj_as(list[CapturedParameter], params) + 
all_params += TypeAdapter(list[CapturedParameter]).validate_python(params) return set(all_params) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py index 89783d0591cc..25f2abc8cd0d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py @@ -1,17 +1,15 @@ from typing import Literal -from pydantic import BaseModel, Field, root_validator, validator +from pydantic import field_validator, model_validator, ConfigDict, BaseModel, Field from .httpx_calls_capture_errors import OpenApiSpecError class CapturedParameterSchema(BaseModel): - title: str | None - type_: Literal["str", "int", "float", "bool"] | None = Field( - None, alias="type", optional=True - ) + title: str | None = None + type_: Literal["str", "int", "float", "bool"] | None = Field(None, alias="type") pattern: str | None - format_: Literal["uuid"] | None = Field(None, alias="format", optional=True) + format_: Literal["uuid"] | None = Field(None, alias="format") exclusiveMinimum: bool | None minimum: int | None anyOf: list["CapturedParameterSchema"] | None @@ -22,7 +20,7 @@ class Config: validate_always = True allow_population_by_field_name = True - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def preprocess_type_(cls, val): if val == "string": @@ -33,7 +31,7 @@ def preprocess_type_(cls, val): val = "bool" return val - @root_validator(pre=False) + @model_validator(mode="after") @classmethod def check_compatibility(cls, values): type_ = values.get("type_") @@ -100,10 +98,7 @@ class CapturedParameter(BaseModel): response_value: str | None = ( None # attribute for storing the params value in a concrete response ) - - class Config: - validate_always = True - allow_population_by_field_name = True + model_config = ConfigDict(validate_default=True, populate_by_name=True) def __hash__(self): return hash( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py index d9b5bb64437a..9a36d4cc0200 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py @@ -6,7 +6,7 @@ from fastapi.encoders import jsonable_encoder from httpx._types import URLTypes from jsonschema import ValidationError -from pydantic import parse_file_as +from pydantic import TypeAdapter from .httpx_calls_capture_errors import CaptureProcessingError from .httpx_calls_capture_models import HttpApiCallCaptureModel, get_captured_model @@ -14,6 +14,11 @@ _logger = logging.getLogger(__name__) +_HTTP_API_CALL_CAPTURE_MODEL_ADAPTER: TypeAdapter[ + list[HttpApiCallCaptureModel] +] = TypeAdapter(list[HttpApiCallCaptureModel]) + + class AsyncClientCaptureWrapper(httpx.AsyncClient): """ Adds captures mechanism @@ -41,8 +46,11 @@ async def request(self, method: str, url: URLTypes, **kwargs): or self._capture_file.read_text().strip() == "" ): self._capture_file.write_text("[]") - serialized_captures: list[HttpApiCallCaptureModel] = parse_file_as( - list[HttpApiCallCaptureModel], self._capture_file + + serialized_captures: list[ + HttpApiCallCaptureModel + ] = _HTTP_API_CALL_CAPTURE_MODEL_ADAPTER.validate_json( + self._capture_file.read_text() ) 
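# NOTE (migration aside): `parse_obj_as`/`parse_file_as` are gone in pydantic
# v2; `TypeAdapter` is the replacement used throughout this diff. Building the
# adapter once at module scope, as done just above, avoids recompiling the
# validation schema on every call. A hedged sketch with an invented helper:

from pydantic import ByteSize, TypeAdapter

_BYTE_SIZE_ADAPTER = TypeAdapter(ByteSize)


def parse_size(raw: str) -> ByteSize:
    # v2 equivalent of v1's parse_obj_as(ByteSize, raw)
    return _BYTE_SIZE_ADAPTER.validate_python(raw)


assert parse_size("16MiB") == 16 * 1024 * 1024
# validate_json(path.read_text()) likewise stands in for v1's parse_file_as.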
serialized_captures.append(capture) self._capture_file.write_text( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py index ed6381f56110..6eae044643b9 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py @@ -1,6 +1,6 @@ import pytest from _pytest.mark.structures import ParameterSet -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter def byte_size_ids(val) -> str | None: @@ -10,4 +10,4 @@ def byte_size_ids(val) -> str | None: def parametrized_file_size(size_str: str) -> ParameterSet: - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str) + return pytest.param(TypeAdapter(ByteSize).validate_python(size_str), id=size_str) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py index d2349d6fa7cd..57f3b6993774 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright_sim4life.py @@ -6,8 +6,7 @@ import arrow from playwright.sync_api import FrameLocator, Page, WebSocket, expect -from pydantic import TypeAdapter # pylint: disable=no-name-in-module -from pydantic import ByteSize +from pydantic import ByteSize, TypeAdapter from .logging_tools import log_context from .playwright import ( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 5d7e721a832f..2f0a03b575d0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -8,14 +8,14 @@ from aiohttp import ClientSession from aws_library.s3 import MultiPartUploadLinks from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client from .logging_tools import log_context -_SENDER_CHUNK_SIZE: Final[int] = parse_obj_as(ByteSize, "16Mib") +_SENDER_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("16Mib") async def _file_sender( @@ -51,7 +51,7 @@ async def upload_file_part( f"--> uploading {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}]..." 
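# NOTE (migration aside): in pydantic v2 the URL types (`AnyUrl`, `HttpUrl`,
# ...) are no longer `str` subclasses, which is why the next hunk wraps
# `upload_url` in `str(...)` and why several fixtures in this diff now return
# plain `str`. A short illustrative check:

from pydantic import AnyUrl, TypeAdapter

url = TypeAdapter(AnyUrl).validate_python("https://example.com/path")
assert not isinstance(url, str)  # this was True under v1
raw: str = str(url)  # explicit conversion is now required at call sites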
) response = await session.put( - upload_url, + str(upload_url), data=_file_sender( file, offset=file_offset, diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index b6687e222399..a190fa6900e8 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -78,9 +78,9 @@ async def create_project( project_nodes={ NodeID(node_id): ProjectNodeCreate( node_id=NodeID(node_id), - required_resources=ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0], + required_resources=ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0], ) for node_id in project_data.get("workbench", {}) }, diff --git a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py index b6c0a5aad3bb..6bc71929eb3e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py @@ -14,7 +14,7 @@ import requests import requests.exceptions from docker.errors import APIError -from pydantic import HttpUrl, parse_obj_as +from pydantic import HttpUrl, TypeAdapter from tenacity import retry from tenacity.after import after_log from tenacity.retry import retry_if_exception_type @@ -56,7 +56,7 @@ def _wait_until_httpbin_is_responsive(): _wait_until_httpbin_is_responsive() - yield parse_obj_as(HttpUrl, base_url) + yield TypeAdapter(HttpUrl).validate_python(base_url) finally: with suppress(APIError): diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index 5c8df1ff6c5a..d8cd056c1154 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -38,7 +38,7 @@ import pytest import respx import yaml -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.docker import get_service_published_port from pytest_simcore.helpers.host import get_localhost_ip @@ -213,9 +213,9 @@ def _( assert capture_path.suffix == ".json" if services_mocks_enabled: - captures: list[HttpApiCallCaptureModel] = parse_obj_as( - list[HttpApiCallCaptureModel], json.loads(capture_path.read_text()) - ) + captures: list[HttpApiCallCaptureModel] = TypeAdapter( + list[HttpApiCallCaptureModel] + ).validate_python(json.loads(capture_path.read_text())) if len(side_effects_callbacks) > 0: assert len(side_effects_callbacks) == len(captures) diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 46cee6fbeeb5..38b9d2bdf8d6 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -31,5 +31,5 @@ def minio_s3_settings_envs( minio_s3_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = minio_s3_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True) return setenvs_from_dict(monkeypatch, changed_envs) diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py index 
7cfbf13df11a..04d285a601eb 100644 --- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py @@ -51,7 +51,7 @@ def walk_model_examples_in_package(package: ModuleType) -> Iterator[ModelExample def iter_model_examples_in_module(module: object) -> Iterator[ModelExample]: - """Iterates on all examples defined as BaseModelClass.Config.schema_extra["example"] + """Iterates on all examples defined as BaseModelClass.model_config["json_schema_extra"]["example"] Usage: @@ -64,7 +64,7 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + assert model_cls.model_validate(example_data) """ def _is_model_cls(obj) -> bool: @@ -82,7 +82,7 @@ def _is_model_cls(obj) -> bool: for model_name, model_cls in inspect.getmembers(module, _is_model_cls): assert model_name # nosec if ( - (config_cls := model_cls.Config) + (config_cls := model_cls.model_config) and inspect.isclass(config_cls) and is_strict_inner(model_cls, config_cls) and (schema_extra := getattr(config_cls, "schema_extra", {})) @@ -121,7 +121,9 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]: ) # checks exampleS setup in schema_extra - examples_list = copy.deepcopy(model_cls.Config.schema_extra.get("examples", [])) + examples_list = copy.deepcopy( + model_cls.model_config["json_schema_extra"].get("examples", []) + ) assert isinstance(examples_list, list), ( "OpenAPI and json-schema differ regarding the format for exampleS." "The former is a dict and the latter an array. " @@ -131,7 +133,7 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]: ) # check example in schema_extra - example = copy.deepcopy(model_cls.Config.schema_extra.get("example")) + example = copy.deepcopy(model_cls.model_config["json_schema_extra"].get("example")) # collect all examples and creates fixture -> {example-name: example, ...} examples = { diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 47188400e79b..240e0100648b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -56,7 +56,7 @@ def rabbit_env_vars_dict( async def rabbit_settings(rabbit_env_vars_dict: EnvVarsDict) -> RabbitSettings: """Returns the settings of a rabbit service that is up and responsive""" - settings = RabbitSettings.parse_obj(rabbit_env_vars_dict) + settings = RabbitSettings.model_validate(rabbit_env_vars_dict) await wait_till_rabbit_responsive(settings.dsn) return settings diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index dffe3883c614..429783e70613 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -28,7 +28,7 @@ from models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from yarl import URL @@ -107,8 +107,8 @@ 
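# NOTE (migration aside): v1's `Config.schema_extra` survives as the
# `json_schema_extra` entry of `model_config`, so lookups change from
# `Model.Config.schema_extra["examples"][0]` to
# `Model.model_config["json_schema_extra"]["examples"][0]`, as in the fixtures
# above and the mocks below. Sketch with an invented model:

from pydantic import BaseModel, ConfigDict


class Demo(BaseModel):
    model_config = ConfigDict(json_schema_extra={"examples": [{"x": 1}]})

    x: int


example = Demo.model_config["json_schema_extra"]["examples"][0]
assert Demo.model_validate(example).x == 1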
def create_computation_cb(url, **kwargs) -> CallbackResult: "62237c33-8d6c-4709-aa92-c3cf693dd6d2", ], } - returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + returned_computation = ComputationTask.model_validate( + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": f"{kwargs['json']['project_id']}", @@ -131,8 +131,8 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: state = RunningState.NOT_STARTED pipeline: dict[str, list[str]] = FULL_PROJECT_PIPELINE_ADJACENCY node_states = FULL_PROJECT_NODE_STATES - returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + returned_computation = ComputationTask.model_validate( + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": Path(url.path).name, @@ -154,11 +154,11 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: def create_cluster_cb(url, **kwargs) -> CallbackResult: assert "json" in kwargs, f"missing body in call to {url}" assert url.query.get("user_id") - random_cluster = Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) + random_cluster = Cluster.model_validate( + random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) return CallbackResult( - status=201, payload=json.loads(random_cluster.json(by_alias=True)) + status=201, payload=json.loads(random_cluster.model_dump_json(by_alias=True)) ) @@ -169,9 +169,11 @@ def list_clusters_cb(url, **kwargs) -> CallbackResult: body=json.dumps( [ json.loads( - Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) - ).json(by_alias=True) + Cluster.model_validate( + random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ) + ).model_dump_json(by_alias=True) ) for _ in range(3) ] @@ -185,12 +187,14 @@ def get_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) @@ -214,12 +218,14 @@ def patch_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) @@ -366,11 +372,13 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: if file_size := kwargs["params"].get("file_size") is not None: assert file_size upload_schema = FileUploadSchema( - chunk_size=parse_obj_as(ByteSize, "5GiB"), - urls=[parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}")], + chunk_size=TypeAdapter(ByteSize).validate_python("5GiB"), + urls=[ + TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") + ], links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, f"{url}:abort"), - complete_upload=parse_obj_as(AnyUrl, f"{url}:complete"), + abort_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:abort"), + complete_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:complete"), ), ) return CallbackResult( @@ -379,7 +387,7 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: ) # version 1 returns a presigned 
link presigned_link = PresignedLink( - link=parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}") + link=TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") ) return CallbackResult( status=status.HTTP_200_OK, @@ -436,7 +444,9 @@ async def storage_v0_service_mock( aioresponses_mocker.get( get_file_metadata_pattern, status=status.HTTP_200_OK, - payload={"data": FileMetaDataGet.Config.schema_extra["examples"][0]}, + payload={ + "data": FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] + }, repeat=True, ) aioresponses_mocker.get( @@ -465,8 +475,9 @@ def generate_future_link(url, **kwargs): (parsed_url.scheme, parsed_url.netloc, parsed_url.path, "", "", "") ) - payload: FileUploadCompleteResponse = parse_obj_as( - FileUploadCompleteResponse, + payload: FileUploadCompleteResponse = TypeAdapter( + FileUploadCompleteResponse + ).validate_python( { "links": { "state": stripped_url + ":complete/futures/" + str(faker.uuid4()) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index 9628d1058c92..e2f7654d3d0c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -10,7 +10,7 @@ import tenacity from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_mock import MockerFixture from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL @@ -82,6 +82,8 @@ def create_simcore_file_id() -> Callable[[ProjectID, NodeID, str], SimcoreS3File def _creator( project_id: ProjectID, node_id: NodeID, file_name: str ) -> SimcoreS3FileID: - return parse_obj_as(SimcoreS3FileID, f"{project_id}/{node_id}/{file_name}") + return TypeAdapter(SimcoreS3FileID).validate_python( + f"{project_id}/{node_id}/{file_name}" + ) return _creator diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index 6e288d49e0bf..213a27f4c131 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in click diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index b91836c51389..a465920e7ba1 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -69,17 +71,27 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via -r 
requirements/../../../packages/models-library/requirements/_base.in pygments==2.18.0 # via rich pytest==8.3.3 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings python-slugify==8.0.4 # via cookiecutter pyyaml==6.0.2 @@ -117,6 +129,7 @@ types-python-dateutil==2.9.0.20240906 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.3 # via diff --git a/packages/service-integration/requirements/ci.txt b/packages/service-integration/requirements/ci.txt index 9e0e935338c8..daa95fb5ef9c 100644 --- a/packages/service-integration/requirements/ci.txt +++ b/packages/service-integration/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt --requirement _tools.txt +simcore-common-library @ ../common-library simcore-models-library @ ../models-library pytest-simcore @ ../pytest-simcore diff --git a/packages/service-integration/requirements/dev.txt b/packages/service-integration/requirements/dev.txt index 9e2af0f71247..bbe3d8325329 100644 --- a/packages/service-integration/requirements/dev.txt +++ b/packages/service-integration/requirements/dev.txt @@ -11,6 +11,7 @@ --requirement _test.txt --requirement _tools.txt +--editable ../common-library/ --editable ../models-library/ --editable ../pytest-simcore/ diff --git a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py index a390a469a414..a0a5f2954021 100644 --- a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py +++ b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py @@ -5,9 +5,10 @@ # type:ignore from enum import Enum -from typing import Any +from typing import Any, TypeAlias -from pydantic import BaseModel, ConstrainedInt, Extra, Field, conint, constr +from pydantic import BaseModel, ConfigDict, Field, RootModel, StringConstraints +from typing_extensions import Annotated # MODIFICATIONS ------------------------------------------------------------------------- # @@ -19,17 +20,14 @@ # UserWarning: format of 'subnet_ip_address' not understood for 'string' - using default # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # ---------------------------------------------------------------------------------------- class Configuration(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -39,8 +37,7 @@ class Config: class CredentialSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") config: str | None = None file: str | None = None @@ -54,31 +51,29 @@ class Condition(Enum): class DependsOn(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: Condition class Extend(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") service: str file: str | None = None class Logging(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None - options: dict[constr(regex=r"^.+$"), str | float | None] | None = None + options: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float | None + ] | None = None class Port(BaseModel): - class Config: - extra = Extra.forbid + model_config = 
ConfigDict(extra="forbid") mode: str | None = None host_ip: str | None = None @@ -96,8 +91,7 @@ class PullPolicy(Enum): class Secret1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -107,38 +101,33 @@ class Config: class Ulimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") hard: int soft: int class Bind(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") propagation: str | None = None create_host_path: bool | None = None class Volume2(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") nocopy: bool | None = None class Tmpfs(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") - size: conint(ge=0) | str | None = None + size: Annotated[int, Field(ge=0)] | str | None = None class Volume1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") type: str source: str | None = None @@ -151,8 +140,7 @@ class Config: class Healthcheck(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") disable: bool | None = None interval: str | None = None @@ -168,8 +156,7 @@ class Order(Enum): class RollbackConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -185,8 +172,7 @@ class Order1(Enum): class UpdateConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -197,16 +183,14 @@ class Config: class Limits(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None class RestartPolicy(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: str | None = None delay: str | None = None @@ -215,15 +199,13 @@ class Config: class Preference(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") spread: str | None = None class Placement(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") constraints: list[str] | None = None preferences: list[Preference] | None = None @@ -231,53 +213,49 @@ class Config: class DiscreteResourceSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") kind: str | None = None value: float | None = None class GenericResource(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") discrete_resource_spec: DiscreteResourceSpec | None = None -class GenericResources(BaseModel): - __root__: list[GenericResource] +class GenericResources(RootModel): + root: list[GenericResource] class ConfigItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") subnet: str | None = None ip_range: str | None = None gateway: str | None = None - aux_addresses: dict[constr(regex=r"^.+$"), str] | None = None + aux_addresses: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str + ] | None = None class Ipam(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None config: list[ConfigItem] | None = None - options: dict[constr(regex=r"^.+$"), 
str] | None = None + options: dict[Annotated[str, StringConstraints(pattern=r"^.+$")], str] | None = None class External(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None class External1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None @@ -290,37 +268,39 @@ class External3(BaseModel): name: str | None = None -class ListOfStrings(BaseModel): - __root__: list[str] +class ListOfStrings(RootModel): + root: list[str] -class ListOrDict(BaseModel): - __root__: (dict[constr(regex=r".+"), str | float | bool | None] | list[str]) +class ListOrDict(RootModel): + root: ( + dict[ + Annotated[str, StringConstraints(pattern=r".+")], str | float | bool | None + ] + | list[str] + ) class BlkioLimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None rate: int | str | None = None class BlkioWeight(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None weight: int | None = None -class Constraints(BaseModel): - __root__: Any +class Constraints(RootModel): + root: Any = None class BuildItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") context: str | None = None dockerfile: str | None = None @@ -335,8 +315,7 @@ class Config: class BlkioConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") device_read_bps: list[BlkioLimit] | None = None device_read_iops: list[BlkioLimit] | None = None @@ -347,8 +326,7 @@ class Config: class Network1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") aliases: ListOfStrings | None = None ipv4_address: str | None = None @@ -358,8 +336,7 @@ class Config: class Device(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") capabilities: ListOfStrings | None = None count: str | int | None = None @@ -368,17 +345,18 @@ class Config: options: ListOrDict | None = None -class Devices(BaseModel): - __root__: list[Device] +class Devices(RootModel): + root: list[Device] class Network(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None ipam: Ipam | None = None external: External | None = None internal: bool | None = None @@ -388,32 +366,33 @@ class Config: class Volume(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None external: External1 | None = None labels: ListOrDict | None = None class Secret(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None external: External2 | None = None labels: ListOrDict | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None template_driver: str | 
None = None class ComposeSpecConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None @@ -422,13 +401,12 @@ class Config: template_driver: str | None = None -class StringOrList(BaseModel): - __root__: str | ListOfStrings +class StringOrList(RootModel): + root: str | ListOfStrings class Reservations(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None @@ -437,16 +415,14 @@ class Config: class Resources(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") limits: Limits | None = None reservations: Reservations | None = None class Deployment(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") mode: str | None = None endpoint_mode: str | None = None @@ -460,8 +436,7 @@ class Config: class Service(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") deploy: Deployment | None = None build: str | BuildItem | None = None @@ -472,8 +447,8 @@ class Config: command: str | list[str] | None = None configs: list[str | Configuration] | None = None container_name: str | None = None - cpu_count: conint(ge=0) | None = None - cpu_percent: conint(ge=0, le=100) | None = None + cpu_count: Annotated[int, Field(ge=0)] | None = None + cpu_percent: Annotated[int, Field(ge=0, le=100)] | None = None cpu_shares: float | str | None = None cpu_quota: float | str | None = None cpu_period: float | str | None = None @@ -483,7 +458,10 @@ class Config: cpuset: str | None = None credential_spec: CredentialSpec | None = None depends_on: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), DependsOn] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], DependsOn + ] ) = None device_cgroup_rules: ListOfStrings | None = None devices: list[str] | None = None @@ -515,10 +493,14 @@ class Config: memswap_limit: float | str | None = None network_mode: str | None = None networks: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network1 | None] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + Network1 | None, + ] ) = None oom_kill_disable: bool | None = None - oom_score_adj: conint(ge=-1000, le=1000) | None = None + oom_score_adj: Annotated[int, Field(ge=-1000, le=1000)] | None = None pid: str | None = None pids_limit: float | str | None = None platform: str | None = None @@ -540,7 +522,9 @@ class Config: storage_opt: dict[str, Any] | None = None tmpfs: StringOrList | None = None tty: bool | None = None - ulimits: dict[constr(regex=r"^[a-z]+$"), int | Ulimit] | None = None + ulimits: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-z]+$")], int | Ulimit + ] | None = None user: str | None = None userns_mode: str | None = None volumes: list[str | Volume1] | None = None @@ -553,15 +537,27 @@ class ComposeSpecification(BaseModel): The Compose file is a YAML file defining a multi-containers based application. """ - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") version: str | None = Field( None, description="Version of the Compose specification used. 
Tools not implementing required version MUST reject the configuration file.", ) - services: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Service] | None = None - networks: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network] | None = None - volumes: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Volume] | None = None - secrets: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Secret] | None = None - configs: None | (dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), ComposeSpecConfig]) = None + services: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Service + ] | None = None + networks: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Network + ] | None = None + volumes: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Volume + ] | None = None + secrets: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Secret + ] | None = None + configs: None | ( + dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + ComposeSpecConfig, + ] + ) = None diff --git a/packages/service-integration/src/service_integration/cli/__init__.py b/packages/service-integration/src/service_integration/cli/__init__.py index 7a1c058957ea..a146de5735dd 100644 --- a/packages/service-integration/src/service_integration/cli/__init__.py +++ b/packages/service-integration/src/service_integration/cli/__init__.py @@ -62,7 +62,7 @@ def main( overrides["COMPOSE_VERSION"] = compose_version # save states - ctx.settings = AppSettings.parse_obj(overrides) # type: ignore[attr-defined] # pylint:disable=no-member + ctx.settings = AppSettings.model_validate(overrides) # type: ignore[attr-defined] # pylint:disable=no-member # diff --git a/packages/service-integration/src/service_integration/cli/_compose_spec.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py index a42936c36959..afccc0e268e2 100644 --- a/packages/service-integration/src/service_integration/cli/_compose_spec.py +++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py @@ -204,7 +204,7 @@ def create_compose( for n, config_name in enumerate(configs_kwargs_map): nth_compose_spec = create_docker_compose_image_spec( settings, **configs_kwargs_map[config_name] - ).dict(exclude_unset=True) + ).model_dump(exclude_unset=True) if n == 0: compose_spec_dict = nth_compose_spec diff --git a/packages/service-integration/src/service_integration/cli/_config.py b/packages/service-integration/src/service_integration/cli/_config.py index 2f41dcb6f72c..4437907efa08 100644 --- a/packages/service-integration/src/service_integration/cli/_config.py +++ b/packages/service-integration/src/service_integration/cli/_config.py @@ -25,7 +25,7 @@ def _get_labels_or_raise(build_labels) -> dict[str, str]: return dict(item.strip().split("=") for item in build_labels) if isinstance(build_labels, dict): return build_labels - if labels__root__ := build_labels.__root__: + if labels__root__ := build_labels.root: assert isinstance(labels__root__, dict) # nosec return labels__root__ raise InvalidLabelsError(build_labels=build_labels) @@ -39,7 +39,7 @@ def _create_config_from_compose_spec( ): rich.print(f"Creating osparc config files from {compose_spec_path}") - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(compose_spec_path.read_text()) ) @@ -56,7 +56,7 @@ def _save(service_name: str, filename: Path, model: BaseModel): rich.print(f"Creating {output_path} ...", end="") with output_path.open("wt") as fh: - data = 
json.loads(model.json(by_alias=True, exclude_none=True)) + data = json.loads(model.model_dump_json(by_alias=True, exclude_none=True)) yaml.safe_dump(data, fh, sort_keys=False) rich.print("DONE") diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index 8d216b7d9181..65521d363710 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ServiceIntegrationError(PydanticErrorMixin, RuntimeError): +class ServiceIntegrationError(OsparcErrorMixin, RuntimeError): pass @@ -13,5 +13,5 @@ class UndefinedOciImageSpecError(ServiceIntegrationError): ... -class InvalidLabelsError(PydanticErrorMixin, ValueError): +class InvalidLabelsError(OsparcErrorMixin, ValueError): template_msg = "Invalid build labels {build_labels}" diff --git a/packages/service-integration/src/service_integration/oci_image_spec.py b/packages/service-integration/src/service_integration/oci_image_spec.py index e07a5e4cafc9..3b9e45b46ab4 100644 --- a/packages/service-integration/src/service_integration/oci_image_spec.py +++ b/packages/service-integration/src/service_integration/oci_image_spec.py @@ -11,8 +11,7 @@ from models_library.basic_types import SHA1Str, VersionStr from models_library.utils.labels_annotations import from_labels, to_labels -from pydantic import BaseModel, Field -from pydantic.config import Extra +from pydantic import BaseModel, ConfigDict, Field from pydantic.networks import AnyUrl # @@ -100,22 +99,20 @@ class OciImageSpecAnnotations(BaseModel): None, description="Digest of the image this image is based on (string)", ) - - class Config: - alias_generator = _underscore_as_dot - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_dot, populate_by_name=True, extra="forbid" + ) @classmethod def from_labels_annotations( cls, labels: dict[str, str] ) -> "OciImageSpecAnnotations": data = from_labels(labels, prefix_key=OCI_LABEL_PREFIX, trim_key_head=False) - return cls.parse_obj(data) + return cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OCI_LABEL_PREFIX, ) return labels @@ -131,30 +128,30 @@ class LabelSchemaAnnotations(BaseModel): build_date: datetime vcs_ref: str vcs_url: AnyUrl - - class Config: - alias_generator = lambda field_name: field_name.replace("_", "-") - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=lambda field_name: field_name.replace("_", "-"), + populate_by_name=True, + extra="forbid", + ) @classmethod def create_from_env(cls) -> "LabelSchemaAnnotations": data = {} - for field_name in cls.__fields__: + for field_name in cls.model_fields: if value := os.environ.get(field_name.upper()): data[field_name] = value - return cls.parse_obj(data) + return cls.model_validate(data) def to_oci_data(self) -> dict[str, Any]: """Collects data that be converted to OCI labels. 
WARNING: label-schema has be deprecated in favor of OCI image specs """ - convertable_data = self.dict( + convertable_data = self.model_dump( include=set(_TO_OCI.keys()), exclude_unset=True, exclude_none=True ) assert set(convertable_data.keys()).issubset( # nosec - set(self.__fields__.keys()) + set(self.model_fields.keys()) ) # nosec return {_TO_OCI[key]: value for key, value in convertable_data.items()} diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 1a340729e419..9382b98b447c 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -36,9 +36,14 @@ from_labels, to_labels, ) -from pydantic import NonNegativeInt, ValidationError -from pydantic.class_validators import root_validator, validator -from pydantic.config import Extra +from pydantic import ( + ConfigDict, + NonNegativeInt, + ValidationError, + ValidationInfo, + field_validator, + model_validator, +) from pydantic.fields import Field from pydantic.main import BaseModel @@ -67,7 +72,7 @@ class DockerComposeOverwriteConfig(ComposeSpecification): def create_default( cls, service_name: str | None = None ) -> "DockerComposeOverwriteConfig": - model: "DockerComposeOverwriteConfig" = cls.parse_obj( + model: "DockerComposeOverwriteConfig" = cls.model_validate( { "services": { service_name: { @@ -84,7 +89,7 @@ def create_default( def from_yaml(cls, path: Path) -> "DockerComposeOverwriteConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "DockerComposeOverwriteConfig" = cls.parse_obj(data) + model: "DockerComposeOverwriteConfig" = cls.model_validate(data) return model @@ -101,11 +106,11 @@ class MetadataConfig(ServiceMetaDataPublished): exclude=True, ) - @validator("contact") + @field_validator("contact") @classmethod - def _check_contact_in_authors(cls, v, values): + def _check_contact_in_authors(cls, v, info: ValidationInfo): """catalog service relies on contact and author to define access rights""" - authors_emails = {author.email for author in values["authors"]} + authors_emails = {author.email for author in info.data["authors"]} if v not in authors_emails: msg = "Contact {v} must be registered as an author" raise ValueError(msg) @@ -115,7 +120,7 @@ def _check_contact_in_authors(cls, v, values): def from_yaml(cls, path: Path) -> "MetadataConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model @classmethod @@ -123,12 +128,12 @@ def from_labels_annotations(cls, labels: dict[str, str]) -> "MetadataConfig": data = from_labels( labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False, ) @@ -175,7 +180,7 @@ class SettingsItem(BaseModel): description="The value of the service setting (shall follow Docker REST API scheme for services", ) - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def ensure_backwards_compatible_setting_type(cls, v): if v == "resources": @@ 
-183,18 +188,16 @@ def ensure_backwards_compatible_setting_type(cls, v): return "Resources" return v - @validator("value", pre=True) + @field_validator("value", mode="before") @classmethod - def check_value_against_custom_types(cls, v, values): - if (type_ := values.get("type_")) and type_ == "ContainerSpec": - ContainerSpec.parse_obj(v) + def check_value_against_custom_types(cls, v, info: ValidationInfo): + if (type_ := info.data.get("type_")) and type_ == "ContainerSpec": + ContainerSpec.model_validate(v) return v class ValidatingDynamicSidecarServiceLabels(DynamicSidecarServiceLabels): - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", populate_by_name=True) def _underscore_as_minus(field_name: str) -> str: @@ -225,13 +228,13 @@ class RuntimeConfig(BaseModel): settings: list[SettingsItem] = Field(default_factory=list) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def ensure_compatibility(cls, v): # NOTE: if changes are applied to `DynamicSidecarServiceLabels` # these are also validated when ooil runs. try: - ValidatingDynamicSidecarServiceLabels.parse_obj(v) + ValidatingDynamicSidecarServiceLabels.model_validate(v) except ValidationError: _logger.exception( "Could not validate %s via %s", @@ -242,25 +245,26 @@ def ensure_compatibility(cls, v): return v - class Config: - alias_generator = _underscore_as_minus - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_minus, + populate_by_name=True, + extra="forbid", + ) @classmethod def from_yaml(cls, path: Path) -> "RuntimeConfig": with path.open() as fh: data = yaml_safe_load(fh) - return cls.parse_obj(data) + return cls.model_validate(data) @classmethod def from_labels_annotations(cls, labels: dict[str, str]) -> "RuntimeConfig": data = from_labels(labels, prefix_key=OSPARC_LABEL_PREFIXES[1]) - return cls.parse_obj(data) + return cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[1], ) return labels diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index df97e7c18b1a..7f6dec6ca158 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -41,9 +41,9 @@ def create_image_spec( docker_compose_overwrite_cfg.services[service_name].build.labels = labels - overwrite_options = docker_compose_overwrite_cfg.services[service_name].build.dict( - exclude_none=True - ) + overwrite_options = docker_compose_overwrite_cfg.services[ + service_name + ].build.model_dump(exclude_none=True, serialize_as_any=True) build_spec = BuildItem(**overwrite_options) return ComposeSpecification( diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index 70c971c8db94..f8b977cc9a4b 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -1,4 +1,5 @@ -from pydantic import BaseModel, BaseSettings, Field, SecretStr +from pydantic import BaseModel, Field, SecretStr +from 
pydantic_settings import BaseSettings, SettingsConfigDict class Registry(BaseModel): @@ -26,9 +27,9 @@ class AppSettings(BaseSettings): COMPOSE_VERSION: str = Field( "3.7", description="version of the docker-compose spec" ) - - class Config: - env_file_encoding = "utf-8" + model_config = SettingsConfigDict( + env_file_encoding="utf-8", + ) # TODO: load from ~/.osparc/service-integration.json or env file # TODO: add access to secrets diff --git a/packages/service-integration/src/service_integration/versioning.py b/packages/service-integration/src/service_integration/versioning.py index 3ed56868e500..0d7685a818fe 100644 --- a/packages/service-integration/src/service_integration/versioning.py +++ b/packages/service-integration/src/service_integration/versioning.py @@ -1,15 +1,13 @@ -import re from datetime import datetime -from re import Pattern -from typing import Any, ClassVar +from typing import Annotated, TypeAlias from models_library.basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from packaging.version import Version -from pydantic import BaseModel, ConstrainedStr, Field +from pydantic import BaseModel, ConfigDict, Field, StringConstraints - -class SemanticVersionStr(ConstrainedStr): - regex: Pattern[str] | None = re.compile(SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +SemanticVersionStr: TypeAlias = Annotated[ + str, StringConstraints(pattern=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +] def bump_version_string(current_version: str, bump: str) -> str: @@ -52,8 +50,8 @@ class ExecutableVersionInfo(BaseModel): version: SemanticVersionStr released: datetime - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "display_name": "SEMCAD X", "display_version": "Matterhorn Student Edition 1", @@ -63,6 +61,7 @@ class Config: "released": "2021-11-19T14:58:45.900979", } } + ) class ServiceVersionInfo(BaseModel): @@ -72,11 +71,12 @@ class ServiceVersionInfo(BaseModel): ) released: datetime = Field(..., description="Publication/release date") - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "version": "1.0.0", # e.g. 
first time released as an osparc "integration_version": "2.1.0", "released": "2021-11-19T14:58:45.900979", } } + ) diff --git a/packages/service-integration/tests/test_command_compose.py b/packages/service-integration/tests/test_command_compose.py index 371d8a9dbdcd..50f8b5b67b45 100644 --- a/packages/service-integration/tests/test_command_compose.py +++ b/packages/service-integration/tests/test_command_compose.py @@ -39,7 +39,7 @@ def test_make_docker_compose_meta( assert target_compose_specs.exists() # valid compose specs - compose_cfg = ComposeSpecification.parse_obj( + compose_cfg = ComposeSpecification.model_validate( yaml.safe_load(target_compose_specs.read_text()) ) assert compose_cfg.services @@ -48,8 +48,8 @@ def test_make_docker_compose_meta( compose_labels = compose_cfg.services[metadata_cfg.service_name()].build.labels assert compose_labels - assert isinstance(compose_labels.__root__, dict) + assert isinstance(compose_labels.root, dict) assert ( - MetadataConfig.from_labels_annotations(compose_labels.__root__) == metadata_cfg + MetadataConfig.from_labels_annotations(compose_labels.root) == metadata_cfg ) diff --git a/packages/service-integration/tests/test_compose_spec_model.py b/packages/service-integration/tests/test_compose_spec_model.py index 63cd0924c993..416dfbb8eef0 100644 --- a/packages/service-integration/tests/test_compose_spec_model.py +++ b/packages/service-integration/tests/test_compose_spec_model.py @@ -9,7 +9,7 @@ def test_autogenerated_compose_spec_model(tests_data_dir: Path): docker_compose_path = tests_data_dir / "docker-compose-meta.yml" # tests if parses valid file - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(docker_compose_path.read_text()) ) diff --git a/packages/service-integration/tests/test_oci_image_spec.py b/packages/service-integration/tests/test_oci_image_spec.py index ef2bd8b47d90..641594c99662 100644 --- a/packages/service-integration/tests/test_oci_image_spec.py +++ b/packages/service-integration/tests/test_oci_image_spec.py @@ -18,7 +18,7 @@ def test_label_schema_to_oci_conversion(monkeypatch): lsa = LabelSchemaAnnotations.create_from_env() - OciImageSpecAnnotations.parse_obj(lsa.to_oci_data()) + OciImageSpecAnnotations.model_validate(lsa.to_oci_data()) def test_create_annotations_from_metadata(tests_data_dir: Path): diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index e993bc25392c..9a5a8bd7a818 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -52,8 +52,8 @@ def test_load_from_labels( runtime_cfg = RuntimeConfig.from_labels_annotations(labels) assert runtime_cfg.callbacks_mapping is not None - print(meta_cfg.json(exclude_unset=True, indent=2)) - print(runtime_cfg.json(exclude_unset=True, indent=2)) + print(meta_cfg.model_dump_json(exclude_unset=True, indent=2)) + print(runtime_cfg.model_dump_json(exclude_unset=True, indent=2)) # create yamls from config for model in (runtime_cfg, meta_cfg): @@ -62,7 +62,7 @@ def test_load_from_labels( ) with open(config_path, "w") as fh: data = json.loads( - model.json(exclude_unset=True, by_alias=True, exclude_none=True) + model.model_dump_json(exclude_unset=True, by_alias=True, exclude_none=True) ) yaml.safe_dump(data, fh, sort_keys=False) @@ -72,7 +72,8 @@ def test_load_from_labels( @pytest.mark.parametrize( - "example_data", 
SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"] + "example_data", + SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"], ) def test_settings_item_in_sync_with_service_settings_label( example_data: dict[str, Any] @@ -81,7 +82,7 @@ def test_settings_item_in_sync_with_service_settings_label( # First we parse with SimcoreServiceSettingLabelEntry since it also supports backwards compatibility # and will upgrade old version - example_model = SimcoreServiceSettingLabelEntry.parse_obj(example_data) + example_model = SimcoreServiceSettingLabelEntry.model_validate(example_data) # SettingsItem is exclusively for NEW labels, so it should not support backwards compatibility new_model = SettingsItem( @@ -91,4 +92,4 @@ def test_settings_item_in_sync_with_service_settings_label( ) # check back - SimcoreServiceSettingLabelEntry.parse_obj(new_model.dict(by_alias=True)) + SimcoreServiceSettingLabelEntry.model_validate(new_model.model_dump(by_alias=True)) diff --git a/packages/service-integration/tests/test_osparc_image_specs.py b/packages/service-integration/tests/test_osparc_image_specs.py index b482bc85a4c5..6bec87425ad2 100644 --- a/packages/service-integration/tests/test_osparc_image_specs.py +++ b/packages/service-integration/tests/test_osparc_image_specs.py @@ -58,8 +58,8 @@ def test_create_image_spec_impl(tests_data_dir: Path, settings: AppSettings): assert build_spec assert isinstance(build_spec, BaseModel) - print(build_spec.json(exclude_unset=True, indent=2)) - print(yaml.safe_dump(compose_spec.dict(exclude_unset=True), sort_keys=False)) + print(build_spec.model_dump_json(exclude_unset=True, indent=2)) + print(yaml.safe_dump(compose_spec.model_dump(exclude_unset=True), sort_keys=False)) def test_image_digest_is_not_a_label_annotation(tests_data_dir: Path): diff --git a/packages/service-integration/tests/test_osparc_runtime_specs.py b/packages/service-integration/tests/test_osparc_runtime_specs.py index 74d63e15e5b0..153c85d27c46 100644 --- a/packages/service-integration/tests/test_osparc_runtime_specs.py +++ b/packages/service-integration/tests/test_osparc_runtime_specs.py @@ -17,8 +17,8 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): osparc_spec: dict = yaml.safe_load((tests_data_dir / "runtime.yml").read_text()) - pm_spec1 = PathMappingsLabel.parse_obj(osparc_spec["paths-mapping"]) - pm_spec2 = PathMappingsLabel.parse_obj( + pm_spec1 = PathMappingsLabel.model_validate(osparc_spec["paths-mapping"]) + pm_spec2 = PathMappingsLabel.model_validate( { "outputs_path": "/outputs", "inputs_path": "/inputs", @@ -58,12 +58,12 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): # FIXME: ensure all sources are different! (e.g. a/b/c and z/c have the same name!) 
- print(Service(volumes=volumes).json(exclude_unset=True, indent=2)) + print(Service(volumes=volumes).model_dump_json(exclude_unset=True, indent=2)) # TODO: _auto_map_to_service(osparc_spec["settings"]) data = {} for obj in osparc_spec["settings"]: - item = SettingsItem.parse_obj(obj) + item = SettingsItem.model_validate(obj) if item.name == "resources": # https://docs.docker.com/compose/compose-file/compose-file-v3/#resources @@ -87,7 +87,7 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): else: raise AssertionError(item) - print(Service(**data).json(exclude_unset=True, indent=2)) + print(Service(**data).model_dump_json(exclude_unset=True, indent=2)) def test_compatibility(): diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index aa776fedb15a..018f64574f29 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -5,6 +5,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 2b33eb1efade..b97b15d09671 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -20,6 +20,8 @@ aiormq==6.8.1 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.6.0 # via # fast-depends @@ -140,7 +142,7 @@ protobuf==4.25.5 # opentelemetry-proto psutil==6.0.0 # via -r requirements/_base.in -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -149,12 +151,24 @@ pydantic==1.10.18 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.3 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -208,6 +222,7 @@ typing-extensions==4.12.2 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer urllib3==2.2.3 # via diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 8a3aed376009..cc0209d56766 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.6.0 # via # httpx @@ -17,11 +19,8 @@ deprecated==1.2.14 # via # opentelemetry-api # opentelemetry-semantic-conventions -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/_fastapi.in # prometheus-fastapi-instrumentator h11==0.14.0 @@ -70,20 +69,21 @@ prometheus-client==0.21.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/_fastapi.in -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # fastapi +pydantic-core==2.23.4 + # via pydantic setuptools==75.1.0 # via opentelemetry-instrumentation sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -93,6 +93,7 @@ typing-extensions==4.12.2 # via # fastapi # pydantic + # pydantic-core uvicorn==0.30.6 # via -r requirements/_fastapi.in wrapt==1.16.0 diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index f1679efbfdd3..b4a044b367a5 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -190,7 +190,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/packages/service-library/requirements/ci.txt b/packages/service-library/requirements/ci.txt index 6d4323d606ef..2c748b3f8608 100644 --- a/packages/service-library/requirements/ci.txt +++ b/packages/service-library/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[aiohttp].txt b/packages/service-library/requirements/ci[aiohttp].txt index 721950755b5f..ee41e3b69a60 100644 --- a/packages/service-library/requirements/ci[aiohttp].txt +++ b/packages/service-library/requirements/ci[aiohttp].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[all].txt b/packages/service-library/requirements/ci[all].txt index f7610e97111a..f43ee95908f9 100644 --- a/packages/service-library/requirements/ci[all].txt +++ b/packages/service-library/requirements/ci[all].txt @@ -13,6 +13,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git 
a/packages/service-library/requirements/ci[fastapi].txt b/packages/service-library/requirements/ci[fastapi].txt index c2c337fd4c05..db051f4ef730 100644 --- a/packages/service-library/requirements/ci[fastapi].txt +++ b/packages/service-library/requirements/ci[fastapi].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/dev.txt b/packages/service-library/requirements/dev.txt index b4da8c10382b..f814830c46b0 100644 --- a/packages/service-library/requirements/dev.txt +++ b/packages/service-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library --editable ../settings-library --editable ../pytest-simcore diff --git a/packages/service-library/requirements/dev[aiohttp].txt b/packages/service-library/requirements/dev[aiohttp].txt index 5e0ae847c641..87748e35d29a 100644 --- a/packages/service-library/requirements/dev[aiohttp].txt +++ b/packages/service-library/requirements/dev[aiohttp].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[all].txt b/packages/service-library/requirements/dev[all].txt index b372254b3255..8b23b6105c5e 100644 --- a/packages/service-library/requirements/dev[all].txt +++ b/packages/service-library/requirements/dev[all].txt @@ -14,6 +14,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[fastapi].txt b/packages/service-library/requirements/dev[fastapi].txt index caea1c80fd54..d66370d79041 100644 --- a/packages/service-library/requirements/dev[fastapi].txt +++ b/packages/service-library/requirements/dev[fastapi].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/src/servicelib/aiohttp/application_setup.py b/packages/service-library/src/servicelib/aiohttp/application_setup.py index 4fae3acc09f9..4da40aa01825 100644 --- a/packages/service-library/src/servicelib/aiohttp/application_setup.py +++ b/packages/service-library/src/servicelib/aiohttp/application_setup.py @@ -8,7 +8,7 @@ import arrow from aiohttp import web -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .application_keys import APP_CONFIG_KEY, APP_SETTINGS_KEY @@ -94,7 +94,9 @@ def _is_addon_enabled_from_config( for part in parts: if section and part == "enabled": # if section exists, no need to explicitly enable it - return parse_obj_as(bool, searched_config.get(part, True)) + return TypeAdapter(bool).validate_python( + searched_config.get(part, True) + ) searched_config = searched_config[part] except KeyError as ee: diff --git a/packages/service-library/src/servicelib/aiohttp/docker_utils.py b/packages/service-library/src/servicelib/aiohttp/docker_utils.py index 636b3492616c..8e9393e1e69c 100644 --- a/packages/service-library/src/servicelib/aiohttp/docker_utils.py +++ 
b/packages/service-library/src/servicelib/aiohttp/docker_utils.py @@ -2,7 +2,7 @@ import aiohttp from models_library.docker import DockerGenericTag -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -68,9 +68,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = await response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -89,8 +89,12 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status == status.HTTP_200_OK # nosec json_response = await response.json() - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index df81371cbb89..47640c491f2c 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -5,8 +5,9 @@ from typing import Any from aiohttp import web +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy from models_library.utils.json_serialization import json_dumps -from pydantic import AnyHttpUrl, PositiveFloat +from pydantic import PositiveFloat, TypeAdapter from ...aiohttp import status from ...long_running_tasks._models import TaskGet @@ -67,17 +68,14 @@ async def start_long_running_task( ip_addr, port = request_.transport.get_extra_info( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - status_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}", - scheme="http", + status_url = TypeAdapter(AnyHttpUrlLegacy).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR ) - result_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}", - scheme="http", + result_url = TypeAdapter(AnyHttpUrlLegacy).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) - abort_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}", - scheme="http", + abort_url = TypeAdapter(AnyHttpUrlLegacy).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( task_id=task_id, diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py index c99cb1ce6713..04071d5d07c8 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py +++ 
b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py @@ -35,7 +35,7 @@ async def _start(session: ClientSession, url: URL, json: RequestBody | None) -> data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - return TaskGet.parse_obj(data) + return TaskGet.model_validate(data) @retry(**_DEFAULT_AIOHTTP_RETRY_POLICY) @@ -57,7 +57,7 @@ async def _wait_for_completion( data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - task_status = TaskStatus.parse_obj(data) + task_status = TaskStatus.model_validate(data) yield task_status.task_progress if not task_status.done: await asyncio.sleep( diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 085243c5d26d..ce4a97d54a1b 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -14,7 +14,7 @@ from aiohttp import web from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, ValidationError, parse_obj_as +from pydantic import BaseModel, ConfigDict, TypeAdapter, ValidationError from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from . import status @@ -31,8 +31,9 @@ class RequestParams(BaseModel): class StrictRequestParams(BaseModel): """Use a base class for context, path and query parameters""" - class Config: - extra = Extra.forbid # strict + model_config = ConfigDict( + extra="forbid", + ) @contextmanager @@ -139,7 +140,7 @@ def parse_request_path_parameters_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.match_info) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) def parse_request_query_parameters_as( @@ -171,8 +172,8 @@ def parse_request_query_parameters_as( data = dict(request.query) if hasattr(parameters_schema_cls, "parse_obj"): - return parameters_schema_cls.parse_obj(data) - model: ModelClass = parse_obj_as(parameters_schema_cls, data) + return parameters_schema_cls.model_validate(data) + model: ModelClass = TypeAdapter(parameters_schema_cls).validate_python(data) return model @@ -188,7 +189,7 @@ def parse_request_headers_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.headers) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) async def parse_request_body_as( @@ -227,7 +228,7 @@ async def parse_request_body_as( # NOTE: model_schema can be 'list[T]' or 'dict[T]' which raise TypeError # with issubclass(model_schema, BaseModel) assert issubclass(model_schema_cls, BaseModel) # nosec - return model_schema_cls.parse_obj(body) # type: ignore [return-value] + return model_schema_cls.model_validate(body) # type: ignore [return-value] # used for model_schema like 'list[T]' or 'dict[T]' - return parse_obj_as(model_schema_cls, body) + return TypeAdapter(model_schema_cls).validate_python(body) # type: ignore[no-any-return] diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py index e7a4c665c49f..b1eba9bc54bc 100644 --- a/packages/service-library/src/servicelib/background_task.py +++ b/packages/service-library/src/servicelib/background_task.py @@ -5,7 +5,7 @@ from collections.abc import AsyncIterator, Awaitable, Callable from typing import Final -from 
pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin from tenacity import TryAgain from tenacity.asyncio import AsyncRetrying from tenacity.stop import stop_after_attempt @@ -21,7 +21,7 @@ _MAX_TASK_CANCELLATION_ATTEMPTS: Final[int] = 3 -class PeriodicTaskCancellationError(PydanticErrorMixin, Exception): +class PeriodicTaskCancellationError(OsparcErrorMixin, Exception): msg_template: str = "Could not cancel task '{task_name}'" diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index ff5ea9fcc7e9..2ce1fab2fb51 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -11,7 +11,7 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ValidationError, parse_obj_as +from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -39,11 +39,11 @@ class DockerLayerSizeV2(BaseModel): media_type: str size: ByteSize digest: str - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) class DockerImageManifestsV2(BaseModel): @@ -51,39 +51,41 @@ class DockerImageManifestsV2(BaseModel): media_type: str config: DockerLayerSizeV2 layers: list[DockerLayerSizeV2] - - class Config: - keep_untouched = (cached_property,) - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + ignored_types=(cached_property,), + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) @cached_property def layers_total_size(self) -> ByteSize: - return parse_obj_as(ByteSize, sum(layer.size for layer in self.layers)) + return TypeAdapter(ByteSize).validate_python( + sum(layer.size for layer in self.layers) + ) class DockerImageMultiArchManifestsV2(BaseModel): schema_version: Literal[2] media_type: Literal["application/vnd.oci.image.index.v1+json"] manifests: list[dict[str, Any]] - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) class _DockerPullImage(BaseModel): status: str - id: str | None - progress_detail: ProgressDetail | None - progress: str | None - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + id: str | None = None + progress_detail: ProgressDetail | None = None + progress: str | None = None + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) DOCKER_HUB_HOST: Final[str] = "registry-1.docker.io" @@ -248,7 +250,9 @@ async def pull_image( image, stream=True, auth=registry_auth ): try: - parsed_progress = parse_obj_as(_DockerPullImage, pull_progress) + parsed_progress = TypeAdapter(_DockerPullImage).validate_python( + pull_progress + ) except ValidationError: _logger.exception( "Unexpected error while validating '%s'. 
" diff --git a/packages/service-library/src/servicelib/fastapi/docker_utils.py b/packages/service-library/src/servicelib/fastapi/docker_utils.py index 1c71c190a472..420c1418873c 100644 --- a/packages/service-library/src/servicelib/fastapi/docker_utils.py +++ b/packages/service-library/src/servicelib/fastapi/docker_utils.py @@ -5,7 +5,7 @@ import httpx from models_library.basic_types import IDStr from models_library.docker import DockerGenericTag -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -22,6 +22,10 @@ from ..logging_utils import log_catch from ..progress_bar import AsyncReportCB, ProgressBarData +_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "200MiB" +) + _logger = logging.getLogger(__name__) @@ -72,9 +76,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -93,16 +97,17 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status_code == status.HTTP_200_OK # nosec json_response = response.json() - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None -_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "200MiB") - - async def pull_images( images: set[DockerGenericTag], registry_settings: RegistrySettings, diff --git a/packages/service-library/src/servicelib/fastapi/errors.py b/packages/service-library/src/servicelib/fastapi/errors.py index 9eebef846377..139ed573fbe2 100644 --- a/packages/service-library/src/servicelib/fastapi/errors.py +++ b/packages/service-library/src/servicelib/fastapi/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ApplicationRuntimeError(PydanticErrorMixin, RuntimeError): +class ApplicationRuntimeError(OsparcErrorMixin, RuntimeError): pass diff --git a/packages/service-library/src/servicelib/fastapi/exceptions_utils.py b/packages/service-library/src/servicelib/fastapi/exceptions_utils.py index d55fc0e0a684..bd5f18448b1e 100644 --- a/packages/service-library/src/servicelib/fastapi/exceptions_utils.py +++ b/packages/service-library/src/servicelib/fastapi/exceptions_utils.py @@ -15,9 +15,11 @@ async def http_exception_as_json_response( - request: Request, exc: HTTPException + request: Request, exc: Exception ) -> JSONResponse: + assert isinstance(exc, HTTPException) # nosec assert request # nosec + error = DefaultApiError.from_status_code(exc.status_code) error_detail = error.detail or "" diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py index e00e0d636a2b..c113321a488f 100644 --- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py +++ 
b/packages/service-library/src/servicelib/fastapi/http_client_thin.py @@ -7,7 +7,7 @@ from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response from httpx._types import TimeoutTypes, URLTypes -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -30,7 +30,7 @@ """ -class BaseClientError(PydanticErrorMixin, Exception): +class BaseClientError(OsparcErrorMixin, Exception): """Used as base for all the raised errors""" msg_template: str = "{message}" diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index a2dda66735aa..3aa82e76a46c 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -4,9 +4,10 @@ import warnings from typing import Any, Awaitable, Callable, Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy from fastapi import FastAPI, status from httpx import AsyncClient, HTTPError -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import PositiveFloat, TypeAdapter from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -23,6 +24,7 @@ DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 + logger = logging.getLogger(__name__) @@ -113,7 +115,7 @@ class Client: status, result and/or cancel of a long running task. """ - def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: AnyHttpUrl): + def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: str): """ `app`: used by the `Client` to recover the `ClientConfiguration` `async_client`: an AsyncClient instance used by `Client` @@ -128,12 +130,9 @@ def _client_configuration(self) -> ClientConfiguration: output: ClientConfiguration = self.app.state.long_running_client_configuration return output - def _get_url(self, path: str) -> AnyHttpUrl: - output: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, - f"{self._base_url}{self._client_configuration.router_prefix}{path}", - ) - return output + def _get_url(self, path: str) -> str: + url = f"{self._base_url}{self._client_configuration.router_prefix}{path}" + return f"{TypeAdapter(AnyHttpUrlLegacy).validate_python(url)}" @retry_on_http_errors async def get_task_status( @@ -152,7 +151,7 @@ async def get_task_status( body=result.text, ) - return TaskStatus.parse_obj(result.json()) + return TaskStatus.model_validate(result.json()) @retry_on_http_errors async def get_task_result( @@ -171,7 +170,7 @@ async def get_task_result( body=result.text, ) - task_result = TaskResult.parse_obj(result.json()) + task_result = TaskResult.model_validate(result.json()) if task_result.error is not None: raise TaskClientResultError(message=task_result.error) return task_result.result diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index 7cb61f29140b..2c0015251736 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -96,7 +96,7 @@ async def
periodic_task_result( async def _status_update() -> TaskStatus: task_status: TaskStatus = await client.get_task_status(task_id) - logger.debug("Task status %s", task_status.json()) + logger.debug("Task status %s", task_status.model_dump_json()) await progress_manager.update( task_id=task_id, message=task_status.task_progress.message, diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py index c5d7429f01ab..e8306b6d1874 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py @@ -50,4 +50,4 @@ async def on_shutdown() -> None: # add error handlers # NOTE: Exception handlers cannot be added during the on_startup script, otherwise they do not work correctly - app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) + app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) # type: ignore[arg-type] diff --git a/packages/service-library/src/servicelib/file_utils.py b/packages/service-library/src/servicelib/file_utils.py index c90468cba2ab..a52854c26e7f 100644 --- a/packages/service-library/src/servicelib/file_utils.py +++ b/packages/service-library/src/servicelib/file_utils.py @@ -10,9 +10,9 @@ # https://docs.python.org/3/library/os.html#os.remove from aiofiles.os import remove from aiofiles.os import wrap as sync_to_async -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter -CHUNK_4KB: Final[ByteSize] = parse_obj_as(ByteSize, "4kb") # 4K blocks +CHUNK_4KB: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("4kb") # 4K blocks class AsyncStream(Protocol): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_errors.py b/packages/service-library/src/servicelib/long_running_tasks/_errors.py index 73722f746ac0..44dc03157f20 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_errors.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_errors.py @@ -1,10 +1,10 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseLongRunningError(PydanticErrorMixin, Exception): +class BaseLongRunningError(OsparcErrorMixin, Exception): """base exception for this module""" - code: str = "long_running_task.base_long_running_error" + code: str = "long_running_task.base_long_running_error" # type: ignore[assignment] class TaskAlreadyRunningError(BaseLongRunningError): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index b211ca29fdc5..fc240160b819 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -15,7 +15,7 @@ TaskResult, TaskStatus, ) -from pydantic import BaseModel, Field, PositiveFloat +from pydantic import BaseModel, ConfigDict, Field, PositiveFloat TaskName: TypeAlias = str @@ -46,9 +46,9 @@ class TrackedTask(BaseModel): "polled by the client who created it" ), ) - - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) class ClientConfiguration(BaseModel): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_task.py b/packages/service-library/src/servicelib/long_running_tasks/_task.py index
88960cb6327a..641e78a96a86 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_task.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_task.py @@ -123,7 +123,9 @@ async def _stale_tasks_monitor_worker(self) -> None: logger.warning( "Removing stale task '%s' with status '%s'", task_id, - self.get_task_status(task_id, with_task_context=None).json(), + self.get_task_status( + task_id, with_task_context=None + ).model_dump_json(), ) await self.remove_task( task_id, with_task_context=None, reraise_errors=False @@ -210,7 +212,7 @@ def get_task_status( task = tracked_task.task done = task.done() - return TaskStatus.parse_obj( + return TaskStatus.model_validate( { "task_progress": tracked_task.task_progress, "done": done, diff --git a/packages/service-library/src/servicelib/progress_bar.py b/packages/service-library/src/servicelib/progress_bar.py index 782f89ba550c..bf70c0c3e889 100644 --- a/packages/service-library/src/servicelib/progress_bar.py +++ b/packages/service-library/src/servicelib/progress_bar.py @@ -10,7 +10,7 @@ ProgressStructuredMessage, ProgressUnit, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .logging_utils import log_catch @@ -95,7 +95,7 @@ async def main_fct(): def __post_init__(self) -> None: if self.progress_unit is not None: - parse_obj_as(ProgressUnit, self.progress_unit) # type: ignore[arg-type] # mypy does not like Literal with parse_obj_as + TypeAdapter(ProgressUnit).validate_python(self.progress_unit) self._continuous_value_lock = asyncio.Lock() self.num_steps = max(1, self.num_steps) if self.step_weights: diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 0e3efbf3a114..c105c2b8ff34 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -1,21 +1,21 @@ from typing import Final -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin _ERROR_PREFIX: Final[str] = "rabbitmq_error" -class BaseRPCError(PydanticErrorMixin, RuntimeError): +class BaseRPCError(OsparcErrorMixin, RuntimeError): ... class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" + code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] msg_template = "Please check that the RabbitMQ RPC backend was initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" + code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. 
" diff --git a/packages/service-library/src/servicelib/rabbitmq/_models.py b/packages/service-library/src/servicelib/rabbitmq/_models.py index 565447072fac..e48e4bb13aab 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_models.py +++ b/packages/service-library/src/servicelib/rabbitmq/_models.py @@ -2,12 +2,13 @@ from collections.abc import Awaitable, Callable from typing import Any, Protocol +from models_library.basic_types import ConstrainedStr from models_library.rabbitmq_basic_types import ( REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS, RPCMethodName, RPCNamespace, ) -from pydantic import ConstrainedStr, parse_obj_as +from pydantic import TypeAdapter MessageHandler = Callable[[Any], Awaitable[bool]] @@ -23,11 +24,11 @@ def routing_key(self) -> str | None: class RPCNamespacedMethodName(ConstrainedStr): min_length: int = 1 max_length: int = 255 - regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS) + pattern: str = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS @classmethod def from_namespace_and_method( cls, namespace: RPCNamespace, method_name: RPCMethodName ) -> "RPCNamespacedMethodName": namespaced_method_name = f"{namespace}.{method_name}" - return parse_obj_as(cls, namespaced_method_name) + return TypeAdapter(cls).validate_python(namespaced_method_name) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/errors.py index ae21b8f09a70..b297004e2837 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseAgentRPCError(PydanticErrorMixin, Exception): +class BaseAgentRPCError(OsparcErrorMixin, Exception): ... 
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py index 65c403853eab..d278bb350bae 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py @@ -1,11 +1,8 @@ -from typing import Any - -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class CatalogApiBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + pass class CatalogItemNotFoundError(CatalogApiBaseError): diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py index 5e7595ddea94..1c168a6d1b13 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py @@ -16,7 +16,7 @@ ) from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID -from pydantic import NonNegativeInt, parse_obj_as, validate_arguments +from pydantic import NonNegativeInt, TypeAdapter, validate_call from servicelib.logging_utils import log_decorator from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S @@ -40,7 +40,7 @@ async def list_services_paginated( # pylint: disable=too-many-arguments CatalogForbiddenError: no access-rights to list services """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -49,7 +49,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_services_paginated"), + TypeAdapter(RPCMethodName).validate_python("list_services_paginated"), product_name=product_name, user_id=user_id, limit=limit, @@ -60,7 +60,9 @@ async def _call( result = await _call( product_name=product_name, user_id=user_id, limit=limit, offset=offset ) - assert parse_obj_as(PageRpc[ServiceGetV2], result) is not None # nosec + assert ( + TypeAdapter(PageRpc[ServiceGetV2]).validate_python(result) is not None + ) # nosec return cast(PageRpc[ServiceGetV2], result) @@ -80,7 +82,7 @@ async def get_service( CatalogForbiddenError: no access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -89,7 +91,7 @@ ) -> Any: return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service"), + TypeAdapter(RPCMethodName).validate_python("get_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -103,7 +105,7 @@ async def _call( service_key=service_key, service_version=service_version, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -125,7 +127,7 @@ async def update_service( CatalogForbiddenError: no access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -135,7 +137,7 @@ ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_service"), +
TypeAdapter(RPCMethodName).validate_python("update_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -150,7 +152,7 @@ async def _call( service_version=service_version, update=update, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -170,7 +172,7 @@ async def check_for_service( CatalogForbiddenError: no access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -179,7 +181,7 @@ ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "check_for_service"), + TypeAdapter(RPCMethodName).validate_python("check_for_service"), product_name=product_name, user_id=user_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py index 5e104db333cf..6d7bf2a722c7 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseDynamicSchedulerRPCError(PydanticErrorMixin, Exception): +class BaseDynamicSchedulerRPCError(OsparcErrorMixin, Exception): ... diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index 9da2dad425e2..3dcc9ed502fa 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -10,7 +10,7 @@ from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient @@ -26,6 +26,8 @@ DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S * 2 ) +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_status( @@ -33,7 +35,7 @@ ) -> NodeGetIdle | DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_status"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_status"), node_id=node_id, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -49,7 +51,7 @@ async def run_dynamic_service( ) -> DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "run_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("run_dynamic_service"), dynamic_service_start=dynamic_service_start, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -66,7 +68,7 @@ async def stop_dynamic_service( ) -> None: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, -
parse_obj_as(RPCMethodName, "stop_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("stop_dynamic_service"), dynamic_service_stop=dynamic_service_stop, timeout_s=timeout_s, ) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py index 592959eb08c1..ec05906b1ef4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -27,7 +27,7 @@ async def create_project_specific_data_dir( ) -> Path: output: Path = await rabbitmq_rpc_client.request( EFS_GUARDIAN_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_project_specific_data_dir"), + TypeAdapter(RPCMethodName).validate_python("create_project_specific_data_dir"), project_id=project_id, node_id=node_id, storage_directory_name=storage_directory_name, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 445498418026..f9c1a24f4060 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ResourceUsageTrackerRuntimeError(PydanticErrorMixin, RuntimeError): +class ResourceUsageTrackerRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "Resource-usage-tracker unexpected error" diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py index a7dc4b5d4044..218cd139fb4d 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py @@ -16,7 +16,7 @@ PricingPlanUpdate, ) from models_library.services import ServiceKey, ServiceVersion -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -26,6 +26,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_plan( @@ -36,7 +38,7 @@ async def get_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, timeout_s=_DEFAULT_TIMEOUT_S, @@ -53,7 +55,7 @@ async 
def list_pricing_plans( ) -> list[PricingPlanGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_pricing_plans"), + _RPC_METHOD_NAME_ADAPTER.validate_python("list_pricing_plans"), product_name=product_name, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -69,7 +71,7 @@ async def create_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_plan"), data=data, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -86,7 +88,7 @@ async def update_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_plan"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -104,8 +106,8 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ) -> list[PricingPlanToServiceGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as( - RPCMethodName, "list_connected_services_to_pricing_plan_by_pricing_plan" + _RPC_METHOD_NAME_ADAPTER.validate_python( + "list_connected_services_to_pricing_plan_by_pricing_plan" ), product_name=product_name, pricing_plan_id=pricing_plan_id, @@ -126,7 +128,7 @@ async def connect_service_to_pricing_plan( ) -> PricingPlanToServiceGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "connect_service_to_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("connect_service_to_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py index cec80e7186a5..afa5611a92d4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py @@ -15,7 +15,7 @@ PricingUnitWithCostCreate, PricingUnitWithCostUpdate, ) -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -25,6 +25,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_unit( @@ -36,7 +38,7 @@ async def get_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_unit"), product_name=product_name, pricing_plan_id=pricing_plan_id, pricing_unit_id=pricing_unit_id, @@ -55,7 +57,7 @@ async def create_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_unit"), 
product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -73,7 +75,7 @@ async def update_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py index e826363897a3..ad7b2fd908b4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyUrl, NonNegativeInt, parse_obj_as +from pydantic import AnyUrl, NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -28,6 +28,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_run_page( @@ -44,7 +46,7 @@ async def get_service_run_page( ) -> ServiceRunPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_run_page"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_run_page"), user_id=user_id, product_name=product_name, limit=limit, @@ -74,7 +76,9 @@ async def get_osparc_credits_aggregated_usages_page( ) -> OsparcCreditsAggregatedUsagesPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_osparc_credits_aggregated_usages_page"), + _RPC_METHOD_NAME_ADAPTER.validate_python( + "get_osparc_credits_aggregated_usages_page" + ), user_id=user_id, product_name=product_name, limit=limit, @@ -102,7 +106,7 @@ async def export_service_runs( ) -> AnyUrl: result: AnyUrl = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "export_service_runs"), + _RPC_METHOD_NAME_ADAPTER.validate_python("export_service_runs"), user_id=user_id, product_name=product_name, wallet_id=wallet_id, diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index 03847ae0b046..fce89d7790ee 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -10,8 +10,8 @@ import redis.asyncio as aioredis import redis.exceptions +from common_library.errors_classes import OsparcErrorMixin from pydantic import NonNegativeFloat, NonNegativeInt -from pydantic.errors import PydanticErrorMixin from redis.asyncio.lock import Lock from redis.asyncio.retry import Retry from redis.backoff import ExponentialBackoff @@ -36,7 +36,7 @@ _logger = logging.getLogger(__name__) -class BaseRedisError(PydanticErrorMixin, RuntimeError): +class BaseRedisError(OsparcErrorMixin, RuntimeError): ... 
diff --git a/packages/service-library/src/servicelib/utils_meta.py b/packages/service-library/src/servicelib/utils_meta.py index 46fa78dd83ee..6ee48fd4d56d 100644 --- a/packages/service-library/src/servicelib/utils_meta.py +++ b/packages/service-library/src/servicelib/utils_meta.py @@ -6,7 +6,7 @@ from models_library.basic_types import VersionStr from packaging.version import Version -from pydantic import parse_obj_as +from pydantic import TypeAdapter class PackageInfo: @@ -40,7 +40,7 @@ def version(self) -> Version: @property def __version__(self) -> VersionStr: - return parse_obj_as(VersionStr, self._distribution.version) + return TypeAdapter(VersionStr).validate_python(self._distribution.version) @property def api_prefix_path_tag(self) -> str: diff --git a/packages/service-library/src/servicelib/utils_secrets.py b/packages/service-library/src/servicelib/utils_secrets.py index 66ccb9a1ddf3..389aab962725 100644 --- a/packages/service-library/src/servicelib/utils_secrets.py +++ b/packages/service-library/src/servicelib/utils_secrets.py @@ -2,7 +2,7 @@ import string from typing import Any, Final -from pydantic import StrictInt, validate_arguments +from pydantic import StrictInt, validate_call MIN_PASSWORD_LENGTH = 30 _SAFE_SYMBOLS = "!$%*+,-.:=?@^_~" # avoid issues with parsing, escapes etc @@ -48,7 +48,7 @@ def are_secrets_equal(got: str, expected: str) -> bool: return secrets.compare_digest(got.encode("utf8"), expected.encode("utf8")) -@validate_arguments +@validate_call def secure_randint(start: StrictInt, end: StrictInt) -> int: """Generate a random integer between start (inclusive) and end (exclusive).""" if start >= end: diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index bac102ab1272..8fe29473cfcb 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -9,7 +9,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from faker import Faker -from pydantic import BaseModel, parse_obj_as +from pydantic import BaseModel, TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskId @@ -93,7 +93,7 @@ async def _caller(client: TestClient, **query_kwargs) -> TaskId: data, error = await assert_status(resp, status.HTTP_202_ACCEPTED) assert data assert not error - task_get = parse_obj_as(long_running_tasks.server.TaskGet, data) + task_get = TypeAdapter(long_running_tasks.server.TaskGet).validate_python(data) return task_get.task_id return _caller @@ -123,7 +123,7 @@ async def _waiter( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status assert task_status.done diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index afd9e8f4fde9..7907f092c249 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils
import TestClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId @@ -75,12 +75,12 @@ async def test_workflow( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: {task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status completed successfully: {json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" @@ -216,7 +216,7 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS # the task name is properly formatted @@ -235,5 +235,5 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py index 941ae31359d0..5671eda108fb 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from pydantic import create_model, parse_obj_as +from pydantic import TypeAdapter, create_model from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks._server import ( @@ -108,7 +108,7 @@ async def test_list_tasks( result = await client_with_task_context.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 0 # the list should be full if we pass the expected context @@ -117,7 +117,7 @@ async def test_list_tasks( ) data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 1 diff --git a/packages/service-library/tests/aiohttp/test_docker_utils.py b/packages/service-library/tests/aiohttp/test_docker_utils.py index 890ffdc588b7..bcd2129abd24 100644 --- a/packages/service-library/tests/aiohttp/test_docker_utils.py +++ b/packages/service-library/tests/aiohttp/test_docker_utils.py @@ -11,7 +11,7 @@ 
from faker import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.aiohttp.docker_utils import retrieve_image_layer_information @@ -42,8 +42,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( @@ -97,13 +96,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 08e2f07bfbe7..4e1b4f4e2e71 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -11,7 +11,7 @@ from aiohttp.test_utils import TestClient from faker import Faker from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, @@ -41,9 +41,9 @@ def create_fake(cls, faker: Faker): class MyRequestPathParams(BaseModel): project_uuid: UUID - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @classmethod def create_fake(cls, faker: Faker): @@ -55,7 +55,7 @@ class MyRequestQueryParams(BaseModel): label: str def as_params(self, **kwargs) -> dict[str, str]: - data = self.dict(**kwargs) + data = self.model_dump(**kwargs) return {k: f"{v}" for k, v in data.items()} @classmethod @@ -66,9 +66,9 @@ def create_fake(cls, faker: Faker): class MyRequestHeadersParams(BaseModel): user_agent: str = Field(alias="X-Simcore-User-Agent") optional_header: str | None = Field(default=None, alias="X-Simcore-Optional-Header") - - class Config: - allow_population_by_field_name = False + model_config = ConfigDict( + populate_by_name=False, + ) @classmethod def create_fake(cls, faker: Faker): @@ -111,7 +111,9 @@ def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient: async def _handler(request: web.Request) -> web.Response: # --------- UNDER TEST ------- # NOTE: app context does NOT need to be validated every time!
- context = MyRequestContext.parse_obj({**dict(request.app), **dict(request)}) + context = MyRequestContext.model_validate( + {**dict(request.app), **dict(request)} + ) path_params = parse_request_path_parameters_as( MyRequestPathParams, request, use_enveloped_error_v1=False @@ -129,11 +131,11 @@ async def _handler(request: web.Request) -> web.Response: return web.json_response( { - "parameters": path_params.dict(), - "queries": query_params.dict(), - "body": body.dict(), - "context": context.dict(), - "headers": headers_params.dict(), + "parameters": path_params.model_dump(), + "queries": query_params.model_dump(), + "body": body.model_dump(), + "context": context.model_dump(), + "headers": headers_params.model_dump(), }, dumps=json_dumps, ) @@ -194,21 +196,21 @@ async def test_parse_request_as( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_200_OK, f"{await r.text()}" got = await r.json() - assert got["parameters"] == jsonable_encoder(path_params.dict()) - assert got["queries"] == jsonable_encoder(query_params.dict()) - assert got["body"] == body.dict() + assert got["parameters"] == jsonable_encoder(path_params.model_dump()) + assert got["queries"] == jsonable_encoder(query_params.model_dump()) + assert got["body"] == body.model_dump() assert got["context"] == { "secret": client.app[APP_SECRET_KEY], "user_id": 42, } - assert got["headers"] == jsonable_encoder(headers_params.dict()) + assert got["headers"] == jsonable_encoder(headers_params.model_dump()) async def test_parse_request_with_invalid_path_params( @@ -221,8 +223,8 @@ async def test_parse_request_with_invalid_path_params( r = await client.get( "/projects/invalid-uuid", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -234,8 +236,8 @@ async def test_parse_request_with_invalid_path_params( "details": [ { "loc": "project_uuid", - "msg": "value is not a valid uuid", - "type": "type_error.uuid", + "msg": "Input should be a valid UUID, invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1", + "type": "uuid_parsing", } ], } @@ -252,8 +254,8 @@ async def test_parse_request_with_invalid_query_params( r = await client.get( f"/projects/{path_params.project_uuid}", params={}, - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -265,8 +267,8 @@ async def test_parse_request_with_invalid_query_params( "details": [ { "loc": "label", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } @@ -284,7 +286,7 @@ async def test_parse_request_with_invalid_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), json={"invalid": "body"}, - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -298,13 +300,13 @@ async def test_parse_request_with_invalid_body( "details": [ { "loc": "x", - "msg": "field 
required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, { "loc": "z", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -322,7 +324,7 @@ async def test_parse_request_with_invalid_json_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), data=b"[ 1 2, 3 'broken-json' ]", - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) body = await r.text() @@ -340,8 +342,8 @@ async def test_parse_request_with_invalid_headers_params( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(), # we pass the wrong names + json=body.model_dump(), + headers=headers_params.model_dump(), # we pass the wrong names ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -353,8 +355,8 @@ async def test_parse_request_with_invalid_headers_params( "details": [ { "loc": "X-Simcore-User-Agent", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 0ba848178d8e..8faea8d76ff2 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -110,8 +110,8 @@ async def _commands_handler( ) -> Any: """Handles all commands send by remote party""" if command == "init-context": - context.redis_settings = RedisSettings.parse_raw(payload["redis"]) - context.rabbit_settings = RabbitSettings.parse_raw(payload["rabbit"]) + context.redis_settings = RedisSettings.model_validate_json(payload["redis"]) + context.rabbit_settings = RabbitSettings.model_validate_json(payload["rabbit"]) # using the same db as the deferred tasks with different keys context.in_memory_lists = InMemoryLists(context.redis_settings, port) diff --git a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py index 3ec3bde01ed7..366759e22d3b 100644 --- a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py +++ b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py @@ -5,7 +5,7 @@ from datetime import timedelta import pytest -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.deferred_tasks._models import TaskUID from servicelib.deferred_tasks._redis_task_tracker import RedisTaskTracker from servicelib.deferred_tasks._task_schedule import TaskScheduleModel, TaskState @@ -19,8 +19,7 @@ @pytest.fixture def task_schedule() -> TaskScheduleModel: - return parse_obj_as( - TaskScheduleModel, + return TypeAdapter(TaskScheduleModel).validate_python( { "timeout": timedelta(seconds=1), "execution_attempts": 1, diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index b14f72618ec9..a03b87c41512 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -16,6 +16,8 @@ import psutil import pytest from aiohttp.test_utils import unused_port +from common_library.serialization import model_dump_with_secrets +from 
models_library.utils.json_serialization import json_dumps from pydantic import NonNegativeFloat, NonNegativeInt from pytest_mock import MockerFixture from servicelib import redis as servicelib_redis @@ -24,7 +26,6 @@ from servicelib.sequences_utils import partition_gen from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from settings_library.utils_encoders import create_json_encoder_wo_secrets from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -125,7 +126,6 @@ async def _tcp_command( def _get_serialization_options() -> dict[str, Any]: return { - "encoder": create_json_encoder_wo_secrets(RabbitSettings), "exclude_defaults": True, "exclude_none": True, "exclude_unset": True, @@ -160,8 +160,20 @@ async def start(self) -> None: response = await _tcp_command( "init-context", { - "rabbit": self.rabbit_service.json(**_get_serialization_options()), - "redis": self.redis_service.json(**_get_serialization_options()), + "rabbit": json_dumps( + model_dump_with_secrets( + self.rabbit_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), + "redis": json_dumps( + model_dump_with_secrets( + self.redis_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), "max-workers": self.max_workers, }, port=self.remote_process.port, diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index bd55b44d4988..52527f138d98 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.fastapi import long_running_tasks from servicelib.long_running_tasks._models import TaskGet, TaskId from servicelib.long_running_tasks._task import TaskContext @@ -94,7 +94,9 @@ async def _caller(app: FastAPI, client: AsyncClient, **query_kwargs) -> TaskId: ) resp = await client.post(f"{url}") assert resp.status_code == status.HTTP_202_ACCEPTED - task_id = parse_obj_as(long_running_tasks.server.TaskId, resp.json()) + task_id = TypeAdapter(long_running_tasks.server.TaskId).validate_python( + resp.json() + ) return task_id return _caller @@ -122,7 +124,7 @@ async def _waiter( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj( + task_status = long_running_tasks.server.TaskStatus.model_validate( result.json() ) assert task_status @@ -149,12 +151,14 @@ async def test_workflow( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj(result.json()) + task_status = long_running_tasks.server.TaskStatus.model_validate( + result.json() + ) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: {task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status 
completed successfully: {json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" @@ -179,7 +183,7 @@ async def test_workflow( result = await client.get(f"{result_url}") # NOTE: this is DIFFERENT than with aiohttp where we return the real result assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.error assert task_result.result == [f"{x}" for x in range(10)] # getting the result again should raise a 404 @@ -218,7 +222,7 @@ async def test_failing_task_returns_error( result_url = app.url_path_for("get_task_result", task_id=task_id) result = await client.get(f"{result_url}") assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.result assert task_result.error @@ -274,7 +278,7 @@ async def test_list_tasks_empty_list(app: FastAPI, client: AsyncClient): list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert list_of_tasks == [] @@ -296,7 +300,7 @@ async def test_list_tasks( list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS # now wait for them to finish @@ -311,5 +315,5 @@ async def test_list_tasks( # the list shall go down one by one result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py index 9f15184b0520..0dc440bf33e9 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py @@ -4,11 +4,12 @@ import asyncio from typing import AsyncIterable, Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import pytest from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import PositiveFloat, TypeAdapter from servicelib.fastapi.long_running_tasks._context_manager import _ProgressManager from servicelib.fastapi.long_running_tasks.client import ( Client, @@ -90,7 +91,7 @@ async def bg_task_app( @pytest.fixture def mock_task_id() -> TaskId: - return parse_obj_as(TaskId, "fake_task_id") + return TypeAdapter(TaskId).validate_python("fake_task_id") async def test_task_result( @@ -100,7 +101,7 @@ async def test_task_result( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url 
= parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) async with periodic_task_result( client, @@ -120,7 +121,7 @@ async def test_task_result_times_out( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) timeout = TASK_SLEEP_INTERVAL / 10 with pytest.raises(TaskClientTimeoutError) as exec_info: @@ -146,7 +147,7 @@ async def test_task_result_task_result_is_an_error( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) with pytest.raises(TaskClientResultError) as exec_info: async with periodic_task_result( @@ -185,13 +186,13 @@ async def progress_update( assert received == ("", None) for _ in range(repeat): - await progress_updater.update(mock_task_id, percent=ProgressPercent(0.0)) + await progress_updater.update(mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(0.0)) assert counter == 2 assert received == ("", 0.0) for _ in range(repeat): await progress_updater.update( - mock_task_id, percent=ProgressPercent(1.0), message="done" + mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(1.0), message="done" ) assert counter == 3 assert received == ("done", 1.0) diff --git a/packages/service-library/tests/fastapi/test_docker_utils.py b/packages/service-library/tests/fastapi/test_docker_utils.py index 4db0db99bd02..f6d78066c97f 100644 --- a/packages/service-library/tests/fastapi/test_docker_utils.py +++ b/packages/service-library/tests/fastapi/test_docker_utils.py @@ -12,7 +12,7 @@ from faker import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.docker_utils import pull_image @@ -46,8 +46,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( @@ -103,13 +102,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) @@ -202,7 +201,7 @@ 
async def test_pull_image_without_layer_information( assert layer_information print(f"{image=} has {layer_information.layers_total_size=}") - fake_number_of_steps = parse_obj_as(ByteSize, "200MiB") + fake_number_of_steps = TypeAdapter(ByteSize).validate_python("200MiB") assert fake_number_of_steps > layer_information.layers_total_size async with progress_bar.ProgressBarData( num_steps=fake_number_of_steps, diff --git a/packages/service-library/tests/fastapi/test_exceptions_utils.py b/packages/service-library/tests/fastapi/test_exceptions_utils.py index 845043f34059..cfe7fbde0e8d 100644 --- a/packages/service-library/tests/fastapi/test_exceptions_utils.py +++ b/packages/service-library/tests/fastapi/test_exceptions_utils.py @@ -10,7 +10,7 @@ from fastapi import FastAPI, HTTPException from httpx import AsyncClient from models_library.api_schemas__common.errors import DefaultApiError -from pydantic import parse_raw_as +from pydantic import TypeAdapter from servicelib.fastapi.exceptions_utils import ( handle_errors_as_500, http_exception_as_json_response, @@ -66,7 +66,7 @@ async def test_http_errors_respond_with_error_model( response = await client.post(f"/error/{code}") assert response.status_code == code - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) assert error.detail == f"test {code}" assert error.name @@ -79,4 +79,4 @@ async def test_non_http_error_handling( response = await client.post(f"/raise/{code}") print(response) - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index f98de720c33a..7bd96b25eeec 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -3,6 +3,7 @@ import logging from collections.abc import AsyncIterable, Iterable from typing import Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import arrow import pytest @@ -15,7 +16,7 @@ TransportError, codes, ) -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from respx import MockRouter from servicelib.fastapi.http_client_thin import ( BaseThinClient, @@ -76,12 +77,14 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture -def test_url() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") +def test_url() -> str: + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://missing-host:1111") + return f"{url}" async def test_connection_error( - thick_client: FakeThickClient, test_url: AnyHttpUrl + thick_client: FakeThickClient, + test_url: str, ) -> None: with pytest.raises(ClientHttpError) as exe_info: await thick_client.get_provided_url(test_url) @@ -92,7 +95,7 @@ async def test_retry_on_errors( request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, caplog_info_level: pytest.LogCaptureFixture, ) -> None: client = FakeThickClient(total_retry_interval=request_timeout) @@ -108,7 +111,7 @@ async def test_retry_on_errors_by_error_type( error_class: type[RequestError], caplog_info_level: pytest.LogCaptureFixture, request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, ) -> None: class ATestClient(BaseThinClient): # pylint: disable=no-self-use @@ -177,7 +180,7 @@
async def public_method_no_annotation(self): async def test_expect_state_decorator( - test_url: AnyHttpUrl, + test_url: str, respx_mock: MockRouter, request_timeout: int, ) -> None: diff --git a/packages/service-library/tests/fastapi/test_openapi.py b/packages/service-library/tests/fastapi/test_openapi.py index f7dd5744c741..7df0ab63a9f0 100644 --- a/packages/service-library/tests/fastapi/test_openapi.py +++ b/packages/service-library/tests/fastapi/test_openapi.py @@ -48,7 +48,7 @@ def test_exclusive_min_openapi_issue(app: FastAPI): def test_overriding_openapi_method(app: FastAPI): assert not hasattr(app, "_original_openapi") - assert app.openapi.__doc__ is None + # assert app.openapi.__doc__ is None # PC why was this set to check that it is none? it's coming from the base fastapi application and now they provide some docs override_fastapi_openapi_method(app) diff --git a/packages/service-library/tests/fastapi/test_rabbitmq.py b/packages/service-library/tests/fastapi/test_rabbitmq.py index 9c94cfa07660..b41a94097f29 100644 --- a/packages/service-library/tests/fastapi/test_rabbitmq.py +++ b/packages/service-library/tests/fastapi/test_rabbitmq.py @@ -132,6 +132,6 @@ async def test_post_message( f"--> checking for message in rabbit exchange {rabbit_message.channel_name}, {attempt.retry_state.retry_object.statistics}" ) mocked_message_handler.assert_called_once_with( - rabbit_message.json().encode() + rabbit_message.model_dump_json().encode() ) print("... message received") diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py index 3019b07d6abf..ba7576e30277 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py @@ -86,13 +86,13 @@ async def test_rabbit_client_with_paused_container( await rabbit_client.publish(exchange_name, message) -def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, str]: +def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, Any]: return { "scheme": "http", - "user": rabbit_service.RABBIT_USER, + "username": rabbit_service.RABBIT_USER, "password": rabbit_service.RABBIT_PASSWORD.get_secret_value(), "host": rabbit_service.RABBIT_HOST, - "port": "15672", + "port": 15672, } diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index e192afc611e6..46588de6e87d 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -350,11 +350,12 @@ async def _a_handler() -> None: pass if expect_fail: - with pytest.raises(ValidationError) as exec_info: + with pytest.raises( + ValidationError, match="String should have at most 255 characters" + ): await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler ) - assert "ensure this value has at most 255 characters" in f"{exec_info.value}" else: await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py index b07f8e8cb8db..2615a92ac566 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py @@ -26,18 +26,18 @@ def test_rpc_namespace_sorts_elements():
def test_rpc_namespace_too_long(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({f"test{i}": f"test{i}" for i in range(20)}) - assert "ensure this value has at most 252 characters" in f"{exec_info.value}" + assert "String should have at most 252 characters" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_too_short(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({}) - assert "ensure this value has at least 1 characters" in f"{exec_info.value}" + assert "String should have at least 1 character" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_invalid_symbols(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({"test": "@"}) - assert "string does not match regex" in f"{exec_info.value}" + assert "String should match pattern" in f"{exec_info.value}" diff --git a/packages/service-library/tests/test_archiving_utils.py b/packages/service-library/tests/test_archiving_utils.py index 3996be43ca5e..073111eb22b2 100644 --- a/packages/service-library/tests/test_archiving_utils.py +++ b/packages/service-library/tests/test_archiving_utils.py @@ -20,7 +20,7 @@ import pytest from faker import Faker from PIL import Image -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture from servicelib import archiving_utils from servicelib.archiving_utils import ArchiveError, archive_dir, unarchive_dir @@ -569,7 +569,8 @@ async def _archive_dir_performance( @pytest.mark.skip(reason="manual testing") @pytest.mark.parametrize( - "compress, file_size, num_files", [(False, parse_obj_as(ByteSize, "1Mib"), 10000)] + "compress, file_size, num_files", + [(False, TypeAdapter(ByteSize).validate_python("1Mib"), 10000)], ) def test_archive_dir_performance( benchmark: BenchmarkFixture, diff --git a/packages/settings-library/requirements/_base.in b/packages/settings-library/requirements/_base.in index ec1d848cc85d..91f4dd23b049 100644 --- a/packages/settings-library/requirements/_base.in +++ b/packages/settings-library/requirements/_base.in @@ -1,10 +1,11 @@ # -# Specifies third-party dependencies for 'models-library' +# Specifies third-party dependencies for 'settings-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in -pydantic>=1.9 - +pydantic +pydantic-settings # extra rich diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index a97700778c33..64117c65d55d 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -1,15 +1,26 @@ +annotated-types==0.7.0 + # via pydantic click==8.1.7 # via typer markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -pydantic==1.10.18 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/_base.in pygments==2.18.0 # via rich +python-dotenv==1.0.1 + # via pydantic-settings rich==13.8.1 # via # -r 
requirements/_base.in @@ -21,4 +32,5 @@ typer==0.12.5 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index d4aa9b9224c6..b9152c956e82 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -32,7 +32,9 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in six==1.16.0 # via python-dateutil termcolor==2.4.0 diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index c4aced798239..f535a4dc0269 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../common-library/ pytest-simcore @ ../pytest-simcore # current module diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index 32d383e9cccf..de2adb4ecbb1 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages needed to develop 'models-library' +# Shortcut to install all packages needed to develop 'settings-library' # # - As ci.txt but with current and repo packages in develop (edit) mode # @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../pytest-simcore/ # current module diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 296b453e26cb..7bdd7d2cd007 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,19 +1,12 @@ import logging -from collections.abc import Sequence from functools import cached_property -from typing import Final, get_args, get_origin - -from pydantic import ( - BaseConfig, - BaseSettings, - ConfigError, - Extra, - ValidationError, - validator, -) -from pydantic.error_wrappers import ErrorList, ErrorWrapper -from pydantic.fields import ModelField, Undefined -from pydantic.typing import is_literal_type +from typing import Any, Final, get_origin + +from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable +from pydantic import ValidationInfo, field_validator +from pydantic.fields import FieldInfo +from pydantic_core import ValidationError +from pydantic_settings import BaseSettings, SettingsConfigDict _logger = logging.getLogger(__name__) @@ -22,41 +15,33 @@ ] = "%s auto_default_from_env unresolved, defaulting to None" -class DefaultFromEnvFactoryError(ValidationError): - ... +class DefaultFromEnvFactoryError(ValueError): + def __init__(self, errors): + super().__init__() + self.errors = errors -def create_settings_from_env(field: ModelField): +def _create_settings_from_env(field_name: str, info: FieldInfo): # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) # this value is still not resolved (field.type_ at that moment has a weak_ref). 
# Therefore we keep the entire 'field' but MUST be treated here as read-only def _default_factory(): """Creates default from sub-settings or None (if nullable)""" - field_settings_cls = field.type_ + field_settings_cls = get_type(info) try: return field_settings_cls() except ValidationError as err: - if field.allow_none: + if is_nullable(info): # e.g. Optional[PostgresSettings] would warn if defaults to None _logger.warning( _DEFAULTS_TO_NONE_MSG, - field.name, + field_name, ) return None - - def _prepend_field_name(ee: ErrorList): - if isinstance(ee, ErrorWrapper): - return ErrorWrapper(ee.exc, (field.name, *ee.loc_tuple())) - assert isinstance(ee, Sequence) # nosec - return [_prepend_field_name(e) for e in ee] - - raise DefaultFromEnvFactoryError( - errors=_prepend_field_name(err.raw_errors), - model=err.model, - # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field - ) from err + _logger.warning("Validation errors=%s", err.errors()) + raise DefaultFromEnvFactoryError(errors=err.errors()) from err return _default_factory @@ -70,40 +55,46 @@ class BaseCustomSettings(BaseSettings): SEE tests for details. """ - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, field: ModelField): + def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! - if field.allow_none and isinstance(v, str) and v.lower() in ("null", "none"): + if ( + info.field_name + and is_nullable(cls.model_fields[info.field_name]) + and isinstance(v, str) + and v.lower() in ("none",) + ): return None return v - class Config(BaseConfig): - case_sensitive = True # All must be capitalized - extra = Extra.forbid - allow_mutation = False - frozen = True - validate_all = True - keep_untouched = (cached_property,) + model_config = SettingsConfigDict( + case_sensitive=True, # All must be capitalized + extra="forbid", + frozen=True, + validate_default=True, + ignored_types=(cached_property,), + env_parse_none_str="null", + ) - @classmethod - def prepare_field(cls, field: ModelField) -> None: - super().prepare_field(field) - - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False + @classmethod + def __pydantic_init_subclass__(cls, **kwargs: Any): + super().__pydantic_init_subclass__(**kwargs) + + for name, field in cls.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get( # type: ignore[union-attr] + "auto_default_from_env", False + ) ) - - field_type = field.type_ - if args := get_args(field_type): - field_type = next(a for a in args if a != type(None)) + field_type = get_type(field) # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models is_not_composed = ( get_origin(field_type) is None ) # is not composed as dict[str, Any] or Generic[Base] - # avoid literals raising TypeError - is_not_literal = is_literal_type(field.type_) is False + is_not_literal = not is_literal(field) if ( is_not_literal @@ -111,25 +102,23 @@ def prepare_field(cls, field: ModelField) -> None: and issubclass(field_type, BaseCustomSettings) ): if auto_default_from_env: - assert field.field_info.default is Undefined - assert field.field_info.default_factory is None - # Transform it into something like `Field(default_factory=create_settings_from_env(field))` - field.default_factory = create_settings_from_env(field) + field.default_factory = _create_settings_from_env(name, field) field.default = None - field.required = False # has a default now elif ( is_not_literal and is_not_composed and issubclass(field_type, BaseSettings) ): - msg = f"{cls}.{field.name} of type {field_type} must inherit from BaseCustomSettings" - raise ConfigError(msg) + msg = f"{cls}.{name} of type {field_type} must inherit from BaseCustomSettings" + raise ValueError(msg) elif auto_default_from_env: - msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{field.name} is {field_type} " - raise ConfigError(msg) + msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclasses but field {cls}.{name} is {field_type} " + raise ValueError(msg) + + cls.model_rebuild(force=True) @classmethod def create_from_envs(cls, **overrides): diff --git a/packages/settings-library/src/settings_library/basic_types.py b/packages/settings-library/src/settings_library/basic_types.py index 9020f85f98f5..f4c745b22842 100644 --- a/packages/settings-library/src/settings_library/basic_types.py +++ b/packages/settings-library/src/settings_library/basic_types.py @@ -3,21 +3,17 @@ # This is a minor evil to avoid the maintenance burden that creates # an extra dependency to a larger models_library (intra-repo library) -import re from enum import Enum +from typing import Annotated, TypeAlias -from pydantic import ConstrainedInt, ConstrainedStr - +from pydantic import Field, StringConstraints # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # e.g. 'v5' -class VersionTag(ConstrainedStr): - regex = re.compile(r"^v\d$") +VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")] class LogLevel(str, Enum): @@ -55,13 +51,7 @@ class BuildTargetEnum(str, Enum): # non-empty bounded string used as identifier # e.g. 
"123" or "name_123" or "fa327c73-52d8-462a-9267-84eeaf0f90e3" but NOT "" -class IDStr(ConstrainedStr): - strip_whitespace = True - min_length = 1 - max_length = 50 - - -# https://en.wikipedia.org/wiki/List_of_TCP_and_UDP_port_numbers#Registered_ports -class RegisteredPortInt(ConstrainedInt): - gt = 1024 - lt = 65535 +IDStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1, max_length=50) +] +RegisteredPortInt: TypeAlias = Annotated[int, Field(gt=1024, lt=65535)] diff --git a/packages/settings-library/src/settings_library/catalog.py b/packages/settings-library/src/settings_library/catalog.py index e5f44f292699..17c71237e81a 100644 --- a/packages/settings-library/src/settings_library/catalog.py +++ b/packages/settings-library/src/settings_library/catalog.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class CatalogSettings(BaseCustomSettings, MixinServiceSettings): CATALOG_HOST: str = "catalog" CATALOG_PORT: PortInt = DEFAULT_FASTAPI_PORT - CATALOG_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + CATALOG_VTAG: VersionTag = "v0" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/comp_services.py b/packages/settings-library/src/settings_library/comp_services.py index e3cb628f7b78..71901e616246 100644 --- a/packages/settings-library/src/settings_library/comp_services.py +++ b/packages/settings-library/src/settings_library/comp_services.py @@ -1,5 +1,4 @@ -from pydantic import ByteSize, NonNegativeInt, validator -from pydantic.tools import parse_raw_as +from pydantic import ByteSize, NonNegativeInt, TypeAdapter, field_validator from settings_library.base import BaseCustomSettings from ._constants import GB @@ -10,19 +9,19 @@ class ComputationalServices(BaseCustomSettings): DEFAULT_MAX_NANO_CPUS: NonNegativeInt = _DEFAULT_MAX_NANO_CPUS_VALUE - DEFAULT_MAX_MEMORY: ByteSize = parse_raw_as( - ByteSize, f"{_DEFAULT_MAX_MEMORY_VALUE}" + DEFAULT_MAX_MEMORY: ByteSize = TypeAdapter(ByteSize).validate_python( + f"{_DEFAULT_MAX_MEMORY_VALUE}" ) DEFAULT_RUNTIME_TIMEOUT: NonNegativeInt = 0 - @validator("DEFAULT_MAX_NANO_CPUS", pre=True) + @field_validator("DEFAULT_MAX_NANO_CPUS", mode="before") @classmethod def _set_default_cpus_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: v = _DEFAULT_MAX_NANO_CPUS_VALUE return v - @validator("DEFAULT_MAX_MEMORY", pre=True) + @field_validator("DEFAULT_MAX_MEMORY", mode="before") @classmethod def _set_default_memory_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: diff --git a/packages/settings-library/src/settings_library/director_v2.py b/packages/settings-library/src/settings_library/director_v2.py index 78c5edd78c6f..baf32956c8ee 100644 --- a/packages/settings-library/src/settings_library/director_v2.py +++ b/packages/settings-library/src/settings_library/director_v2.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): DIRECTOR_V2_HOST: str = "director-v2" DIRECTOR_V2_PORT: PortInt = DEFAULT_FASTAPI_PORT - DIRECTOR_V2_VTAG: VersionTag = 
parse_obj_as(VersionTag, "v2") + DIRECTOR_V2_VTAG: VersionTag = "v2" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index bb365cb9785f..e899ce457189 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -1,7 +1,8 @@ from functools import cached_property -from typing import Any, ClassVar +from typing import Any -from pydantic import Field, SecretStr, validator +from pydantic import Field, SecretStr, field_validator +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -23,7 +24,7 @@ class RegistrySettings(BaseCustomSettings): ) REGISTRY_SSL: bool = Field(..., description="access to registry through ssl") - @validator("REGISTRY_PATH", pre=True) + @field_validator("REGISTRY_PATH", mode="before") @classmethod def _escape_none_string(cls, v) -> Any | None: return None if v == "None" else v @@ -36,8 +37,8 @@ def resolved_registry_url(self) -> str: def api_url(self) -> str: return f"{self.REGISTRY_URL}/v2" - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "REGISTRY_AUTH": "True", @@ -48,3 +49,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 2cd7cf0b9a6e..22d2d9af9ee2 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,20 +1,23 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import Field +from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class EC2Settings(BaseCustomSettings): EC2_ACCESS_KEY_ID: str - EC2_ENDPOINT: str | None = Field( - default=None, description="do not define if using standard AWS" - ) + EC2_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -24,3 +27,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/efs.py b/packages/settings-library/src/settings_library/efs.py index d09b8abb20fe..34c48f9dca6c 100644 --- a/packages/settings-library/src/settings_library/efs.py +++ b/packages/settings-library/src/settings_library/efs.py @@ -8,7 +8,7 @@ class AwsEfsSettings(BaseCustomSettings): EFS_DNS_NAME: str = Field( description="AWS Elastic File System DNS name", - example="fs-xxx.efs.us-east-1.amazonaws.com", + examples=["fs-xxx.efs.us-east-1.amazonaws.com"], ) EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str EFS_MOUNTED_PATH: Path = Field( @@ -16,7 +16,7 @@ class AwsEfsSettings(BaseCustomSettings): ) EFS_ONLY_ENABLED_FOR_USERIDS: list[int] = Field( description="This is temporary solution so we can 
enable it for specific users for testing purpose", - example=[1], + examples=[[1]], ) diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index b15bf209405c..fe5f8448b347 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,6 +1,7 @@ from enum import Enum +from typing import Self -from pydantic import root_validator +from pydantic import model_validator from pydantic.fields import Field from pydantic.types import SecretStr @@ -31,25 +32,23 @@ class SMTPSettings(BaseCustomSettings): SMTP_USERNAME: str | None = Field(None, min_length=1) SMTP_PASSWORD: SecretStr | None = Field(None, min_length=1) - @root_validator - @classmethod - def _both_credentials_must_be_set(cls, values): - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + @model_validator(mode="after") + def _both_credentials_must_be_set(self) -> Self: + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD if username is None and password or username and password is None: msg = f"Please provide both {username=} and {password=} not just one" raise ValueError(msg) - return values + return self - @root_validator - @classmethod - def _enabled_tls_required_authentication(cls, values): - smtp_protocol = values.get("SMTP_PROTOCOL") + @model_validator(mode="after") + def _enabled_tls_required_authentication(self) -> Self: + smtp_protocol = self.SMTP_PROTOCOL - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD tls_enabled = smtp_protocol == EmailProtocol.TLS starttls_enabled = smtp_protocol == EmailProtocol.STARTTLS @@ -57,7 +56,7 @@ def _enabled_tls_required_authentication(cls, values): if (tls_enabled or starttls_enabled) and not (username or password): msg = "when using SMTP_PROTOCOL other than UNENCRYPTED username and password are required" raise ValueError(msg) - return values + return self @property def has_credentials(self) -> bool: diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index 2a5d12f1bd7d..522fcdd09914 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -1,7 +1,7 @@ from datetime import timedelta from typing import Final -from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, root_validator +from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, model_validator from .base import BaseCustomSettings from .postgres import PostgresSettings @@ -11,8 +11,8 @@ class StorageAuthSettings(StorageSettings): - STORAGE_USERNAME: str | None - STORAGE_PASSWORD: SecretStr | None + STORAGE_USERNAME: str | None = None + STORAGE_PASSWORD: SecretStr | None = None STORAGE_SECURE: bool = False @property @@ -21,11 +21,11 @@ def auth_required(self) -> bool: # for details see https://github.com/ITISFoundation/osparc-issues/issues/1264 return self.STORAGE_USERNAME is not None and self.STORAGE_PASSWORD is not None - @root_validator + @model_validator(mode="after") @classmethod def _validate_auth_fields(cls, values): - username = values["STORAGE_USERNAME"] - password = values["STORAGE_PASSWORD"] + username = values.STORAGE_USERNAME + password = values.STORAGE_PASSWORD if (username is None) != (password is None): msg = f"Both {username=} and {password=} must be 
either set or unset!" raise ValueError(msg) @@ -33,9 +33,13 @@ def _validate_auth_fields(cls, values): class NodePortsSettings(BaseCustomSettings): - NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field(auto_default_from_env=True) + NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES_SETTINGS: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) NODE_PORTS_MULTIPART_UPLOAD_COMPLETION_TIMEOUT_S: NonNegativeInt = int( timedelta(minutes=5).total_seconds() diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 7724aba99dc4..883d14c3bb4c 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,8 +1,15 @@ from functools import cached_property -from typing import Any, ClassVar from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse -from pydantic import Field, PostgresDsn, SecretStr, validator +from pydantic import ( + AliasChoices, + Field, + PostgresDsn, + SecretStr, + ValidationInfo, + field_validator, +) +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import PortInt @@ -11,7 +18,7 @@ class PostgresSettings(BaseCustomSettings): # entrypoint POSTGRES_HOST: str - POSTGRES_PORT: PortInt = PortInt(5432) + POSTGRES_PORT: PortInt = 5432 # auth POSTGRES_USER: str @@ -31,43 +38,47 @@ class PostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( default=None, description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", - env=[ + validation_alias=AliasChoices( "POSTGRES_CLIENT_NAME", # This is useful when running inside a docker container, then the hostname is set each client gets a different name "HOST", "HOSTNAME", - ], + ), ) - @validator("POSTGRES_MAXSIZE") + @field_validator("POSTGRES_MAXSIZE") @classmethod - def _check_size(cls, v, values): - if not (values["POSTGRES_MINSIZE"] <= v): - msg = f"assert POSTGRES_MINSIZE={values['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" + def _check_size(cls, v, info: ValidationInfo): + if info.data["POSTGRES_MINSIZE"] > v: + msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" raise ValueError(msg) return v @cached_property def dsn(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", + ) ) return dsn @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql+asyncpg", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql+asyncpg", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, 
+ port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", + ) ) return dsn @@ -93,8 +104,8 @@ def _update_query(self, uri: str) -> str: return urlunparse(parsed_uri._replace(query=updated_query)) return uri - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ # minimal required { @@ -106,3 +117,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/prometheus.py b/packages/settings-library/src/settings_library/prometheus.py index 065c7e930f08..9c40293d4638 100644 --- a/packages/settings-library/src/settings_library/prometheus.py +++ b/packages/settings-library/src/settings_library/prometheus.py @@ -9,7 +9,7 @@ class PrometheusSettings(BaseCustomSettings, MixinServiceSettings): PROMETHEUS_URL: AnyUrl - PROMETHEUS_VTAG: VersionTag = VersionTag("v1") + PROMETHEUS_VTAG: VersionTag = "v1" PROMETHEUS_USERNAME: str | None = None PROMETHEUS_PASSWORD: SecretStr | None = None @@ -24,14 +24,16 @@ def origin(self) -> str: @cached_property def api_url(self) -> str: assert self.PROMETHEUS_URL.host # nosec - prometheus_url: str = AnyUrl.build( - scheme=self.PROMETHEUS_URL.scheme, - user=self.PROMETHEUS_USERNAME, - password=self.PROMETHEUS_PASSWORD.get_secret_value() - if self.PROMETHEUS_PASSWORD - else None, - host=self.PROMETHEUS_URL.host, - port=self.PROMETHEUS_URL.port, - path=self.PROMETHEUS_URL.path, + prometheus_url: str = str( + AnyUrl.build( + scheme=self.PROMETHEUS_URL.scheme, + username=self.PROMETHEUS_USERNAME, + password=self.PROMETHEUS_PASSWORD.get_secret_value() + if self.PROMETHEUS_PASSWORD + else None, + host=self.PROMETHEUS_URL.host, + port=self.PROMETHEUS_URL.port, + path=self.PROMETHEUS_URL.path, + ) ) return prometheus_url diff --git a/packages/settings-library/src/settings_library/r_clone.py b/packages/settings-library/src/settings_library/r_clone.py index ff04d509befa..c4288466928c 100644 --- a/packages/settings-library/src/settings_library/r_clone.py +++ b/packages/settings-library/src/settings_library/r_clone.py @@ -13,7 +13,7 @@ class S3Provider(StrEnum): class RCloneSettings(BaseCustomSettings): - R_CLONE_S3: S3Settings = Field(auto_default_from_env=True) + R_CLONE_S3: S3Settings = Field(json_schema_extra={"auto_default_from_env": True}) R_CLONE_PROVIDER: S3Provider # SEE https://rclone.org/docs/#transfers-n diff --git a/packages/settings-library/src/settings_library/rabbit.py b/packages/settings-library/src/settings_library/rabbit.py index 19c6af0b6560..e2cc2e271cee 100644 --- a/packages/settings-library/src/settings_library/rabbit.py +++ b/packages/settings-library/src/settings_library/rabbit.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from pydantic.networks import AnyUrl from pydantic.types import SecretStr @@ -15,7 +14,7 @@ class RabbitDsn(AnyUrl): class RabbitSettings(BaseCustomSettings): # host RABBIT_HOST: str - RABBIT_PORT: PortInt = parse_obj_as(PortInt, 5672) + RABBIT_PORT: PortInt = 5672 RABBIT_SECURE: bool # auth @@ -24,11 +23,13 @@ class RabbitSettings(BaseCustomSettings): @cached_property def dsn(self) -> str: - rabbit_dsn: str = RabbitDsn.build( - scheme="amqps" if self.RABBIT_SECURE else "amqp", - user=self.RABBIT_USER, - password=self.RABBIT_PASSWORD.get_secret_value(), - host=self.RABBIT_HOST, - port=f"{self.RABBIT_PORT}", + rabbit_dsn: str = str( + RabbitDsn.build( + scheme="amqps" if self.RABBIT_SECURE else "amqp", + 
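+                # same pydantic v2 build() migration as the Postgres DSNs above:
+                # `username=` keyword, int port, and str(...) around the Url result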
username=self.RABBIT_USER, + password=self.RABBIT_PASSWORD.get_secret_value(), + host=self.RABBIT_HOST, + port=self.RABBIT_PORT, + ) ) return rabbit_dsn diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index b4873665dd1e..1fd7e4ac197a 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -1,6 +1,6 @@ from enum import IntEnum -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pydantic.networks import RedisDsn from pydantic.types import SecretStr @@ -24,20 +24,24 @@ class RedisSettings(BaseCustomSettings): # host REDIS_SECURE: bool = False REDIS_HOST: str = "redis" - REDIS_PORT: PortInt = parse_obj_as(PortInt, 6789) + REDIS_PORT: PortInt = TypeAdapter(PortInt).validate_python(6789) # auth REDIS_USER: str | None = None REDIS_PASSWORD: SecretStr | None = None - def build_redis_dsn(self, db_index: RedisDatabase): - return RedisDsn.build( - scheme="rediss" if self.REDIS_SECURE else "redis", - user=self.REDIS_USER or None, - password=( - self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None - ), - host=self.REDIS_HOST, - port=f"{self.REDIS_PORT}", - path=f"/{db_index}", + def build_redis_dsn(self, db_index: RedisDatabase) -> str: + return str( + RedisDsn.build( # pylint: disable=no-member + scheme="rediss" if self.REDIS_SECURE else "redis", + username=self.REDIS_USER or None, + password=( + self.REDIS_PASSWORD.get_secret_value() + if self.REDIS_PASSWORD + else None + ), + host=self.REDIS_HOST, + port=self.REDIS_PORT, + path=f"/{db_index}", + ) ) diff --git a/packages/settings-library/src/settings_library/resource_usage_tracker.py b/packages/settings-library/src/settings_library/resource_usage_tracker.py index dc696fab76c7..d0df8f093adc 100644 --- a/packages/settings-library/src/settings_library/resource_usage_tracker.py +++ b/packages/settings-library/src/settings_library/resource_usage_tracker.py @@ -1,7 +1,6 @@ from datetime import timedelta from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -16,7 +15,7 @@ class ResourceUsageTrackerSettings(BaseCustomSettings, MixinServiceSettings): RESOURCE_USAGE_TRACKER_HOST: str = "resource-usage-tracker" RESOURCE_USAGE_TRACKER_PORT: PortInt = DEFAULT_FASTAPI_PORT - RESOURCE_USAGE_TRACKER_VTAG: VersionTag = parse_obj_as(VersionTag, "v1") + RESOURCE_USAGE_TRACKER_VTAG: VersionTag = "v1" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index cef1bf11be57..95268b419205 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,22 +1,25 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import AnyHttpUrl, Field +from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + S3_ENDPOINT: Annotated[ str, BeforeValidator(lambda
x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") S3_REGION: IDStr S3_SECRET_KEY: IDStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { # non AWS use-case @@ -35,3 +38,4 @@ class Config(BaseCustomSettings.Config): }, ], } + ) diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 32b965fa123b..823ac57c1a23 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,20 +1,28 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import AnyHttpUrl, Field, SecretStr +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy +from pydantic import BeforeValidator, Field, SecretStr, TypeAdapter +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + SSM_ENDPOINT: ( + Annotated[ + str, + BeforeValidator( + lambda x: str(TypeAdapter(AnyHttpUrlLegacy).validate_python(x)) + ), + ] + | None + ) = Field(default=None, description="do not define if using standard AWS") SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "SSM_ACCESS_KEY_ID": "my_access_key_id", @@ -24,3 +32,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/storage.py b/packages/settings-library/src/settings_library/storage.py index 92ec0301257b..00ef1987037e 100644 --- a/packages/settings-library/src/settings_library/storage.py +++ b/packages/settings-library/src/settings_library/storage.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class StorageSettings(BaseCustomSettings, MixinServiceSettings): STORAGE_HOST: str = "storage" STORAGE_PORT: PortInt = DEFAULT_AIOHTTP_PORT - STORAGE_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + STORAGE_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index eb4ec0c707a3..b63e35caf611 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -6,29 +6,25 @@ """ -import re -from re import Pattern +from typing import Annotated, TypeAlias -from pydantic import ConstrainedStr, Field, parse_obj_as +from pydantic import BeforeValidator, Field, StringConstraints, TypeAdapter from .base import BaseCustomSettings - -class CountryCodeStr(ConstrainedStr): - # Based on https://countrycode.org/ - strip_whitespace: bool = True - regex: Pattern[str] | None = re.compile(r"^\d{1,4}") - - class Config: - frozen = True +# Based on https://countrycode.org/ +CountryCodeStr: TypeAlias = Annotated[ + str, + 
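+    # pydantic v2 removed ConstrainedStr: the constraints become Annotated
+    # metadata instead; BeforeValidator(str) coerces inputs such as int 41 to
+    # "41" before StringConstraints strips whitespace and applies the regex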
BeforeValidator(str), + StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}"), +] class TwilioSettings(BaseCustomSettings): TWILIO_ACCOUNT_SID: str = Field(..., description="Twilio account String Identifier") TWILIO_AUTH_TOKEN: str = Field(..., description="API tokens") TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT: list[CountryCodeStr] = Field( - default=parse_obj_as( - list[CountryCodeStr], + default=TypeAdapter(list[CountryCodeStr]).validate_python( [ "41", ], diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 79d0e1ac1451..0311ed28d7bd 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -1,3 +1,4 @@ +import json import logging import os from collections.abc import Callable @@ -6,12 +7,14 @@ import rich import typer +from common_library.serialization import model_dump_with_secrets +from models_library.utils.json_serialization import json_dumps from pydantic import ValidationError -from pydantic.env_settings import BaseSettings +from pydantic_core import to_jsonable_python +from pydantic_settings import BaseSettings from ._constants import HEADER_STR from .base import BaseCustomSettings -from .utils_encoders import create_json_encoder_wo_secrets def print_as_envfile( @@ -24,14 +27,15 @@ def print_as_envfile( ): exclude_unset = pydantic_export_options.get("exclude_unset", False) - for field in settings_obj.__fields__.values(): - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False + for name, field in settings_obj.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get("auto_default_from_env", False) ) - value = getattr(settings_obj, field.name) + value = getattr(settings_obj, name) - if exclude_unset and field.name not in settings_obj.__fields_set__: + if exclude_unset and name not in settings_obj.model_fields_set: if not auto_default_from_env: continue if value is None: @@ -39,10 +43,14 @@ def print_as_envfile( if isinstance(value, BaseSettings): if compact: - value = f"'{value.json(**pydantic_export_options)}'" # flat + value = json.dumps( + model_dump_with_secrets( + value, show_secrets=show_secrets, **pydantic_export_options + ) + ) # flat else: if verbose: - typer.echo(f"\n# --- {field.name} --- ") + typer.echo(f"\n# --- {name} --- ") print_as_envfile( value, compact=False, @@ -54,22 +62,34 @@ def print_as_envfile( elif show_secrets and hasattr(value, "get_secret_value"): value = value.get_secret_value() - if verbose: - field_info = field.field_info - if field_info.description: - typer.echo(f"# {field_info.description}") + if verbose and field.description: + typer.echo(f"# {field.description}") - typer.echo(f"{field.name}={value}") + typer.echo(f"{name}={value}") -def print_as_json(settings_obj, *, compact=False, **pydantic_export_options): +def print_as_json( + settings_obj, + *, + compact: bool = False, + show_secrets: bool, + json_serializer, + **pydantic_export_options, +): typer.echo( - settings_obj.json(indent=None if compact else 2, **pydantic_export_options) + json_serializer( + model_dump_with_secrets( + settings_obj, show_secrets=show_secrets, **pydantic_export_options + ), + indent=None if compact else 2, + ) ) def create_settings_command( - settings_cls: type[BaseCustomSettings], logger: logging.Logger | None = None + settings_cls: type[BaseCustomSettings], + logger: logging.Logger | None 
= None, + json_serializer=json_dumps, ) -> Callable: """Creates typer command function for settings""" @@ -94,14 +114,24 @@ def settings( """Resolves settings and prints envfile""" if as_json_schema: - typer.echo(settings_cls.schema_json(indent=0 if compact else 2)) + typer.echo( + json.dumps( + settings_cls.model_json_schema(), + default=to_jsonable_python, + indent=0 if compact else 2, + ) + ) return try: settings_obj = settings_cls.create_from_envs() except ValidationError as err: - settings_schema = settings_cls.schema_json(indent=2) + settings_schema = json.dumps( + settings_cls.model_json_schema(), + default=to_jsonable_python, + indent=2, + ) assert logger is not None # nosec logger.error( # noqa: TRY400 @@ -128,14 +158,15 @@ def settings( raise pydantic_export_options: dict[str, Any] = {"exclude_unset": exclude_unset} - if show_secrets: - # NOTE: this option is for json-only - pydantic_export_options["encoder"] = create_json_encoder_wo_secrets( - settings_cls - ) if as_json: - print_as_json(settings_obj, compact=compact, **pydantic_export_options) + print_as_json( + settings_obj, + compact=compact, + show_secrets=show_secrets, + json_serializer=json_serializer, + **pydantic_export_options, + ) else: print_as_envfile( settings_obj, diff --git a/packages/settings-library/src/settings_library/utils_encoders.py b/packages/settings-library/src/settings_library/utils_encoders.py index 71ea960bf786..f38e156b6a5a 100644 --- a/packages/settings-library/src/settings_library/utils_encoders.py +++ b/packages/settings-library/src/settings_library/utils_encoders.py @@ -12,7 +12,7 @@ def create_json_encoder_wo_secrets(model_cls: type[BaseModel]): show_secrets_encoder = create_json_encoder_wo_secrets(type(model)) model.dict(encoder=show_secrets_encoder)['my_secret'] == "secret" """ - current_encoders = getattr(model_cls.Config, "json_encoders", {}) + current_encoders = getattr(model_cls.model_config, "json_encoders", {}) return partial( custom_pydantic_encoder, { diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index e7bb66057c5c..17746487a6fb 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -4,14 +4,13 @@ """ from enum import Enum, auto -from pydantic import parse_obj_as from pydantic.networks import AnyUrl from pydantic.types import SecretStr from .basic_types import PortInt -DEFAULT_AIOHTTP_PORT: PortInt = parse_obj_as(PortInt, 8080) -DEFAULT_FASTAPI_PORT: PortInt = parse_obj_as(PortInt, 8000) +DEFAULT_AIOHTTP_PORT: PortInt = 8080 +DEFAULT_FASTAPI_PORT: PortInt = 8000 class URLPart(Enum): @@ -96,6 +95,8 @@ def _compose_url( assert prefix # nosec prefix = prefix.upper() + port_value = self._safe_getattr(f"{prefix}_PORT", port) + parts = { "scheme": ( "https" @@ -103,30 +104,32 @@ def _compose_url( else "http" ), "host": self._safe_getattr(f"{prefix}_HOST", URLPart.REQUIRED), - "user": self._safe_getattr(f"{prefix}_USER", user), + "port": int(port_value) if port_value is not None else None, + "username": self._safe_getattr(f"{prefix}_USER", user), "password": self._safe_getattr(f"{prefix}_PASSWORD", password), - "port": self._safe_getattr(f"{prefix}_PORT", port), } if vtag != URLPart.EXCLUDE: # noqa: SIM102 if v := self._safe_getattr(f"{prefix}_VTAG", vtag): - parts["path"] = f"/{v}" + parts["path"] = f"{v}" # post process parts dict kwargs = {} - for k, v in parts.items(): - value = v + for k, v in 
parts.items(): # type: ignore[assignment] if isinstance(v, SecretStr): value = v.get_secret_value() - elif v is not None: - value = f"{v}" + else: + value = v - kwargs[k] = value + if value is not None: + kwargs[k] = value - assert all(isinstance(v, str) or v is None for v in kwargs.values()) # nosec + assert all( + isinstance(v, (str, int)) or v is None for v in kwargs.values() + ) # nosec - composed_url: str = AnyUrl.build(**kwargs) - return composed_url + composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] + return composed_url.rstrip("/") def _build_api_base_url(self, *, prefix: str) -> str: return self._compose_url( diff --git a/packages/settings-library/src/settings_library/webserver.py b/packages/settings-library/src/settings_library/webserver.py index 4da2c41d699b..c32bdbeb0c5d 100644 --- a/packages/settings-library/src/settings_library/webserver.py +++ b/packages/settings-library/src/settings_library/webserver.py @@ -1,7 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as - from .base import BaseCustomSettings from .basic_types import PortInt, VersionTag from .utils_service import DEFAULT_AIOHTTP_PORT, MixinServiceSettings, URLPart @@ -10,7 +8,7 @@ class WebServerSettings(BaseCustomSettings, MixinServiceSettings): WEBSERVER_HOST: str = "webserver" WEBSERVER_PORT: PortInt = DEFAULT_AIOHTTP_PORT - WEBSERVER_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + WEBSERVER_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index 725f19c534a0..0431a6c67487 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -96,9 +96,13 @@ class _ApplicationSettings(BaseCustomSettings): # NOTE: by convention, an addon is disabled when APP_ADDON=None, so we make this # entry nullable as well - APP_OPTIONAL_ADDON: _ModuleSettings | None = Field(auto_default_from_env=True) + APP_OPTIONAL_ADDON: _ModuleSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # NOTE: example of a group that cannot be disabled (not nullable) - APP_REQUIRED_PLUGIN: PostgresSettings | None = Field(auto_default_from_env=True) + APP_REQUIRED_PLUGIN: PostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) return _ApplicationSettings diff --git a/packages/settings-library/tests/test__models_examples.py b/packages/settings-library/tests/test__models_examples.py index c60a6c082618..96ffc7135b21 100644 --- a/packages/settings-library/tests/test__models_examples.py +++ b/packages/settings-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_settings_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 956bf6a35015..2d52c1096e71 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,47 +12,53 @@ """ - -from pydantic import BaseSettings, validator -from pydantic.fields import ModelField, Undefined +from common_library.pydantic_fields_extension import is_nullable +from pydantic import ValidationInfo, field_validator +from pydantic_core import 
PydanticUndefined +from pydantic_settings import BaseSettings from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict def assert_field_specs( - model_cls, name, is_required, is_nullable, explicit_default, defaults + model_cls: type[BaseSettings], + name: str, + required: bool, + nullable: bool, + explicit_default, ): - field: ModelField = model_cls.__fields__[name] - print(field, field.field_info) + info = model_cls.model_fields[name] + print(info) - assert field.required == is_required - assert field.allow_none == is_nullable - assert field.field_info.default == explicit_default + assert info.is_required() == required + assert is_nullable(info) == nullable - assert field.default == defaults - if field.required: + if info.is_required(): # in this case, default is not really used - assert field.default is None + assert info.default is PydanticUndefined + else: + assert info.default == explicit_default class Settings(BaseSettings): VALUE: int VALUE_DEFAULT: int = 42 - VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore - VALUE_NULLABLE_OPTIONAL: int | None + VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore[assignment] + VALUE_NULLABLE_REQUIRED_AS_WELL: int | None VALUE_NULLABLE_DEFAULT_VALUE: int | None = 42 VALUE_NULLABLE_DEFAULT_NULL: int | None = None # Other ways to write down "required" is using ... - VALUE_ALSO_REQUIRED: int = ... # type: ignore + VALUE_REQUIRED_AS_WELL: int = ... # type: ignore[assignment] - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod - def _parse_none(cls, v, values, field: ModelField): + def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! - if field.allow_none and isinstance(v, str) and v.lower() in ("null", "none"): - return None + if info.field_name and is_nullable(cls.model_fields[info.field_name]): + if isinstance(v, str) and v.lower() in ("null", "none"): + return None return v @@ -64,37 +70,33 @@ def test_fields_declarations(): assert_field_specs( Settings, "VALUE", - is_required=True, - is_nullable=False, - explicit_default=Undefined, - defaults=None, + required=True, + nullable=False, + explicit_default=PydanticUndefined, ) assert_field_specs( Settings, "VALUE_DEFAULT", - is_required=False, - is_nullable=False, + required=False, + nullable=False, explicit_default=42, - defaults=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_REQUIRED", - is_required=True, - is_nullable=True, + required=True, + nullable=True, explicit_default=Ellipsis, - defaults=None, ) assert_field_specs( Settings, - "VALUE_NULLABLE_OPTIONAL", - is_required=False, - is_nullable=True, - explicit_default=Undefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL - defaults=None, + "VALUE_NULLABLE_REQUIRED_AS_WELL", + required=True, + nullable=True, + explicit_default=PydanticUndefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL ) # VALUE_NULLABLE_OPTIONAL interpretation has always been confusing @@ -104,28 +106,25 @@ def test_fields_declarations(): assert_field_specs( Settings, "VALUE_NULLABLE_DEFAULT_VALUE", - is_required=False, - is_nullable=True, + required=False, + nullable=True, explicit_default=42, - defaults=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_DEFAULT_NULL", - is_required=False, - is_nullable=True, - explicit_default=None, # <- difference wrt VALUE_NULLABLE_OPTIONAL - defaults=None, + required=False, + nullable=True, + explicit_default=None, ) assert_field_specs( Settings, - "VALUE_ALSO_REQUIRED", - is_required=True, - 
is_nullable=False, + "VALUE_REQUIRED_AS_WELL", + required=True, + nullable=False, explicit_default=Ellipsis, - defaults=None, ) @@ -137,7 +136,7 @@ def test_construct(monkeypatch): VALUE_NULLABLE_REQUIRED=None, ) - print(settings_from_init.json(exclude_unset=True, indent=1)) + print(settings_from_init.model_dump_json(exclude_unset=True, indent=1)) # from env vars setenvs_from_dict( @@ -149,15 +148,15 @@ }, ) # WARNING: set this env to None would not work w/o ``parse_none`` validator! bug??? - settings_from_env = Settings() - print(settings_from_env.json(exclude_unset=True, indent=1)) + settings_from_env = Settings() # type: ignore[call-arg] + print(settings_from_env.model_dump_json(exclude_unset=True, indent=1)) assert settings_from_init == settings_from_env # mixed - settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) - print(settings_from_both.json(exclude_unset=True, indent=1)) + settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) # type: ignore[call-arg] + print(settings_from_both.model_dump_json(exclude_unset=True, indent=1)) - assert settings_from_both == settings_from_init.copy( + assert settings_from_both == settings_from_init.model_copy( update={"VALUE_NULLABLE_REQUIRED": 3} ) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 7cbd9fa87736..3344aa6b35ac 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -10,8 +10,9 @@ import pytest import settings_library.base -from pydantic import BaseModel, BaseSettings, ValidationError +from pydantic import BaseModel, ValidationError from pydantic.fields import Field +from pydantic_settings import BaseSettings from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import ( @@ -38,17 +39,17 @@ def _get_attrs_tree(obj: Any) -> dict[str, Any]: def _print_defaults(model_cls: type[BaseModel]): - for field in model_cls.__fields__.values(): - print(field.name, ":", end="") + for name, field in model_cls.model_fields.items(): + print(name, ":", end="") try: - default = field.get_default() + default = field.get_default(call_default_factory=True) # new in Pydantic v2 print(default, type(default)) except ValidationError as err: print(err) def _dumps_model_class(model_cls: type[BaseModel]): - d = {field.name: _get_attrs_tree(field) for field in model_cls.__fields__.values()} + d = {name: _get_attrs_tree(field) for name, field in model_cls.model_fields.items()} return json.dumps(d, indent=1) @@ -61,16 +62,19 @@ class S(BaseCustomSettings): class M1(BaseCustomSettings): VALUE: S VALUE_DEFAULT: S = S(S_VALUE=42) - VALUE_CONFUSING: S = None # type: ignore + # VALUE_CONFUSING: S = None # type: ignore VALUE_NULLABLE_REQUIRED: S | None = ...
# type: ignore - VALUE_NULLABLE_OPTIONAL: S | None VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) class M2(BaseCustomSettings): # @@ -82,10 +86,14 @@ class M2(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None # defaults enabled but if not exists, it disables - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # cannot be disabled - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) # Changed in version 3.7: Dictionary order is guaranteed to be insertion order _classes = {"M1": M1, "M2": M2, "S": S} @@ -101,14 +109,14 @@ def test_create_settings_class( # DEV: Path("M1.ignore.json").write_text(dumps_model_class(M)) - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].default_factory - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None - assert M.__fields__["VALUE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_DEFAULT_ENV"].default_factory with pytest.raises(DefaultFromEnvFactoryError): - M.__fields__["VALUE_DEFAULT_ENV"].get_default() + M.model_fields["VALUE_DEFAULT_ENV"].get_default(call_default_factory=True) def test_create_settings_class_with_environment( @@ -136,20 +144,19 @@ def test_create_settings_class_with_environment( instance = SettingsClass() - print(instance.json(indent=2)) + print(instance.model_dump_json(indent=2)) # checks - assert instance.dict(exclude_unset=True) == { + assert instance.model_dump(exclude_unset=True) == { "VALUE": {"S_VALUE": 2}, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, } - assert instance.dict() == { + assert instance.model_dump() == { "VALUE": {"S_VALUE": 2}, "VALUE_DEFAULT": {"S_VALUE": 42}, - "VALUE_CONFUSING": None, + # "VALUE_CONFUSING": None, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, - "VALUE_NULLABLE_OPTIONAL": None, "VALUE_NULLABLE_DEFAULT_VALUE": {"S_VALUE": 42}, "VALUE_NULLABLE_DEFAULT_NULL": None, "VALUE_NULLABLE_DEFAULT_ENV": {"S_VALUE": 1}, @@ -163,13 +170,15 @@ def test_create_settings_class_without_environ_fails( # now defining S_VALUE M2_outside_context = create_settings_class("M2") - with pytest.raises(ValidationError) as err_info: + with pytest.raises(DefaultFromEnvFactoryError) as err_info: M2_outside_context.create_from_envs() - assert err_info.value.errors()[0] == { - "loc": ("VALUE_DEFAULT_ENV", "S_VALUE"), - "msg": "field required", - "type": "value_error.missing", + assert err_info.value.errors[0] == { + "input": {}, + "loc": ("S_VALUE",), + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } @@ -202,7 +211,9 @@ def test_auto_default_to_none_logs_a_warning( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + 
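+            # arbitrary Field(...) keyword extras are deprecated in pydantic v2,
+            # so the custom auto_default_from_env flag moves into json_schema_extra,
+            # where BaseCustomSettings reads it back when building the default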
json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -224,7 +235,9 @@ def test_auto_default_to_not_none( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -286,9 +299,11 @@ class SettingsClassExt(SettingsClass): error = err_info.value.errors()[0] assert error == { + "input": "", "loc": ("INT_VALUE_TO_NOTHING",), - "msg": "value is not a valid integer", - "type": "type_error.integer", + "msg": "Input should be a valid integer, unable to parse string as an integer", + "type": "int_parsing", + "url": "https://errors.pydantic.dev/2.9/v/int_parsing", } diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index d54d40bf925d..85fd98c7522c 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -3,10 +3,11 @@ # pylint: disable=unused-variable +import os from collections.abc import Callable import pytest -from pydantic import Field, ValidationError +from pydantic import AliasChoices, Field, ValidationError from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import BaseCustomSettings, DefaultFromEnvFactoryError from settings_library.basic_types import PortInt @@ -22,6 +23,13 @@ # +@pytest.fixture +def postgres_envvars_unset(monkeypatch: pytest.MonkeyPatch) -> None: + for name in os.environ: + if name.startswith("POSTGRES_"): + monkeypatch.delenv(name) + + @pytest.fixture def model_classes_factory() -> Callable: # @@ -49,7 +57,9 @@ class _FakePostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( None, - env=["HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME"], + validation_alias=AliasChoices( + "HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME" + ), ) # @@ -60,18 +70,18 @@ class S1(BaseCustomSettings): WEBSERVER_POSTGRES: _FakePostgresSettings class S2(BaseCustomSettings): - WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None + WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None = None class S3(BaseCustomSettings): # cannot be disabled!! 
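        # without `| None` there is no fallback: if the POSTGRES_* envs needed to
        # build the auto-default are missing, instantiating S3 raises
        # DefaultFromEnvFactoryError (exercised in test_parse_from_empty_envs below)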
WEBSERVER_POSTGRES_DEFAULT_ENV: _FakePostgresSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) class S4(BaseCustomSettings): # defaults enabled but if cannot be resolved, it disables WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = ( - Field(auto_default_from_env=True) + Field(json_schema_extra={"auto_default_from_env": True}) ) class S5(BaseCustomSettings): @@ -104,7 +114,9 @@ class S5(BaseCustomSettings): # -def test_parse_from_empty_envs(model_classes_factory: Callable): +def test_parse_from_empty_envs( + postgres_envvars_unset: None, model_classes_factory: Callable +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -115,7 +127,7 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s2.WEBSERVER_POSTGRES_NULLABLE_OPTIONAL is None with pytest.raises(DefaultFromEnvFactoryError): - # NOTE: cannot hae a default or assignment + # NOTE: cannot have a default or assignment S3() # auto default factory resolves to None (because is nullable) @@ -126,7 +138,11 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_parse_from_individual_envs(monkeypatch, model_classes_factory): +def test_parse_from_individual_envs( + postgres_envvars_unset: None, + monkeypatch: pytest.MonkeyPatch, + model_classes_factory: Callable, +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -146,18 +162,20 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): S1() assert exc_info.value.errors()[0] == { + "input": {}, "loc": ("WEBSERVER_POSTGRES",), - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } s2 = S2() - assert s2.dict(exclude_unset=True) == {} - assert s2.dict() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} + assert s2.model_dump(exclude_unset=True) == {} + assert s2.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} s3 = S3() - assert s3.dict(exclude_unset=True) == {} - assert s3.dict() == { + assert s3.model_dump(exclude_unset=True) == {} + assert s3.model_dump() == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -171,8 +189,8 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): } s4 = S4() - assert s4.dict(exclude_unset=True) == {} - assert s4.dict() == { + assert s4.model_dump(exclude_unset=True) == {} + assert s4.model_dump() == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -186,11 +204,13 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): } s5 = S5() - assert s5.dict(exclude_unset=True) == {} - assert s5.dict() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} + assert s5.model_dump(exclude_unset=True) == {} + assert s5.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} -def test_parse_compact_env(monkeypatch, model_classes_factory): +def test_parse_compact_env( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -209,7 +229,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): # test s1 = S1() - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -217,7 +237,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): 
"POSTGRES_DB": "db2", } } - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -238,7 +258,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -258,7 +278,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): # default until it is really needed. Here before it would # fail because default cannot be computed even if the final value can! s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -275,7 +295,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -292,7 +312,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -302,7 +322,9 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): } -def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): +def test_parse_from_mixed_envs( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -326,7 +348,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): s1 = S1() - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -341,7 +363,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): # NOTE how unset marks also applies to embedded fields # NOTE: (1) priority of json-compact over granulated # NOTE: (2) json-compact did not define this but granulated did - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", # <- (1) "POSTGRES_USER": "test2", # <- (1) @@ -358,7 +380,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -375,7 +397,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -392,7 +414,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -409,7 +431,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -436,7 +458,9 @@ def 
test_parse_from_mixed_envs(monkeypatch, model_classes_factory): # -def test_toggle_plugin_1(monkeypatch, model_classes_factory): +def test_toggle_plugin_1( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() @@ -449,7 +473,9 @@ def test_toggle_plugin_1(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_2(monkeypatch, model_classes_factory): +def test_toggle_plugin_2( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # minimal @@ -470,7 +496,9 @@ def test_toggle_plugin_2(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_3(monkeypatch, model_classes_factory): +def test_toggle_plugin_3( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # explicitly disables @@ -493,7 +521,9 @@ def test_toggle_plugin_3(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_4(monkeypatch, model_classes_factory): +def test_toggle_plugin_4( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() JSON_VALUE = '{"POSTGRES_HOST":"pg2", "POSTGRES_USER":"test2", "POSTGRES_PASSWORD":"shh2", "POSTGRES_DB":"db2"}' diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index 1cd3978503e4..acb9d607c896 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -67,7 +67,7 @@ def all_env_devel_undefined( ], ) def test_smtp_configuration_ok(cfg: dict[str, Any], all_env_devel_undefined: None): - assert SMTPSettings.parse_obj(cfg) + assert SMTPSettings.model_validate(cfg) @pytest.mark.parametrize( diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py index 19dbfcf17947..6c9067c2d6b1 100644 --- a/packages/settings-library/tests/test_postgres.py +++ b/packages/settings-library/tests/test_postgres.py @@ -28,15 +28,12 @@ def test_cached_property_dsn(mock_environment: EnvVarsDict): settings = PostgresSettings.create_from_envs() # all are upper-case - assert all(key == key.upper() for key in settings.dict()) + assert all(key == key.upper() for key in settings.model_dump()) - # dsn is computed from the other fields - assert "dsn" not in settings.dict() - - # causes cached property to be computed and stored on the instance assert settings.dsn - assert "dsn" in settings.dict() + # dsn is computed from the other fields + assert "dsn" not in settings.model_dump() def test_dsn_with_query(mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): diff --git a/packages/settings-library/tests/test_twilio.py b/packages/settings-library/tests/test_twilio.py index 6f2830ea4aa8..1989fbe6a9f1 100644 --- a/packages/settings-library/tests/test_twilio.py +++ b/packages/settings-library/tests/test_twilio.py @@ -20,7 +20,7 @@ def test_twilio_settings_within_envdevel( }, ) settings = TwilioSettings.create_from_envs() - print(settings.json(indent=2)) + print(settings.model_dump_json(indent=2)) assert settings diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 611ccf2509f2..b3a0c10d8999 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ 
b/packages/settings-library/tests/test_utils_cli.py @@ -18,10 +18,10 @@ from settings_library.utils_cli import ( create_settings_command, create_version_callback, + model_dump_with_secrets, print_as_envfile, print_as_json, ) -from settings_library.utils_encoders import create_json_encoder_wo_secrets from typer.testing import CliRunner log = logging.getLogger(__name__) @@ -84,12 +84,7 @@ def fake_granular_env_file_content() -> str: @pytest.fixture def export_as_dict() -> Callable: def _export(model_obj, **export_options): - return json.loads( - model_obj.json( - encoder=create_json_encoder_wo_secrets(model_obj.__class__), - **export_options, - ) - ) + return model_dump_with_secrets(model_obj, show_secrets=True, **export_options) return _export @@ -136,7 +131,7 @@ def test_settings_as_json( # reuse resulting json to build settings settings: dict = json.loads(result.stdout) - assert fake_settings_class.parse_obj(settings) + assert fake_settings_class.model_validate(settings) def test_settings_as_json_schema( @@ -439,7 +434,9 @@ class FakeSettings(BaseCustomSettings): assert "secret" not in captured.out assert "Some info" not in captured.out - print_as_json(settings_obj, compact=True) + print_as_json( + settings_obj, compact=True, show_secrets=False, json_serializer=json.dumps + ) captured = capsys.readouterr() assert "secret" not in captured.out assert "**" in captured.out diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index 12ddacda314f..01673d46f601 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -1,6 +1,6 @@ import logging -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.basic_types import BootMode from settings_library.utils_logging import MixinLoggingSettings @@ -14,22 +14,22 @@ def test_mixin_logging(monkeypatch): class Settings(BaseCustomSettings, MixinLoggingSettings): # DOCKER - SC_BOOT_MODE: BootMode | None + SC_BOOT_MODE: BootMode | None = None # LOGGING LOG_LEVEL: str = Field( "WARNING", - env=[ + validation_alias=AliasChoices( "APPNAME_LOG_LEVEL", "LOG_LEVEL", - ], + ), ) APPNAME_DEBUG: bool = Field( default=False, description="Starts app in debug mode" ) - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _v(cls, value: str) -> str: return cls.validate_log_level(value) @@ -42,14 +42,9 @@ def _v(cls, value: str) -> str: assert settings.LOG_LEVEL == "DEBUG" assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false}' + settings.model_dump_json() + == '{"SC_BOOT_MODE":null,"LOG_LEVEL":"DEBUG","APPNAME_DEBUG":false}' ) # test cached-property assert settings.log_level == logging.DEBUG - # log_level is cached-property (notice that is lower-case!), and gets added after first use - assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false, "log_level": 10}' - ) diff --git a/packages/settings-library/tests/test_utils_service.py b/packages/settings-library/tests/test_utils_service.py index a3638f9b31e4..8ecd98358937 100644 --- a/packages/settings-library/tests/test_utils_service.py +++ b/packages/settings-library/tests/test_utils_service.py @@ -5,7 +5,7 @@ from functools import cached_property import pytest -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import 
AnyHttpUrl, TypeAdapter from pydantic.types import SecretStr from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag @@ -24,9 +24,9 @@ class MySettings(BaseCustomSettings, MixinServiceSettings): MY_VTAG: VersionTag | None = None MY_SECURE: bool = False - # optional - MY_USER: str | None - MY_PASSWORD: SecretStr | None + # optional (in Pydantic v2 requires a default) + MY_USER: str | None = None + MY_PASSWORD: SecretStr | None = None @cached_property def api_base_url(self) -> str: @@ -88,8 +88,8 @@ def test_service_settings_base_urls(service_settings_cls: type): settings_with_defaults = service_settings_cls() - base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.base_url) - api_base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.api_base_url) + base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.base_url) + api_base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.api_base_url) assert base_url.path != api_base_url.path assert (base_url.scheme, base_url.host, base_url.port) == ( diff --git a/packages/simcore-sdk/requirements/_base.in b/packages/simcore-sdk/requirements/_base.in index a07a0b50b018..7999f81151a9 100644 --- a/packages/simcore-sdk/requirements/_base.in +++ b/packages/simcore-sdk/requirements/_base.in @@ -7,6 +7,8 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/common-library/requirements/_base.in + aiocache aiofiles diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 869c0eb387cf..af923d0c4f98 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -33,6 +33,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.6.0 # via # fast-depends @@ -199,7 +201,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -216,12 +218,28 @@ pydantic==1.10.18 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.3 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via 
pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -306,6 +324,7 @@ typing-extensions==4.12.2 # opentelemetry-sdk # pint # pydantic + # pydantic-core # typer urllib3==2.2.3 # via diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 67493efaa0de..3065f3672cbf 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -28,6 +28,10 @@ alembic==1.13.3 # via # -c requirements/_base.txt # -r requirements/_test.in +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto attrs==24.2.0 @@ -202,11 +206,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.4 # via moto pytest==8.3.3 @@ -244,7 +252,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -320,6 +330,7 @@ typing-extensions==4.12.2 # cfn-lint # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 diff --git a/packages/simcore-sdk/requirements/ci.txt b/packages/simcore-sdk/requirements/ci.txt index 4e430e3dc21f..18aaf5e93a29 100644 --- a/packages/simcore-sdk/requirements/ci.txt +++ b/packages/simcore-sdk/requirements/ci.txt @@ -16,6 +16,7 @@ # installs this repo's packages simcore-postgres-database @ ../postgres-database pytest-simcore @ ../pytest-simcore +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ simcore-service-library @ ../service-library/ diff --git a/packages/simcore-sdk/requirements/dev.txt b/packages/simcore-sdk/requirements/dev.txt index b67f43d86905..c7e7f45b7ede 100644 --- a/packages/simcore-sdk/requirements/dev.txt +++ b/packages/simcore-sdk/requirements/dev.txt @@ -15,6 +15,7 @@ --editable ../pytest-simcore/ --editable ../postgres-database +--editable ../common-library/ --editable ../models-library/ --editable ../settings-library/ diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index 7b8b810ba38c..7579c3eeb0c5 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from models_library.users import UserID -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.archiving_utils import unarchive_dir from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData @@ -25,7 +25,9 @@ def __create_s3_object_key( project_id: ProjectID, node_uuid: NodeID, file_path: Path | str ) -> StorageFileID: file_name = file_path.name if isinstance(file_path, Path) else file_path - return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}") # type: ignore[arg-type] + return TypeAdapter(StorageFileID).validate_python( + 
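# NOTE: context for the requirement pins above: pydantic v2 splits what v1
# shipped as one package, so `pydantic-core`, `pydantic-settings` (which pulls
# in `python-dotenv`) and `pydantic-extra-types` now appear as separate pins.
# Minimal sketch of the relocated BaseSettings import (illustrative class):
from pydantic_settings import BaseSettings, SettingsConfigDict


class _EnvSketch(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="APP_")

    LOG_LEVEL: str = "WARNING"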
f"{project_id}/{node_uuid}/{file_name}" + ) def __get_s3_name(path: Path, *, is_archive: bool) -> str: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py index 7b5467c2851a..a5305dd5b93d 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py @@ -14,7 +14,7 @@ from models_library.projects_nodes_io import LocationID, LocationName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from settings_library.node_ports import NodePortsSettings from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -42,7 +42,7 @@ async def _get_location_id_from_location_name( raise exceptions.S3InvalidStore(store) -def _get_https_link_if_storage_secure(url: AnyUrl) -> str: +def _get_https_link_if_storage_secure(url: str) -> str: # NOTE: links generated by storage are http only. # WEBSERVER -> STORAGE (http requests) # DY-SIDECAR (simcore-sdk) -> STORAGE (httpS requests) @@ -69,18 +69,18 @@ async def _complete_upload( :rtype: ETag """ async with session.post( - _get_https_link_if_storage_secure(upload_completion_link), + _get_https_link_if_storage_secure(str(upload_completion_link)), json=jsonable_encoder(FileUploadCompletionBody(parts=parts)), auth=get_basic_auth(), ) as resp: resp.raise_for_status() # now poll for state - file_upload_complete_response = parse_obj_as( - Envelope[FileUploadCompleteResponse], await resp.json() + file_upload_complete_response = TypeAdapter(Envelope[FileUploadCompleteResponse]).validate_python( + await resp.json() ) assert file_upload_complete_response.data # nosec state_url = _get_https_link_if_storage_secure( - file_upload_complete_response.data.links.state + str(file_upload_complete_response.data.links.state) ) _logger.info("completed upload of %s", f"{len(parts)} parts, received {state_url}") @@ -96,8 +96,8 @@ async def _complete_upload( with attempt: async with session.post(state_url, auth=get_basic_auth()) as resp: resp.raise_for_status() - future_enveloped = parse_obj_as( - Envelope[FileUploadCompleteFutureResponse], await resp.json() + future_enveloped = TypeAdapter(Envelope[FileUploadCompleteFutureResponse]).validate_python( + await resp.json() ) assert future_enveloped.data # nosec if future_enveloped.data.state == FileUploadCompleteState.NOK: @@ -142,7 +142,7 @@ async def _abort_upload( # abort the upload correctly, so it can revert back to last version try: async with session.post( - _get_https_link_if_storage_secure(abort_upload_link), auth=get_basic_auth() + _get_https_link_if_storage_secure(str(abort_upload_link)), auth=get_basic_auth() ) as resp: resp.raise_for_status() except ClientError: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py index 35d1d7c71f8f..320cfd7e25f3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py @@ -6,10 +6,11 @@ from asyncio.streams import StreamReader from pathlib import Path +from common_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from models_library.basic_types import IDStr from 
pydantic import AnyUrl, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings @@ -24,7 +25,7 @@ _OSPARC_SYMLINK_EXTENSION = ".rclonelink" # named `rclonelink` to maintain backwards -class BaseAwsS3CliError(PydanticErrorMixin, RuntimeError): +class BaseAwsS3CliError(OsparcErrorMixin, RuntimeError): ... diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 695b710c8f8d..5feefab82f82 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -253,7 +253,7 @@ def _check_for_aws_http_errors(exc: BaseException) -> bool: async def _session_put( session: ClientSession, file_part_size: int, - upload_url: AnyUrl, + upload_url: str, pbar: tqdm, io_log_redirect_cb: LogRedirectCB | None, progress_bar: ProgressBarData, @@ -314,7 +314,7 @@ async def _upload_file_part( received_e_tag = await _session_put( session=session, file_part_size=file_part_size, - upload_url=upload_url, + upload_url=str(upload_url), pbar=pbar, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index 6a5609c7eb5c..f3e2587fab75 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -15,7 +15,7 @@ from models_library.basic_types import IDStr, SHA256Str from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, parse_obj_as from servicelib.file_utils import create_sha256_checksum from servicelib.progress_bar import ProgressBarData from settings_library.aws_s3_cli import AwsS3CliSettings @@ -189,14 +189,16 @@ async def download_path_from_s3( aws_s3_cli_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=TypeAdapter(AnyUrl).validate_python(f"{download_link}"), ) elif r_clone_settings: await r_clone.sync_s3_to_local( r_clone_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=str( + TypeAdapter(AnyUrl).validate_python(f"{download_link}") + ), ) else: msg = "Unexpected configuration" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 18e151394936..bbfe14e7f39e 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -8,11 +8,12 @@ from pathlib import Path from typing import Final +from common_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from aiofiles import tempfile from models_library.basic_types import IDStr from pydantic import AnyUrl, BaseModel, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from 
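# NOTE: minimal sketch of the error-class swap above: pydantic v2 removed
# `pydantic.errors.PydanticErrorMixin`, and `OsparcErrorMixin` from the new
# common-library takes its place. The kwargs-feed-the-template constructor is
# an assumption carried over from the v1 mixin's convention.
from common_library.errors_classes import OsparcErrorMixin


class _CliErrorSketch(OsparcErrorMixin, RuntimeError):
    msg_template = "command {command!r} failed"


# would render the template: raise _CliErrorSketch(command="aws s3 sync ...")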
settings_library.r_clone import RCloneSettings @@ -31,7 +32,7 @@ _logger = logging.getLogger(__name__) -class BaseRCloneError(PydanticErrorMixin, RuntimeError): +class BaseRCloneError(OsparcErrorMixin, RuntimeError): ... @@ -166,7 +167,7 @@ async def _get_folder_size( cwd=f"{local_dir.resolve()}", ) - rclone_folder_size_result = _RCloneSize.parse_raw(result) + rclone_folder_size_result = _RCloneSize.model_validate_json(result) _logger.debug( "RClone size call for %s: %s", f"{folder}", f"{rclone_folder_size_result}" ) @@ -259,7 +260,7 @@ async def sync_local_to_s3( """ _raise_if_directory_is_file(local_directory_path) - upload_s3_path = re.sub(r"^s3://", "", upload_s3_link) + upload_s3_path = re.sub(r"^s3://", "", str(upload_s3_link)) _logger.debug(" %s; %s", f"{upload_s3_link=}", f"{upload_s3_path=}") await _sync_sources( @@ -279,7 +280,7 @@ async def sync_s3_to_local( progress_bar: ProgressBarData, *, local_directory_path: Path, - download_s3_link: AnyUrl, + download_s3_link: str, exclude_patterns: set[str] | None = None, debug_logs: bool = False, ) -> None: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py index fa34b0426a06..7470c3940280 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py @@ -3,7 +3,7 @@ from typing import Union from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, Field, parse_raw_as +from pydantic import BaseModel, ByteSize, ConfigDict, Field, TypeAdapter from servicelib.logging_utils import log_catch from servicelib.progress_bar import ProgressBarData @@ -31,9 +31,7 @@ class _RCloneSyncTransferCompletedMessage(_RCloneSyncMessageBase): class _RCloneSyncTransferringStats(BaseModel): bytes: ByteSize total_bytes: ByteSize - - class Config: - alias_generator = snake_to_camel + model_config = ConfigDict(alias_generator=snake_to_camel) class _RCloneSyncTransferringMessage(_RCloneSyncMessageBase): @@ -78,8 +76,9 @@ def __init__(self, progress_bar: ProgressBarData) -> None: async def __call__(self, logs: str) -> None: _logger.debug("received logs: %s", logs) with log_catch(_logger, reraise=False): - rclone_message: _RCloneSyncMessages = parse_raw_as( - _RCloneSyncMessages, # type: ignore[arg-type] + rclone_message: _RCloneSyncMessages = TypeAdapter( + _RCloneSyncMessages + ).validate_strings( logs, ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index c249cbcf8302..b7a394a6dbdd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -144,7 +144,7 @@ async def get_storage_locations( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}"}, ) as response: - locations_enveloped = Envelope[FileLocationArray].parse_obj( + locations_enveloped = Envelope[FileLocationArray].model_validate( await response.json() ) if locations_enveloped.data is None: @@ -173,7 +173,7 @@ async def get_download_file_link( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "link_type": link_type.value}, ) as response: - presigned_link_enveloped = Envelope[PresignedLink].parse_obj( + presigned_link_enveloped = Envelope[PresignedLink].model_validate( await response.json() ) if not 
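# NOTE: minimal sketch of the JSON-parsing rename applied above (illustrative
# model): v1 `Model.parse_raw(s)` is v2 `Model.model_validate_json(s)`, and
# `parse_raw_as(T, s)` becomes a `TypeAdapter(T)` call.
from pydantic import BaseModel, ByteSize


class _RCloneSizeSketch(BaseModel):
    count: int
    bytes: ByteSize


result = _RCloneSizeSketch.model_validate_json('{"count": 2, "bytes": 1024}')
assert result.count == 2 and result.bytes == 1024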
presigned_link_enveloped.data or not presigned_link_enveloped.data.link: @@ -215,7 +215,7 @@ async def get_upload_file_links( expected_status=status.HTTP_200_OK, params=query_params, ) as response: - file_upload_links_enveloped = Envelope[FileUploadSchema].parse_obj( + file_upload_links_enveloped = Envelope[FileUploadSchema].model_validate( await response.json() ) if file_upload_links_enveloped.data is None: @@ -245,7 +245,7 @@ async def get_file_metadata( # NOTE: keeps backwards compatibility raise exceptions.S3InvalidPathError(file_id) - file_metadata_enveloped = Envelope[FileMetaDataGet].parse_obj(payload) + file_metadata_enveloped = Envelope[FileMetaDataGet].model_validate(payload) assert file_metadata_enveloped.data # nosec return file_metadata_enveloped.data @@ -265,7 +265,7 @@ async def list_file_metadata( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "uuid_filter": uuid_filter}, ) as resp: - envelope = Envelope[list[FileMetaDataGet]].parse_obj(await resp.json()) + envelope = Envelope[list[FileMetaDataGet]].model_validate(await resp.json()) assert envelope.data is not None # nosec file_meta_data: list[FileMetaDataGet] = envelope.data return file_meta_data diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py index d8eb1d993493..ad94884c3b0d 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py @@ -4,18 +4,27 @@ from models_library.basic_regex import UUID_RE from models_library.projects_nodes_io import BaseFileLink, DownloadLink from models_library.projects_nodes_io import PortLink as BasePortLink -from pydantic import AnyUrl, Extra, Field, StrictBool, StrictFloat, StrictInt, StrictStr +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + StrictBool, + StrictFloat, + StrictInt, + StrictStr, +) class PortLink(BasePortLink): - node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it + node_uuid: str = Field(..., pattern=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it class FileLink(BaseFileLink): """allow all kind of file links""" - class Config: - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) # TODO: needs to be in sync with project_nodes.InputTypes and project_nodes.OutputTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 9da016b4cea9..2ecf1422866d 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -11,8 +11,7 @@ from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey from models_library.users import UserID -from pydantic import BaseModel, Field, ValidationError -from pydantic.error_wrappers import flatten_errors +from pydantic import BaseModel, ConfigDict, Field, ValidationError from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings @@ -63,9 +62,9 @@ class Nodeports(BaseModel): r_clone_settings: RCloneSettings | None = None io_log_redirect_cb: LogRedirectCB | None aws_s3_cli_settings: AwsS3CliSettings | None = None - - class 
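# NOTE: minimal sketch of the config migration above (illustrative model): the
# v1 inner `class Config` becomes the `model_config = ConfigDict(...)`
# attribute, `Extra.allow` becomes the plain string "allow", and
# `Field(regex=...)` is renamed `Field(pattern=...)`.
from pydantic import BaseModel, ConfigDict, Field


class _FileLinkSketch(BaseModel):
    model_config = ConfigDict(extra="allow")

    path: str = Field(..., pattern=r"^[\w./-]+$")


link = _FileLinkSketch(path="outputs/result.zip", label="extra fields allowed")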
Config: - arbitrary_types_allowed = True + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) def __init__(self, **data: Any): super().__init__(**data) @@ -216,9 +215,11 @@ async def _set_with_notifications( await self.save_to_db_cb(self) # groups all ValidationErrors pre-pending 'port_key' to loc and raises ValidationError - if errors := [ - list(flatten_errors([r], self.__config__, loc=(f"{port_key}",))) - for port_key, r in zip(port_values.keys(), results) - if isinstance(r, ValidationError) + if error_details := [ + _get_error_details(r, port_key) + for port_key, r in zip(port_values.keys(), results, strict=False) + if r is not None ]: - raise ValidationError(errors, model=type(self)) + raise ValidationError.from_exception_data( + title="Multiple port_key errors", line_errors=error_details + ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 2338563dcdb5..3ddab6a29d3a 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -10,8 +10,16 @@ from models_library.basic_types import IDStr from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey -from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator -from pydantic.tools import parse_obj_as +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + PrivateAttr, + TypeAdapter, + ValidationError, + ValidationInfo, + field_validator, +) from servicelib.progress_bar import ProgressBarData from ..node_ports_common.exceptions import ( @@ -56,7 +64,7 @@ def _check_if_symlink_is_valid(symlink: Path) -> None: def can_parse_as(v, *types) -> bool: try: for type_ in types: - parse_obj_as(type_, v) + TypeAdapter(type_).validate_python(v) return True except ValidationError: return False @@ -70,17 +78,23 @@ class SetKWargs: class Port(BaseServiceIOModel): key: ServicePortKey widget: dict[str, Any] | None = None - default_value: DataItemValue | None = Field(None, alias="defaultValue") + default_value: DataItemValue | None = Field( + None, alias="defaultValue", union_mode="left_to_right" + ) - value: DataItemValue | None = None + value: DataItemValue | None = Field( + None, validate_default=True, union_mode="left_to_right" + ) # Different states of "value" # - e.g. typically after resolving a port's link, a download link, ... 
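# NOTE: minimal sketch of the error-aggregation rebuild above: v1's
# `flatten_errors` + `ValidationError(errors, model=...)` is replaced by
# `ValidationError.from_exception_data`, which takes pre-built line-error
# dicts. The error item below is illustrative, not this PR's
# `_get_error_details` helper.
from pydantic import ValidationError
from pydantic_core import InitErrorDetails, PydanticCustomError

line_errors: list[InitErrorDetails] = [
    {
        "type": PydanticCustomError("value_error", "invalid value for this port"),
        "loc": ("my_port_key",),
        "input": None,
    }
]
exc = ValidationError.from_exception_data(
    title="Multiple port_key errors", line_errors=line_errors
)
assert exc.error_count() == 1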
# - lazy evaluation using get_* members # - used to run validation & conversion of resolved PortContentTypes values # - excluded from all model export - value_item: ItemValue | None = Field(None, exclude=True) - value_concrete: ItemConcreteValue | None = Field(None, exclude=True) + value_item: ItemValue | None = Field(None, exclude=True, union_mode="left_to_right") + value_concrete: ItemConcreteValue | None = Field( + None, exclude=True, union_mode="left_to_right" + ) # Function to convert from ItemValue -> ItemConcreteValue _py_value_converter: Callable[[Any], ItemConcreteValue] = PrivateAttr() @@ -90,15 +104,14 @@ class Port(BaseServiceIOModel): # flags _used_default_value: bool = PrivateAttr(False) - class Config(BaseServiceIOModel.Config): - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) - @validator("value", always=True) + @field_validator("value") @classmethod - def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: + def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: if ( v is not None - and (property_type := values.get("property_type")) + and (property_type := info.data.get("property_type")) and not isinstance(v, PortLink) ): if port_utils.is_file_type(property_type): @@ -108,10 +121,10 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) elif property_type == "ref_contentSchema": v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) elif isinstance(v, (list, dict)): raise TypeError( @@ -119,21 +132,21 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) return v - @validator("value_item", "value_concrete", pre=True) + @field_validator("value_item", "value_concrete", mode="before") @classmethod - def check_item_or_concrete_value(cls, v, values): + def check_item_or_concrete_value(cls, v, info: ValidationInfo): if ( v - and v != values["value"] - and (property_type := values.get("property_type")) + and v != info.data["value"] + and (property_type := info.data.get("property_type")) and property_type == "ref_contentSchema" and not can_parse_as(v, Path, AnyUrl) ): v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) return v @@ -209,7 +222,9 @@ async def _evaluate() -> ItemValue | None: if isinstance(self.value, DownloadLink): # generic download link for a file - url: AnyUrl = self.value.download_link + url: AnyUrl = TypeAdapter(AnyUrl).validate_python( + self.value.download_link + ) return url # otherwise, this is a BasicValueTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py index c2ebb56986de..b33e677c0bf5 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py @@ -1,7 +1,8 @@ import logging import re -from typing import Any, Dict, Optional, Tuple +from typing import Any +from common_library.errors_classes import OsparcErrorMixin from models_library.projects_nodes import UnitStr from models_library.utils.json_schema import ( JsonSchemaValidationError, @@ -9,9 +10,8 @@ jsonschema_validate_schema, 
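# NOTE: minimal sketch of the validator signature change applied above
# (illustrative model): v2 validators receive a `ValidationInfo` whose `.data`
# holds the already-validated fields, replacing the v1 `values` dict.
from pydantic import BaseModel, ValidationInfo, field_validator


class _PortSketch(BaseModel):
    property_type: str
    value: int | None = None

    @field_validator("value")
    @classmethod
    def _check_value(cls, v: int | None, info: ValidationInfo) -> int | None:
        # fields validate in declaration order, so property_type is available here
        if v is not None and info.data.get("property_type") == "integer":
            assert isinstance(v, int)
        return v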
) from pint import PintError, UnitRegistry -from pydantic.errors import PydanticValueError -JsonSchemaDict = Dict[str, Any] +JsonSchemaDict = dict[str, Any] log = logging.getLogger(__name__) @@ -22,8 +22,8 @@ # - Use 'code' to discriminate port_validation errors -class PortValueError(PydanticValueError): - code = "port_validation.schema_error" +class PortValueError(OsparcErrorMixin, ValueError): + code = "port_validation.schema_error" # type: ignore msg_template = "Invalid value in port {port_key!r}: {schema_error_message}" # pylint: disable=useless-super-delegation @@ -37,8 +37,8 @@ def __init__(self, *, port_key: str, schema_error: JsonSchemaValidationError): ) -class PortUnitError(PydanticValueError): - code = "port_validation.unit_error" +class PortUnitError(OsparcErrorMixin, ValueError): + code = "port_validation.unit_error" # type: ignore msg_template = "Invalid unit in port {port_key!r}: {pint_error_msg}" # pylint: disable=useless-super-delegation @@ -72,7 +72,7 @@ def _validate_port_value(value, content_schema: JsonSchemaDict): def _validate_port_unit( value, unit, content_schema: JsonSchemaDict, *, ureg: UnitRegistry -) -> Tuple[Any, Optional[UnitStr]]: +) -> tuple[Any, UnitStr | None]: """ - Checks valid 'value' against content_schema - Converts 'value' with 'unit' to unit expected in content_schema @@ -101,7 +101,7 @@ def _validate_port_unit( def validate_port_content( port_key, value: Any, - unit: Optional[UnitStr], + unit: UnitStr | None, content_schema: JsonSchemaDict, ): """A port content is all datasets injected to a given port. Currently only diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py index 2855e8a253e2..9fb13510afb6 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py @@ -1,38 +1,35 @@ from collections.abc import ItemsView, Iterator, KeysView, ValuesView from models_library.services_types import ServicePortKey -from pydantic import BaseModel +from pydantic import RootModel from ..node_ports_common.exceptions import UnboundPortError from .port import Port -class BasePortsMapping(BaseModel): - __root__: dict[ServicePortKey, Port] - +class BasePortsMapping(RootModel[dict[ServicePortKey, Port]]): def __getitem__(self, key: int | ServicePortKey) -> Port: - if isinstance(key, int): - if key < len(self.__root__): - key = list(self.__root__.keys())[key] - if key not in self.__root__: + if isinstance(key, int) and key < len(self.root): + key = list(self.root.keys())[key] + if key not in self.root: raise UnboundPortError(key) assert isinstance(key, str) # nosec - return self.__root__[key] + return self.root[key] def __iter__(self) -> Iterator[ServicePortKey]: # type: ignore - return iter(self.__root__) + return iter(self.root) def keys(self) -> KeysView[ServicePortKey]: - return self.__root__.keys() + return self.root.keys() def items(self) -> ItemsView[ServicePortKey, Port]: - return self.__root__.items() + return self.root.items() def values(self) -> ValuesView[Port]: - return self.__root__.values() + return self.root.values() def __len__(self) -> int: - return self.__root__.__len__() + return self.root.__len__() class InputsList(BasePortsMapping): diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py index daa4c9aaa3e9..f4d74711e186 100644 --- 
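# NOTE: minimal sketch of the custom-root migration above (illustrative
# classes): v1's `__root__` field becomes a `RootModel` subclass in v2, with
# the payload exposed on `.root`.
from pydantic import BaseModel, RootModel


class _ItemSketch(BaseModel):
    value: int


class _MappingSketch(RootModel[dict[str, _ItemSketch]]):
    def __getitem__(self, key: str) -> _ItemSketch:
        return self.root[key]


mapping = _MappingSketch({"a": _ItemSketch(value=1)})
assert mapping["a"].value == 1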
a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py @@ -115,7 +115,7 @@ async def dump(nodeports: Nodeports) -> None: "dumping node_ports_v2 object %s", pformat(nodeports, indent=2), ) - _nodeports_cfg = nodeports.dict( + _nodeports_cfg = nodeports.model_dump( include={"internal_inputs", "internal_outputs"}, by_alias=True, exclude_unset=True, diff --git a/packages/simcore-sdk/tests/helpers/utils_port_v2.py b/packages/simcore-sdk/tests/helpers/utils_port_v2.py index 556e0eb4cedd..23298f6b1759 100644 --- a/packages/simcore-sdk/tests/helpers/utils_port_v2.py +++ b/packages/simcore-sdk/tests/helpers/utils_port_v2.py @@ -45,5 +45,5 @@ def create_valid_port_mapping( key=key_for_file_port, fileToKeyMap={file_to_key: key_for_file_port} if file_to_key else None, ) - port_mapping = mapping_class(**{"__root__": port_cfgs}) + port_mapping = mapping_class(**{"root": port_cfgs}) return port_mapping diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index d5f6cd7227a8..92b6afaa81b2 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -142,7 +142,7 @@ async def _create(file_path: Path) -> dict[str, Any]: async with ClientSession() as session: async with session.put(url) as resp: resp.raise_for_status() - presigned_links_enveloped = Envelope[FileUploadSchema].parse_obj( + presigned_links_enveloped = Envelope[FileUploadSchema].model_validate( await resp.json() ) assert presigned_links_enveloped.data diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 9cd1ce32de4e..56f696bb46db 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -1,699 +1,699 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments -# pylint:disable=protected-access - -import filecmp -from collections.abc import Awaitable, Callable -from pathlib import Path -from typing import Any -from uuid import uuid4 - -import pytest -from aiohttp import ClientError -from faker import Faker -from models_library.projects_nodes_io import ( - LocationID, - SimcoreS3DirectoryID, - SimcoreS3FileID, -) -from models_library.users import UserID -from pydantic import BaseModel, ByteSize, parse_obj_as -from pytest_mock import MockerFixture -from pytest_simcore.helpers.parametrizations import byte_size_ids -from servicelib.progress_bar import ProgressBarData -from settings_library.aws_s3_cli import AwsS3CliSettings -from settings_library.r_clone import RCloneSettings -from simcore_sdk.node_ports_common import exceptions, filemanager -from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError -from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder -from simcore_sdk.node_ports_common.r_clone import RCloneFailedError -from yarl import URL - -pytest_simcore_core_services_selection = [ - "migration", - "postgres", - "storage", - "redis", -] - -pytest_simcore_ops_services_selection = ["minio", "adminer"] - - -class _SyncSettings(BaseModel): - r_clone_settings: RCloneSettings | None - aws_s3_cli_settings: AwsS3CliSettings | None - - -@pytest.fixture( - params=[(True, 
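# NOTE: minimal sketch of the dump rename applied above (illustrative model):
# v1 `.dict()` is v2 `.model_dump()` with the same filtering options.
from pydantic import BaseModel, Field


class _NodeSketch(BaseModel):
    internal_inputs: dict[str, int] = Field(default_factory=dict, alias="inputs")
    internal_outputs: dict[str, int] = Field(default_factory=dict, alias="outputs")


cfg = _NodeSketch.model_validate({"inputs": {"x": 1}}).model_dump(
    include={"internal_inputs", "internal_outputs"},
    by_alias=True,
    exclude_unset=True,
)
assert cfg == {"inputs": {"x": 1}}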
False), (False, True), (False, False)], - ids=[ - "RClone enabled", - "AwsS3Cli enabled", - "Both RClone and AwsS3Cli disabled", - ], -) -def optional_sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> _SyncSettings: - _rclone_enabled, _aws_s3_cli_enabled = request.param - - _r_clone_settings = r_clone_settings if _rclone_enabled else None - _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None - - return _SyncSettings( - r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings - ) - - -def _file_size(size_str: str, **pytest_params): - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - _file_size("1003Mib", marks=pytest.mark.heavy_load), - _file_size("7Gib", marks=pytest.mark.heavy_load), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - simcore_services_ready: None, - storage_service: URL, - faker: Faker, -): - file_path = create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - # pylint: disable=protected-access - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download_using_file_object( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path 
= create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - with file_path.open("rb") as file_object: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=filemanager.UploadableFileObject( - file_object, file_path.name, file_path.stat().st_size - ), - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.fixture -def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", - autospec=True, - side_effect=AwsS3CliFailedError, - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_is_properly_removed_from_storage( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocked_upload_file_raising_exceptions: None, -): - file_path = create_file_of_size(file_size) - file_id = create_valid_file_uuid("", file_path) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - with pytest.raises(exceptions.S3InvalidPathError): - await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_after_valid_upload_keeps_last_valid_state( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: 
Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocker: MockerFixture, -): - # upload a valid file - file_path = create_file_of_size(file_size) - file_id = create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - # check the file is correctly uploaded - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - # now start an invalid update by generating an exception while uploading the same file - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - # the file shall be back to its original state - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - -async def test_invalid_file_path( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = s3_simcore_location - with pytest.raises(FileNotFoundError): - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - path_to_upload=Path(tmpdir) / "some other file.txt", - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_errors_upon_invalid_file_identifiers( - node_ports_config: None, - tmpdir: Path, - user_id: UserID, - project_id: str, - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" 
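# NOTE: a behavioral footnote on `_SyncSettings` above, as a minimal sketch
# (illustrative model): in pydantic v2, `X | None` without an explicit default
# is a *required* nullable field, so callers must always pass both keywords.
from pydantic import BaseModel, ValidationError


class _SyncSketch(BaseModel):
    r_clone: str | None
    aws_cli: str | None


_SyncSketch(r_clone=None, aws_cli=None)  # ok: both provided explicitly
try:
    _SyncSketch(r_clone=None)  # aws_cli missing -> v2 raises
except ValidationError:
    pass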
- file_path.write_text("I am a test file") - assert file_path.exists() - - store = s3_simcore_location - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - invalid_s3_path = SimcoreS3FileID("") - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_s3_path, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 - invalid_file_id = SimcoreS3FileID("file_id") - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - invalid_s3_path = SimcoreS3FileID("") - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_s3_path, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_invalid_store( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = "somefunkystore" - with pytest.raises(exceptions.S3InvalidStore): - await filemanager.upload_path( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -@pytest.fixture( - params=[True, False], - ids=["with RClone", "with AwsS3Cli"], -) -def sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> _SyncSettings: - is_rclone_enabled = request.param - - return _SyncSettings( - r_clone_settings=r_clone_settings if is_rclone_enabled else None, - aws_s3_cli_settings=aws_s3_cli_settings if not is_rclone_enabled else None, - ) - - -@pytest.mark.parametrize("is_directory", [False, True]) -async def 
test_valid_metadata( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - sync_settings: _SyncSettings, - is_directory: bool, -): - # first we go with a non-existing file - file_path = Path(tmpdir) / "a-subdir" / "test.test" - file_path.parent.mkdir(parents=True, exist_ok=True) - - path_to_upload = file_path.parent if is_directory else file_path - - file_id = create_valid_file_uuid("", path_to_upload) - assert file_path.exists() is False - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - assert is_metadata_present is False - - # now really create the file and upload it - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", path_to_upload) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=path_to_upload, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - if is_directory: - assert isinstance(upload_result, UploadedFolder) - else: - assert isinstance(upload_result, UploadedFile) - assert upload_result.store_id == s3_simcore_location - assert upload_result.etag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - - assert is_metadata_present is True - - -@pytest.mark.parametrize( - "fct, extra_kwargs", - [ - (filemanager.entry_exists, {"is_directory": False}), - (filemanager.delete_file, {}), - (filemanager.get_file_metadata, {}), - ], -) -async def test_invalid_call_raises_exception( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - fct: Callable[[int, str, str, Any | None], Awaitable], - extra_kwargs: dict[str, Any], -): - file_path = Path(tmpdir) / "test.test" - file_id = create_valid_file_uuid("", file_path) - assert file_path.exists() is False - - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore - ) - with pytest.raises(exceptions.StorageInvalidCall): - await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore - ) - - -async def test_delete_file( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - storage_service: URL, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - 
assert e_tag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - assert is_metadata_present is True - - await filemanager.delete_file( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - - # check that it disappeared - assert ( - await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - is False - ) - - -@pytest.mark.parametrize("files_in_folder", [1, 10]) -async def test_upload_path_source_is_a_folder( - node_ports_config: None, - project_id: str, - tmp_path: Path, - faker: Faker, - user_id: int, - s3_simcore_location: LocationID, - files_in_folder: int, - sync_settings: _SyncSettings, -): - source_dir = tmp_path / f"source-{faker.uuid4()}" - source_dir.mkdir(parents=True, exist_ok=True) - - download_dir = tmp_path / f"download-{faker.uuid4()}" - download_dir.mkdir(parents=True, exist_ok=True) - - for i in range(files_in_folder): - (source_dir / f"file-{i}.txt").write_text("1") - - directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( - f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" - ) - s3_object = SimcoreS3FileID(directory_id) - - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=s3_object, - path_to_upload=source_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFolder) - assert source_dir.exists() - - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_name=None, - store_id=s3_simcore_location, - s3_object=s3_object, - local_path=download_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert download_dir.exists() - - # ensure all files in download and source directory are the same - file_names: set = {f.name for f in source_dir.glob("*")} & { - f.name for f in download_dir.glob("*") - } - for file_name in file_names: - filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=protected-access + +import filecmp +from collections.abc import Awaitable, Callable +from pathlib import Path +from typing import Any +from uuid import uuid4 + +import pytest +from aiohttp import ClientError +from faker import Faker +from models_library.projects_nodes_io import ( + LocationID, + SimcoreS3DirectoryID, + SimcoreS3FileID, +) +from models_library.users import UserID +from pydantic import BaseModel, ByteSize, parse_obj_as +from pytest_mock import MockerFixture +from pytest_simcore.helpers.parametrizations import byte_size_ids +from servicelib.progress_bar import ProgressBarData +from settings_library.aws_s3_cli import AwsS3CliSettings +from settings_library.r_clone import RCloneSettings +from simcore_sdk.node_ports_common import exceptions, filemanager +from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError +from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder +from simcore_sdk.node_ports_common.r_clone import RCloneFailedError 
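# NOTE: the re-added module still imports `parse_obj_as`, which pydantic v2
# keeps only as a deprecated shim. Minimal sketch of the equivalent v2 call
# used elsewhere in this PR (illustrative value):
from pydantic import ByteSize, TypeAdapter

size = TypeAdapter(ByteSize).validate_python("10Mib")
assert size == 10 * 1024 * 1024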
+from yarl import URL + +pytest_simcore_core_services_selection = [ + "migration", + "postgres", + "storage", + "redis", +] + +pytest_simcore_ops_services_selection = ["minio", "adminer"] + + +class _SyncSettings(BaseModel): + r_clone_settings: RCloneSettings | None + aws_s3_cli_settings: AwsS3CliSettings | None + + +@pytest.fixture( + params=[(True, False), (False, True), (False, False)], + ids=[ + "RClone enabled", + "AwsS3Cli enabled", + "Both RClone and AwsS3Cli disabled", + ], +) +def optional_sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + _rclone_enabled, _aws_s3_cli_enabled = request.param + + _r_clone_settings = r_clone_settings if _rclone_enabled else None + _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None + + return _SyncSettings( + r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings + ) + + +def _file_size(size_str: str, **pytest_params): + return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + _file_size("1003Mib", marks=pytest.mark.heavy_load), + _file_size("7Gib", marks=pytest.mark.heavy_load), + ], + ids=byte_size_ids, +) +async def test_valid_upload_download( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + simcore_services_ready: None, + storage_service: URL, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + # pylint: disable=protected-access + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + ], + ids=byte_size_ids, +) +async def 
test_valid_upload_download_using_file_object( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + with file_path.open("rb") as file_object: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=filemanager.UploadableFileObject( + file_object, file_path.name, file_path.stat().st_size + ), + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.fixture +def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", + autospec=True, + side_effect=AwsS3CliFailedError, + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_is_properly_removed_from_storage( + node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocked_upload_file_raising_exceptions: None, +): + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + with pytest.raises(exceptions.S3InvalidPathError): + await 
filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_after_valid_upload_keeps_last_valid_state( + node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocker: MockerFixture, +): + # upload a valid file + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + # check the file is correctly uploaded + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + # now start an invalid update by generating an exception while uploading the same file + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + # the file shall be back to its original state + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + +async def test_invalid_file_path( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = s3_simcore_location + with pytest.raises(FileNotFoundError): + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + path_to_upload=Path(tmpdir) / "some other file.txt", + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + 
r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_errors_upon_invalid_file_identifiers( + node_ports_config: None, + tmpdir: Path, + user_id: UserID, + project_id: str, + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + store = s3_simcore_location + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + invalid_s3_path = SimcoreS3FileID("") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 + invalid_file_id = SimcoreS3FileID("file_id") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + invalid_s3_path = SimcoreS3FileID("") + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_invalid_store( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = "somefunkystore" + with pytest.raises(exceptions.S3InvalidStore): + await filemanager.upload_path( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +@pytest.fixture( + params=[True, False], + ids=["with RClone", "with AwsS3Cli"], +) +def 
sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + is_rclone_enabled = request.param + + return _SyncSettings( + r_clone_settings=r_clone_settings if is_rclone_enabled else None, + aws_s3_cli_settings=aws_s3_cli_settings if not is_rclone_enabled else None, + ) + + +@pytest.mark.parametrize("is_directory", [False, True]) +async def test_valid_metadata( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + sync_settings: _SyncSettings, + is_directory: bool, +): + # first we go with a non-existing file + file_path = Path(tmpdir) / "a-subdir" / "test.test" + file_path.parent.mkdir(parents=True, exist_ok=True) + + path_to_upload = file_path.parent if is_directory else file_path + + file_id = create_valid_file_uuid("", path_to_upload) + assert file_path.exists() is False + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + assert is_metadata_present is False + + # now really create the file and upload it + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", path_to_upload) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=path_to_upload, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + if is_directory: + assert isinstance(upload_result, UploadedFolder) + else: + assert isinstance(upload_result, UploadedFile) + assert upload_result.store_id == s3_simcore_location + assert upload_result.etag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + + assert is_metadata_present is True + + +@pytest.mark.parametrize( + "fct, extra_kwargs", + [ + (filemanager.entry_exists, {"is_directory": False}), + (filemanager.delete_file, {}), + (filemanager.get_file_metadata, {}), + ], +) +async def test_invalid_call_raises_exception( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + fct: Callable[[int, str, str, Any | None], Awaitable], + extra_kwargs: dict[str, Any], +): + file_path = Path(tmpdir) / "test.test" + file_id = create_valid_file_uuid("", file_path) + assert file_path.exists() is False + + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore + ) + with pytest.raises(exceptions.StorageInvalidCall): + await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore + ) + + +async def test_delete_file( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + storage_service: URL, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + 
file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + assert is_metadata_present is True + + await filemanager.delete_file( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + # check that it disappeared + assert ( + await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + is False + ) + + +@pytest.mark.parametrize("files_in_folder", [1, 10]) +async def test_upload_path_source_is_a_folder( + node_ports_config: None, + project_id: str, + tmp_path: Path, + faker: Faker, + user_id: int, + s3_simcore_location: LocationID, + files_in_folder: int, + sync_settings: _SyncSettings, +): + source_dir = tmp_path / f"source-{faker.uuid4()}" + source_dir.mkdir(parents=True, exist_ok=True) + + download_dir = tmp_path / f"download-{faker.uuid4()}" + download_dir.mkdir(parents=True, exist_ok=True) + + for i in range(files_in_folder): + (source_dir / f"file-{i}.txt").write_text("1") + + directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( + f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" + ) + s3_object = SimcoreS3FileID(directory_id) + + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=s3_object, + path_to_upload=source_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFolder) + assert source_dir.exists() + + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_name=None, + store_id=s3_simcore_location, + s3_object=s3_object, + local_path=download_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert download_dir.exists() + + # ensure all files in download and source directory are the same + file_names: set = {f.name for f in source_dir.glob("*")} & { + f.name for f in download_dir.glob("*") + } + for file_name in file_names: + filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index 73fb423d101a..40cf5d56c27e 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -30,6 +30,7 @@ SimcoreS3FileID, ) from models_library.services_types import ServicePortKey +from pydantic import TypeAdapter from pytest_mock import MockerFixture from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings @@ -93,7 +94,7 @@ async def _check_port_valid( assert port.value assert isinstance(port.value, DownloadLink | 
PortLink | BaseFileLink) assert ( - port.value.dict(by_alias=True, exclude_unset=True) + port.value.model_dump(by_alias=True, exclude_unset=True) == port_values[key_name] ) else: @@ -227,7 +228,7 @@ async def test_port_value_accessors( item_pytype: type, option_r_clone_settings: RCloneSettings | None, ): # pylint: disable=W0613, W0621 - item_key = ServicePortKey("some_key") + item_key = TypeAdapter(ServicePortKey).validate_python("some_key") config_dict, _, _ = create_special_configuration( inputs=[(item_key, item_type, item_value)], outputs=[(item_key, item_type, None)], @@ -302,17 +303,26 @@ async def test_port_file_accessors( ) await check_config_valid(PORTS, config_dict) assert ( - await (await PORTS.outputs)[ServicePortKey("out_34")].get() is None + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + is None ) # check emptyness with pytest.raises(exceptions.S3InvalidPathError): - await (await PORTS.inputs)[ServicePortKey("in_1")].get() + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() # this triggers an upload to S3 + configuration change - await (await PORTS.outputs)[ServicePortKey("out_34")].set(item_value) + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].set(item_value) # this is the link to S3 storage - value = (await PORTS.outputs)[ServicePortKey("out_34")].value + value = (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].value assert isinstance(value, DownloadLink | PortLink | BaseFileLink) - received_file_link = value.dict(by_alias=True, exclude_unset=True) + received_file_link = value.model_dump(by_alias=True, exclude_unset=True) assert received_file_link["store"] == s3_simcore_location assert ( received_file_link["path"] @@ -325,12 +335,21 @@ async def test_port_file_accessors( # this triggers a download from S3 to a location in /tempdir/simcorefiles/item_key assert isinstance( - await (await PORTS.outputs)[ServicePortKey("out_34")].get(), item_pytype + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get(), + item_pytype, ) - downloaded_file = await (await PORTS.outputs)[ServicePortKey("out_34")].get() + downloaded_file = await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() assert isinstance(downloaded_file, Path) assert downloaded_file.exists() - assert str(await (await PORTS.outputs)[ServicePortKey("out_34")].get()).startswith( + assert str( + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + ).startswith( str( Path( tempfile.gettempdir(), @@ -475,9 +494,16 @@ async def test_get_value_from_previous_node( ) await check_config_valid(PORTS, config_dict) - input_value = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + input_value = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(input_value, item_pytype) - assert await (await PORTS.inputs)[ServicePortKey("in_15")].get() == item_value + assert ( + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() + == item_value + ) @pytest.mark.parametrize( @@ -519,7 +545,9 @@ async def test_get_file_from_previous_node( r_clone_settings=option_r_clone_settings, ) await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + file_path = await (await PORTS.inputs)[ + 
TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -580,7 +608,9 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name( postgres_db, project_id, this_node_uuid, config_dict ) # pylint: disable=E1101 await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -640,7 +670,9 @@ async def test_file_mapping( postgres_db, project_id, node_uuid, config_dict ) # pylint: disable=E1101 await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -651,7 +683,9 @@ async def test_file_mapping( ) # let's get it a second time to see if replacing works - file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -668,9 +702,11 @@ async def test_file_mapping( assert isinstance(file_path, Path) await PORTS.set_file_by_keymap(file_path) file_id = create_valid_file_uuid("out_1", file_path) - value = (await PORTS.outputs)[ServicePortKey("out_1")].value + value = (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_1") + ].value assert isinstance(value, DownloadLink | PortLink | BaseFileLink) - received_file_link = value.dict(by_alias=True, exclude_unset=True) + received_file_link = value.model_dump(by_alias=True, exclude_unset=True) assert received_file_link["store"] == s3_simcore_location assert received_file_link["path"] == file_id # received a new eTag @@ -723,15 +759,19 @@ async def test_regression_concurrent_port_update_fails( # when writing in serial these are expected to work for item_key, _, _ in outputs: - await (await PORTS.outputs)[ServicePortKey(item_key)].set(int_item_value) - assert (await PORTS.outputs)[ServicePortKey(item_key)].value == int_item_value + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].set(int_item_value) + assert (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].value == int_item_value # when writing in parallel and reading back, # they fail, with enough concurrency async def _upload_create_task(item_key: str) -> None: - await (await PORTS.outputs)[ServicePortKey(item_key)].set( - parallel_int_item_value - ) + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].set(parallel_int_item_value) # updating in parallel creates a race condition results = await gather( @@ -744,7 +784,7 @@ async def _upload_create_task(item_key: str) -> None: with pytest.raises(AssertionError) as exc_info: # noqa: PT012 for item_key, _, _ in outputs: assert (await PORTS.outputs)[ - ServicePortKey(item_key) + TypeAdapter(ServicePortKey).validate_python(item_key) ].value == parallel_int_item_value assert exc_info.value.args[0].startswith( @@ -809,7 +849,10 @@ async def test_batch_update_inputs_outputs( async with ProgressBarData(num_steps=2, 
description=faker.pystr()) as progress_bar: port_values = (await PORTS.outputs).values() await PORTS.set_multiple( - {ServicePortKey(port.key): (k, None) for k, port in enumerate(port_values)}, + { + TypeAdapter(ServicePortKey).validate_python(port.key): (k, None) + for k, port in enumerate(port_values) + }, progress_bar=progress_bar, outputs_callbacks=callbacks, ) @@ -820,7 +863,7 @@ async def test_batch_update_inputs_outputs( assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 await PORTS.set_multiple( { - ServicePortKey(port.key): (k, None) + TypeAdapter(ServicePortKey).validate_python(port.key): (k, None) for k, port in enumerate((await PORTS.inputs).values(), start=1000) }, progress_bar=progress_bar, @@ -836,19 +879,39 @@ async def test_batch_update_inputs_outputs( ports_inputs = await PORTS.inputs for k, asd in enumerate(outputs): item_key, _, _ = asd - assert ports_outputs[ServicePortKey(item_key)].value == k - assert await ports_outputs[ServicePortKey(item_key)].get() == k + assert ( + ports_outputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value + == k + ) + assert ( + await ports_outputs[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].get() + == k + ) for k, asd in enumerate(inputs, start=1000): item_key, _, _ = asd - assert ports_inputs[ServicePortKey(item_key)].value == k - assert await ports_inputs[ServicePortKey(item_key)].get() == k + assert ( + ports_inputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value + == k + ) + assert ( + await ports_inputs[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].get() + == k + ) # test missing key raises error async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: with pytest.raises(UnboundPortError): await PORTS.set_multiple( - {ServicePortKey("missing_key_in_both"): (123132, None)}, + { + TypeAdapter(ServicePortKey).validate_python( + "missing_key_in_both" + ): (123132, None) + }, progress_bar=progress_bar, outputs_callbacks=callbacks, ) diff --git a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py index c1edb4f183ce..a578d410605b 100644 --- a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py @@ -40,7 +40,7 @@ def _create_files(number: int, folder: Path) -> list[Path]: @pytest.fixture def r_clone_settings(faker: Faker) -> RCloneSettings: - return RCloneSettings.parse_obj( + return RCloneSettings.model_validate( { "R_CLONE_S3": { "S3_ENDPOINT": faker.url(), diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index c32c055afe4f..a3710dfe27b2 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -20,7 +20,7 @@ ) from models_library.basic_types import IDStr from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock import MockerFixture from servicelib.aiohttp import status from servicelib.progress_bar import ProgressBarData @@ -213,8 +213,7 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch assert "UploadId" in response upload_id = response["UploadId"] - upload_links = parse_obj_as( - list[AnyUrl], + upload_links = 
TypeAdapter(list[AnyUrl]).validate_python( await asyncio.gather( *[ aiobotocore_s3_client.generate_presigned_url( @@ -235,8 +234,8 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch chunk_size=chunk_size, urls=upload_links, links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, faker.uri()), - complete_upload=parse_obj_as(AnyUrl, faker.uri()), + abort_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()), + complete_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()), ), ) @@ -246,7 +245,12 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch @pytest.mark.skip(reason="this will allow to reproduce an issue") @pytest.mark.parametrize( "file_size,used_chunk_size", - [(parse_obj_as(ByteSize, 21800510238), parse_obj_as(ByteSize, 10485760))], + [ + ( + TypeAdapter(ByteSize).validate_python(21800510238), + TypeAdapter(ByteSize).validate_python(10485760), + ) + ], ) async def test_upload_file_to_presigned_links( client_session: ClientSession, @@ -254,6 +258,7 @@ async def test_upload_file_to_presigned_links( create_file_of_size: Callable[[ByteSize], Path], file_size: ByteSize, used_chunk_size: ByteSize, + faker: Faker, ): """This test is here to reproduce the issue https://github.com/ITISFoundation/osparc-simcore/issues/3531 One theory is that something might be wrong in how the chunking is done and that AWS times out @@ -268,7 +273,9 @@ async def test_upload_file_to_presigned_links( """ local_file = create_file_of_size(file_size) num_links = 2080 - effective_chunk_size = parse_obj_as(ByteSize, local_file.stat().st_size / num_links) + effective_chunk_size = TypeAdapter(ByteSize).validate_python( + local_file.stat().st_size / num_links + ) assert effective_chunk_size <= used_chunk_size upload_links = await create_upload_links(num_links, used_chunk_size) assert len(upload_links.urls) == num_links @@ -281,5 +288,5 @@ async def test_upload_file_to_presigned_links( io_log_redirect_cb=None, progress_bar=progress_bar, ) - assert progress_bar._current_steps == pytest.approx(1) + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 assert uploaded_parts diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py index 5116311ae01a..95b114ae563a 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py @@ -2,7 +2,7 @@ from uuid import uuid4 import pytest -from pydantic import ValidationError +from pydantic import TypeAdapter, ValidationError from simcore_sdk.node_ports_v2.links import DownloadLink, FileLink, PortLink diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index f8d09836213a..848b5d60d983 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -3,12 +3,14 @@ # pylint:disable=redefined-outer-name # pylint:disable=protected-access +import asyncio from pathlib import Path from typing import Any, Callable from unittest.mock import AsyncMock import pytest from faker import Faker +from pydantic import ValidationError from pytest_mock import MockFixture from servicelib.progress_bar import ProgressBarData from simcore_sdk.node_ports_common.filemanager import UploadedFile @@ -51,6 +53,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, 
project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -63,6 +66,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=auto_update, @@ -102,6 +106,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -114,6 +119,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -184,6 +190,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -196,6 +203,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -217,3 +225,57 @@ async def test_node_ports_v2_packages( db_manager = mock_db_manager(default_configuration) node_ports = await ports(user_id, project_id, node_uuid) node_ports = await ports(user_id, project_id, node_uuid, db_manager=db_manager) + + +@pytest.fixture +def mock_port_set(mocker: MockFixture) -> None: + async def _always_raise_error(*args, **kwargs): + async def _i_raise_errors(): + raise ValidationError("invalid") + + return asyncio.create_task(_i_raise_errors()) + + mocker.patch( + "simcore_sdk.node_ports_v2.port.Port._set", side_effect=_always_raise_error + ) + + +async def test_node_ports_v2_set_multiple_catch_multiple_failing_set_ports( + mock_port_set: None, + mock_db_manager: Callable, + default_configuration: dict[str, Any], + user_id: int, + project_id: str, + node_uuid: str, + faker: Faker, +): + db_manager = mock_db_manager(default_configuration) + + original_inputs = create_valid_port_mapping(InputsList, suffix="original") + original_outputs = create_valid_port_mapping(OutputsList, suffix="original") + + async def _mock_callback(*args, **kwargs): + pass + + node_ports = Nodeports( + inputs=original_inputs, + outputs=original_outputs, + db_manager=db_manager, + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + io_log_redirect_cb=None, + save_to_db_cb=_mock_callback, + node_port_creator_cb=_mock_callback, + auto_update=False, + ) + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + with pytest.raises(ValidationError): + await node_ports.set_multiple( + { + port.key: (port.value, None) + for port in list(original_inputs.values()) + + list(original_outputs.values()) + }, + progress_bar=progress_bar, + ) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 49fa694742e1..063c71f99f47 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -23,8 +23,7 @@ from faker import Faker from 
models_library.api_schemas_storage import FileMetaDataGet from models_library.projects_nodes_io import LocationID -from pydantic import parse_obj_as -from pydantic.error_wrappers import ValidationError +from pydantic import TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture from servicelib.progress_bar import ProgressBarData from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB @@ -219,8 +218,8 @@ def e_tag_fixture() -> str: async def mock_filemanager(mocker: MockerFixture, e_tag: str, faker: Faker) -> None: mocker.patch( "simcore_sdk.node_ports_common.filemanager._get_file_meta_data", - return_value=parse_obj_as( - FileMetaDataGet, FileMetaDataGet.Config.schema_extra["examples"][0] + return_value=TypeAdapter(FileMetaDataGet).validate_python( + FileMetaDataGet.model_config["json_schema_extra"]["examples"][0], ), ) mocker.patch( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py index 10c074591fc9..3746520f42c5 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py @@ -4,20 +4,21 @@ from collections import deque from pprint import pprint -from typing import Any, Dict, List, Type, Union +from typing import Any import pytest from models_library.services import ServiceInput -from pydantic import ValidationError, confloat, schema_of +from pydantic import Field, ValidationError, schema_of from simcore_sdk.node_ports_v2 import exceptions from simcore_sdk.node_ports_v2.port import Port from simcore_sdk.node_ports_v2.ports_mapping import InputsList, OutputsList +from typing_extensions import Annotated from utils_port_v2 import create_valid_port_config @pytest.mark.parametrize("port_class", [InputsList, OutputsList]) -def test_empty_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): - port_mapping = port_class(__root__={}) +def test_empty_ports_mapping(port_class: type[InputsList | OutputsList]): + port_mapping = port_class(root={}) assert not port_mapping.items() assert not port_mapping.values() assert not port_mapping.keys() @@ -28,17 +29,17 @@ def test_empty_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): @pytest.mark.parametrize("port_class", [InputsList, OutputsList]) -def test_filled_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): - port_cfgs: Dict[str, Any] = {} +def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]): + port_cfgs: dict[str, Any] = {} for t in ["integer", "number", "boolean", "string"]: port = create_valid_port_config(t) port_cfgs[port["key"]] = port port_cfgs["some_file"] = create_valid_port_config("data:*/*", key="some_file") - port_mapping = port_class(__root__=port_cfgs) + port_mapping = port_class(root=port_cfgs) # two ways to construct instances of __root__ - assert port_class.parse_obj(port_cfgs) == port_mapping + assert port_class.model_validate(port_cfgs) == port_mapping assert len(port_mapping) == len(port_cfgs) for port_key, port_value in port_mapping.items(): @@ -60,8 +61,8 @@ def test_filled_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): def test_io_ports_are_not_aliases(): # prevents creating alises as InputsList = PortsMappings - inputs = InputsList(__root__={}) - outputs = OutputsList(__root__={}) + inputs = InputsList(root={}) + outputs = OutputsList(root={}) assert isinstance(inputs, InputsList) assert not isinstance(inputs, OutputsList) @@ 
-71,10 +72,10 @@ def test_io_ports_are_not_aliases(): @pytest.fixture -def fake_port_meta() -> Dict[str, Any]: +def fake_port_meta() -> dict[str, Any]: """Service port metadata: defines a list of non-negative numbers""" schema = schema_of( - List[confloat(ge=0)], + list[Annotated[float, Field(ge=0)]], title="list[non-negative number]", ) schema.update( @@ -83,10 +84,10 @@ def fake_port_meta() -> Dict[str, Any]: ) port_model = ServiceInput.from_json_schema(port_schema=schema) - return port_model.dict(exclude_unset=True, by_alias=True) + return port_model.model_dump(exclude_unset=True, by_alias=True) -def test_validate_port_value_against_schema(fake_port_meta: Dict[str, Any]): +def test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]): # A simcore-sdk Port instance is a combination of both # - the port's metadata # - the port's value @@ -109,19 +110,19 @@ def test_validate_port_value_against_schema(fake_port_meta: Dict[str, Any]): assert error["loc"] == ("value",) assert "-2 is less than the minimum of 0" in error["msg"] - assert error["type"] == "value_error.port_validation.schema_error" + assert error["type"] == "value_error" assert "ctx" in error - assert error["ctx"]["port_key"] == "port_1" + assert error["ctx"]["error"].port_key == "port_1" - schema_error_message = error["ctx"]["schema_error_message"] - schema_error_path = error["ctx"]["schema_error_path"] + schema_error_message = error["ctx"]["error"].schema_error_message + schema_error_path = error["ctx"]["error"].schema_error_path assert schema_error_message in error["msg"] assert schema_error_path == deque([1]) -def test_validate_iolist_against_schema(fake_port_meta: Dict[str, Any]): +def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]): # Check how errors propagate from a single Port to InputsList # reference port @@ -151,7 +152,7 @@ def test_validate_iolist_against_schema(fake_port_meta: Dict[str, Any]): # ---- with pytest.raises(ValidationError) as err_info: - InputsList.parse_obj({p["key"]: p for p in ports}) + InputsList.model_validate({p["key"]: p for p in ports}) # --- assert isinstance(err_info.value, ValidationError) @@ -161,14 +162,13 @@ def test_validate_iolist_against_schema(fake_port_meta: Dict[str, Any]): for error in err_info.value.errors(): error_loc = error["loc"] assert "ctx" in error - port_key = error["ctx"].get("port_key") + port_key = error["ctx"]["error"].port_key # path hierachy - assert error_loc[0] == "__root__", f"{error_loc=}" - assert error_loc[1] == port_key, f"{error_loc=}" - assert error_loc[-1] == "value", f"{error_loc=}" + assert error_loc[0] == port_key, f"{error_loc=}" + assert error_loc[1] == "value", f"{error_loc=}" - assert error["type"] == "value_error.port_validation.schema_error" + assert error["type"] == "value_error" port_with_errors.append(port_key) pprint(error) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py index 41e61669fe58..ee0d19cec901 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py @@ -13,13 +13,14 @@ from unittest.mock import AsyncMock import pytest -from pydantic import BaseModel, conint, schema_of -from pydantic.error_wrappers import ValidationError +from pydantic import BaseModel, Field, schema_of +from pydantic import ValidationError from simcore_sdk.node_ports_v2.port import Port from 
simcore_sdk.node_ports_v2.port_validation import ( PortUnitError, validate_port_content, ) +from typing_extensions import Annotated def _replace_value_in_dict(item: Any, original_schema: dict[str, Any]): @@ -128,7 +129,7 @@ async def test_port_with_array_of_object(mocker): mocker.patch.object(Port, "_node_ports", new=AsyncMock()) class A(BaseModel): - i: conint(gt=3) + i: Annotated[int, Field(gt=3)] b: bool = False s: str l: list[int] @@ -142,7 +143,7 @@ class A(BaseModel): "contentSchema": content_schema, } sample = [{"i": 5, "s": "x", "l": [1, 2]}, {"i": 6, "s": "y", "l": [2]}] - expected_value = [A(**i).dict() for i in sample] + expected_value = [A(**i).model_dump() for i in sample] print(json.dumps(port_meta, indent=1)) print(json.dumps(expected_value, indent=1)) @@ -244,7 +245,7 @@ async def test_port_with_units_and_constraints(mocker): print(validation_error) assert validation_error["loc"] == ("value",) # starts with value,! - assert validation_error["type"] == "value_error.port_validation.schema_error" + assert validation_error["type"] == "value_error" assert "-3.14 is less than the minimum of 0" in validation_error["msg"] # inits with None + set_value @@ -256,8 +257,6 @@ async def test_port_with_units_and_constraints(mocker): with pytest.raises(ValidationError) as exc_info: await port.set_value(-3.14) - assert exc_info.value.errors()[0] == validation_error - def test_incident__port_validator_check_value(): # SEE incident https://git.speag.com/oSparc/e2e-testing/-/issues/1) diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index 91e46c5bd615..7786aafe4941 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -20,7 +20,7 @@ ) from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.aiohttp import status from simcore_sdk.node_ports_common import exceptions @@ -176,8 +176,8 @@ async def test_get_file_metada( session=session, file_id=file_id, location_id=location_id, user_id=user_id ) assert file_metadata - assert file_metadata == FileMetaDataGet.parse_obj( - FileMetaDataGet.Config.schema_extra["examples"][0] + assert file_metadata == FileMetaDataGet.model_validate( + FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] ) @@ -362,12 +362,28 @@ def test_mode_ports_storage_without_auth( [ (True, _HTTP_URL, _HTTPS_URL), (False, _HTTP_URL, _HTTP_URL), - (True, parse_obj_as(AnyUrl, _HTTP_URL), _HTTPS_URL), - (False, parse_obj_as(AnyUrl, _HTTP_URL), _HTTP_URL), + ( + True, + str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"), + _HTTPS_URL, + ), + ( + False, + str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"), + _HTTP_URL, + ), (True, _HTTPS_URL, _HTTPS_URL), (False, _HTTPS_URL, _HTTPS_URL), - (True, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL), - (False, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL), + ( + True, + str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"), + _HTTPS_URL, + ), + ( + False, + str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"), + _HTTPS_URL, + ), (True, "http://http", "https://http"), (True, "https://http", "https://http"), ], @@ -382,4 +398,4 @@ def test__get_secure_link( is_storage_secure.cache_clear() 
setenvs_from_dict(monkeypatch, {"STORAGE_SECURE": "1" if storage_secure else "0"}) - assert _get_https_link_if_storage_secure(provided) == expected + assert _get_https_link_if_storage_secure(str(provided)) == expected diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 80c64bcf5978..3e40b2694d4f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -32,11 +32,7 @@ urllib3>=1.26.5 # https://github.com/advisories/GH # Breaking changes ----------------------------------------------------------------------------------------- # - -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/4481 -fastapi<0.100.0 -pydantic<2.0 - +# with new released version 1.0.0 (https://github.com/aio-libs/aiozipkin/releases). # TODO: includes async features https://docs.sqlalchemy.org/en/14/changelog/migration_20.html sqlalchemy<2.0 diff --git a/scripts/openapi-pydantic-models-generator.bash b/scripts/openapi-pydantic-models-generator.bash index 82cf503f1d62..788cb90e7921 100755 --- a/scripts/openapi-pydantic-models-generator.bash +++ b/scripts/openapi-pydantic-models-generator.bash @@ -18,17 +18,24 @@ Build() { --load \ - < FastAPI: settings = ApplicationSettings.create_from_envs() _setup_logger(settings) - logger.debug(settings.json(indent=2)) + logger.debug(settings.model_dump_json(indent=2)) assert settings.SC_BOOT_MODE # nosec app = FastAPI( diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index 96545d0355db..f322e27163f9 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -1,7 +1,9 @@ from datetime import timedelta +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy +from common_library.pydantic_validators import validate_numeric_string_as_timedelta from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import AnyHttpUrl, Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.r_clone import S3Provider from settings_library.rabbit import RabbitSettings @@ -10,16 +12,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): LOGLEVEL: LogLevel = Field( - LogLevel.WARNING.value, env=["AGENT_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.WARNING.value, + validation_alias=AliasChoices( + "AGENT_LOGLEVEL", + "LOG_LEVEL", + "LOGLEVEL", + ), ) SC_BOOT_MODE: BootModeEnum | None AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description=( "Enables local development log format. WARNING: make sure it is " "disabled if you want to have structured logs!" 
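Reviewer note: the settings.py hunk above replaces pydantic v1's multi-source `env=[...]` keyword with v2's `validation_alias=AliasChoices(...)`. A minimal, self-contained sketch of the same pattern, assuming pydantic v2 plus pydantic-settings (the `_ExampleSettings` class and env-var names are illustrative, not the agent's real settings):

import os

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class _ExampleSettings(BaseSettings):
    # v1 spelling: LOGLEVEL: str = Field("WARNING", env=["AGENT_LOGLEVEL", "LOG_LEVEL"])
    # v2 spelling: aliases are tried in the order given; the first env var found wins
    LOGLEVEL: str = Field(
        default="WARNING",
        validation_alias=AliasChoices("AGENT_LOGLEVEL", "LOG_LEVEL"),
    )


os.environ["LOG_LEVEL"] = "DEBUG"
assert _ExampleSettings().LOGLEVEL == "DEBUG"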
@@ -28,7 +35,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str = Field( ..., description="Exactly the same as director-v2's `SWARM_STACK_NAME` env var" ) - AGENT_VOLUMES_CLEANUP_S3_ENDPOINT: AnyHttpUrl + AGENT_VOLUMES_CLEANUP_S3_ENDPOINT: AnyHttpUrlLegacy AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY: str AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY: str AGENT_VOLUMES_CLEANUP_S3_BUCKET: str @@ -47,14 +54,14 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): AGENT_VOLUMES_CLEANUP_INTERVAL: timedelta = Field( timedelta(minutes=1), description="interval for running volumes removal" ) - AGENT_VOLUMES_CLENUP_BOOK_KEEPING_INTERVAL: timedelta = Field( + AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL: timedelta = Field( timedelta(minutes=1), description=( "interval at which to scan for unused volumes and keep track since " "they were detected as being unused" ), ) - AGENT_VOLUMES_CLENUP_REMOVE_VOLUMES_INACTIVE_FOR: timedelta = Field( + AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR: timedelta = Field( timedelta(minutes=65), description=( "if a volume is unused for more than this interval it can be removed. " @@ -68,10 +75,26 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): AGENT_DOCKER_NODE_ID: str = Field(..., description="used by the rabbitmq module") AGENT_RABBITMQ: RabbitSettings = Field( - auto_default_from_env=True, description="settings for service/rabbitmq" + description="settings for service/rabbitmq", + json_schema_extra={"auto_default_from_env": True}, + ) + + _validate_agent_volumes_cleanup_interval = validate_numeric_string_as_timedelta( + "AGENT_VOLUMES_CLEANUP_INTERVAL" + ) + + _validate_agent_volumes_cleanup_book_keeping_interval = ( + validate_numeric_string_as_timedelta( + "AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL" + ) + ) + _validate_agent_volumes_cleanup_remove_volumes_inactive_for = ( + validate_numeric_string_as_timedelta( + "AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR" + ) ) - @validator("LOGLEVEL") + @field_validator("LOGLEVEL") @classmethod def valid_log_level(cls, value) -> LogLevel: return LogLevel(cls.validate_log_level(value)) diff --git a/services/agent/src/simcore_service_agent/models/volumes.py b/services/agent/src/simcore_service_agent/models/volumes.py index ceb310486502..cf227bf69e9b 100644 --- a/services/agent/src/simcore_service_agent/models/volumes.py +++ b/services/agent/src/simcore_service_agent/models/volumes.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Final from models_library.api_schemas_directorv2.services import ( CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME, @@ -7,7 +8,7 @@ from models_library.projects_nodes_io import NodeID from models_library.services_types import RunID from models_library.users import UserID -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter class DynamicServiceVolumeLabels(BaseModel): @@ -22,7 +23,14 @@ class DynamicServiceVolumeLabels(BaseModel): def directory_name(self) -> str: return self.source[CHARS_IN_VOLUME_NAME_BEFORE_DIR_NAME:][::-1].strip("_") + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + class VolumeDetails(BaseModel): mountpoint: Path = Field(alias="Mountpoint") labels: DynamicServiceVolumeLabels = Field(alias="Labels") + + +VolumeDetailsAdapter: Final[TypeAdapter[VolumeDetails]] = TypeAdapter(VolumeDetails)
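Reviewer note: volumes.py above hoists a single `VolumeDetailsAdapter` to module scope behind `typing.Final`. Building a `TypeAdapter` compiles a validator in pydantic v2, so constructing it once at import time is cheaper than calling `TypeAdapter(VolumeDetails)` inside the per-volume code path that docker_utils.py (next hunk) runs. A small sketch of the idiom, with an illustrative `_Item` model:

from typing import Final

from pydantic import BaseModel, TypeAdapter


class _Item(BaseModel):
    name: str


# built once at import time; reusing it avoids re-compiling the validator per call
_ItemAdapter: Final[TypeAdapter[_Item]] = TypeAdapter(_Item)

assert _ItemAdapter.validate_python({"name": "x"}).name == "x"

diff --git a/services/agent/src/simcore_service_agent/services/docker_utils.py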
b/services/agent/src/simcore_service_agent/services/docker_utils.py index 181fe13a2759..83656783b558 100644 --- a/services/agent/src/simcore_service_agent/services/docker_utils.py +++ b/services/agent/src/simcore_service_agent/services/docker_utils.py @@ -15,7 +15,7 @@ from simcore_service_agent.core.settings import ApplicationSettings from starlette import status -from ..models.volumes import VolumeDetails +from ..models.volumes import VolumeDetails, VolumeDetailsAdapter from .backup import backup_volume from .instrumentation import get_instrumentation @@ -60,7 +60,7 @@ async def get_unused_dynamc_sidecar_volumes(docker: Docker) -> set[str]: async def get_volume_details(docker: Docker, *, volume_name: str) -> VolumeDetails: volume_details = await DockerVolume(docker, volume_name).show() - return VolumeDetails.parse_obj(volume_details) + return VolumeDetailsAdapter.validate_python(volume_details) @contextmanager diff --git a/services/agent/src/simcore_service_agent/services/volumes_manager.py b/services/agent/src/simcore_service_agent/services/volumes_manager.py index 526589a2c9cd..fa5a0cd1b173 100644 --- a/services/agent/src/simcore_service_agent/services/volumes_manager.py +++ b/services/agent/src/simcore_service_agent/services/volumes_manager.py @@ -174,9 +174,9 @@ async def _on_startup() -> None: volumes_manager = VolumesManager( app=app, - book_keeping_interval=settings.AGENT_VOLUMES_CLENUP_BOOK_KEEPING_INTERVAL, + book_keeping_interval=settings.AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL, volume_cleanup_interval=settings.AGENT_VOLUMES_CLEANUP_INTERVAL, - remove_volumes_inactive_for=settings.AGENT_VOLUMES_CLENUP_REMOVE_VOLUMES_INACTIVE_FOR.total_seconds(), + remove_volumes_inactive_for=settings.AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR.total_seconds(), ) volumes_manager.set_to_app_state(app) await volumes_manager.setup() diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index 4632ca841023..c71656e2c085 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -3,10 +3,11 @@ import pytest +from common_library.pydantic_networks_extension import HttpUrlLegacy from faker import Faker from models_library.basic_types import BootModeEnum from moto.server import ThreadedMotoServer -from pydantic import HttpUrl, parse_obj_as +from pydantic import HttpUrl, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from settings_library.r_clone import S3Provider @@ -63,9 +64,8 @@ def mock_environment( @pytest.fixture(scope="module") -def mocked_s3_server_url(mocked_aws_server: ThreadedMotoServer) -> HttpUrl: +def mocked_s3_server_url(mocked_aws_server: ThreadedMotoServer) -> HttpUrlLegacy: # pylint: disable=protected-access - return parse_obj_as( - HttpUrl, + return TypeAdapter(HttpUrlLegacy).validate_python( f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # noqa: SLF001 ) diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in index f63ab332f474..30b633a2e10d 100644 --- a/services/api-server/requirements/_base.in +++ b/services/api-server/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git 
a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index 8dcf9528f3be..cc1799cee075 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/api-server/requirements/dev.txt b/services/api-server/requirements/dev.txt index 2de1f4cc3160..5afc552d7533 100644 --- a/services/api-server/requirements/dev.txt +++ b/services/api-server/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore/ diff --git a/services/autoscaling/requirements/_base.in b/services/autoscaling/requirements/_base.in index ae362ec27444..231b8944c9df 100644 --- a/services/autoscaling/requirements/_base.in +++ b/services/autoscaling/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ../../../services/dask-sidecar/requirements/_dask-distributed.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/aws-library/requirements/_base.in diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index 927f1ccf5dd6..c1fa0708c901 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -48,6 +48,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -131,20 +133,8 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -353,7 +343,7 @@ psutil==6.0.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -364,7 +354,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -381,6 +370,26 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r 
requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -391,6 +400,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -467,7 +478,7 @@ sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -533,6 +544,7 @@ typing-extensions==4.11.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index 47379c4d69fa..7784681d8b9b 100644 --- a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -181,11 +185,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.4 # via moto pytest==8.3.3 @@ -217,7 +225,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../requirements/constraints.txt @@ -315,6 +325,7 @@ typing-extensions==4.11.0 # aws-sam-translator # cfn-lint # pydantic + # pydantic-core # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-iam diff --git a/services/autoscaling/requirements/ci.txt b/services/autoscaling/requirements/ci.txt index a6c8147ab306..74758ddb53ef 100644 --- a/services/autoscaling/requirements/ci.txt +++ b/services/autoscaling/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-dask-task-models-library @ ../../packages/dask-task-models-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore diff --git a/services/autoscaling/requirements/dev.txt b/services/autoscaling/requirements/dev.txt index 432e7ef62e9e..ab92769203f1 100644 --- a/services/autoscaling/requirements/dev.txt +++ b/services/autoscaling/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/autoscaling/src/simcore_service_autoscaling/_meta.py b/services/autoscaling/src/simcore_service_autoscaling/_meta.py index 22d3ea19043a..c421cfae966a 100644 --- 
a/services/autoscaling/src/simcore_service_autoscaling/_meta.py +++ b/services/autoscaling/src/simcore_service_autoscaling/_meta.py @@ -2,6 +2,7 @@ from models_library.basic_types import VersionStr, VersionTag from packaging.version import Version +from pydantic import TypeAdapter from servicelib.utils_meta import PackageInfo info: Final = PackageInfo(package_name="simcore-service-autoscaling") @@ -10,7 +11,9 @@ APP_NAME: Final[str] = info.project_name API_VERSION: Final[VersionStr] = info.__version__ VERSION: Final[Version] = info.version -API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) +API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python( + info.api_prefix_path_tag +) SUMMARY: Final[str] = info.get_summary() diff --git a/services/autoscaling/src/simcore_service_autoscaling/constants.py b/services/autoscaling/src/simcore_service_autoscaling/constants.py index 086c47b906fb..55fe8468bf1b 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/constants.py +++ b/services/autoscaling/src/simcore_service_autoscaling/constants.py @@ -2,39 +2,39 @@ from typing import Final from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2Tags -from pydantic import parse_obj_as +from pydantic import TypeAdapter -BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "pulling" -) -BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "ssm-command-id" -) +BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter( + AWSTagKey +).validate_python("pulling") +BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter( + AWSTagKey +).validate_python("ssm-command-id") PREPULL_COMMAND_NAME: Final[str] = "docker images pulling" DOCKER_JOIN_COMMAND_NAME: Final[str] = "docker swarm join" -DOCKER_JOIN_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.autoscaling.joined_command_sent" -) +DOCKER_JOIN_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter( + AWSTagKey +).validate_python("io.simcore.autoscaling.joined_command_sent") -DOCKER_PULL_COMMAND: Final[str] = ( - "docker compose -f /docker-pull.compose.yml -p buffering pull" -) +DOCKER_PULL_COMMAND: Final[ + str +] = "docker compose -f /docker-pull.compose.yml -p buffering pull" -PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.autoscaling.pre_pulled_images" -) +PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter( + AWSTagKey +).validate_python("io.simcore.autoscaling.pre_pulled_images") -BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.autoscaling.buffer_machine" +BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "io.simcore.autoscaling.buffer_machine" ) DEACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = { - BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "true") + BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("true") } ACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = { - BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "false") + BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("false") } PRE_PULLED_IMAGES_RE: Final[re.Pattern] = re.compile( - rf"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_\((\d+)\)" + rf"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_(\((\d+)\)|\d+)" ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/application.py b/services/autoscaling/src/simcore_service_autoscaling/core/application.py index ce10d22f7825..6261232bce5a 
100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/application.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/application.py @@ -44,7 +44,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: for name in _NOISY_LOGGERS: logging.getLogger(name).setLevel(quiet_level) - logger.info("app settings: %s", settings.json(indent=1)) + logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( debug=settings.AUTOSCALING_DEBUG, diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py index 398b1278806b..e4294631224a 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py @@ -1,12 +1,7 @@ -from typing import Any - -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class AutoscalingRuntimeError(OsparcErrorMixin, RuntimeError): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) - msg_template: str = "Autoscaling unexpected error" diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index 600a8cd507c5..d0763fd80708 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Any, ClassVar, Final, cast +from typing import Final, Self, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -14,14 +14,16 @@ from models_library.clusters import InternalClusterAuthentication from models_library.docker import DockerLabelKey from pydantic import ( + AliasChoices, AnyUrl, Field, NonNegativeInt, PositiveInt, - parse_obj_as, - root_validator, - validator, + TypeAdapter, + field_validator, + model_validator, ) +from pydantic_settings import SettingsConfigDict from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -42,10 +44,9 @@ class AutoscalingSSMSettings(SSMSettings): class AutoscalingEC2Settings(EC2Settings): - class Config(EC2Settings.Config): - env_prefix = AUTOSCALING_ENV_PREFIX - - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + env_prefix=AUTOSCALING_ENV_PREFIX, + json_schema_extra={ "examples": [ { f"{AUTOSCALING_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -54,7 +55,8 @@ class Config(EC2Settings.Config): f"{AUTOSCALING_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key", } ], - } + }, + ) class EC2InstancesSettings(BaseCustomSettings): @@ -95,7 +97,7 @@ class EC2InstancesSettings(BaseCustomSettings): EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( ..., - min_items=1, + min_length=1, description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " " this is required to start a new EC2 instance", @@ -132,7 +134,7 @@ class EC2InstancesSettings(BaseCustomSettings): description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", ) - 
@validator("EC2_INSTANCES_TIME_BEFORE_DRAINING") + @field_validator("EC2_INSTANCES_TIME_BEFORE_DRAINING") @classmethod def _ensure_draining_delay_time_is_in_range( cls, value: datetime.timedelta @@ -143,7 +145,7 @@ def _ensure_draining_delay_time_is_in_range( value = datetime.timedelta(minutes=1) return value - @validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION") + @field_validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION") @classmethod def _ensure_termination_delay_time_is_in_range( cls, value: datetime.timedelta @@ -154,14 +156,14 @@ def _ensure_termination_delay_time_is_in_range( value = datetime.timedelta(minutes=59) return value - @validator("EC2_INSTANCES_ALLOWED_TYPES") + @field_validator("EC2_INSTANCES_ALLOWED_TYPES") @classmethod def _check_valid_instance_names_and_not_empty( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings # issubclass raises TypeError if used on Aliases - parse_obj_as(list[InstanceTypeType], list(value)) + TypeAdapter(list[InstanceTypeType]).validate_python(list(value)) if not value: # NOTE: Field( ... , min_items=...) cannot be used to constrain the number of items in a dict @@ -225,36 +227,39 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- AUTOSCALING_DEBUG: bool = Field( - default=False, description="Debug mode", env=["AUTOSCALING_DEBUG", "DEBUG"] + default=False, + description="Debug mode", + validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"), ) - AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) + AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = 3000 AUTOSCALING_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.INFO, + validation_alias=AliasChoices("AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_SSM_ACCESS: AutoscalingSSMSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_NODES_MONITORING: NodesMonitoringSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_POLL_INTERVAL: datetime.timedelta = Field( @@ -263,13 +268,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "(defaults to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", ) - AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True) + AUTOSCALING_RABBITMQ: RabbitSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_REDIS: RedisSettings = Field(auto_default_from_env=True) + AUTOSCALING_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_REGISTRY: RegistrySettings | None = Field(auto_default_from_env=True) + AUTOSCALING_REGISTRY: RegistrySettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(auto_default_from_env=True) + AUTOSCALING_DASK: DaskMonitoringSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) AUTOSCALING_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True @@ -280,7 +293,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "but a docker node label named osparc-services-ready is attached", ) AUTOSCALING_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, ) AUTOSCALING_DOCKER_JOIN_DRAINED: bool = Field( @@ -298,21 +312,20 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self): # noqa: N802 return self.AUTOSCALING_LOGLEVEL - @validator("AUTOSCALING_LOGLEVEL", pre=True) + @field_validator("AUTOSCALING_LOGLEVEL", mode="before") @classmethod def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) - @root_validator() - @classmethod - def _exclude_both_dynamic_computational_mode(cls, values): + @model_validator(mode="after") + def exclude_both_dynamic_computational_mode(self) -> Self: if ( - values.get("AUTOSCALING_DASK") is not None - and values.get("AUTOSCALING_NODES_MONITORING") is not None + self.AUTOSCALING_DASK is not None + and self.AUTOSCALING_NODES_MONITORING is not None ): msg = "Autoscaling cannot be set to monitor both computational and dynamic services (both AUTOSCALING_DASK and AUTOSCALING_NODES_MONITORING are currently set!)" raise ValueError(msg) - return values + return self def get_application_settings(app: FastAPI) -> ApplicationSettings: diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py index e7fc947cba52..8d5ff16dd9a6 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py +++ 
b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py @@ -63,8 +63,8 @@ def _node_not_ready(node: Node) -> bool: - assert node.Status # nosec - return bool(node.Status.State != NodeState.ready) + assert node.status # nosec + return bool(node.status.state != NodeState.ready) async def _analyze_current_cluster( @@ -177,9 +177,9 @@ async def _cleanup_disconnected_nodes(app: FastAPI, cluster: Cluster) -> Cluster removeable_nodes = [ node for node in cluster.disconnected_nodes - if node.UpdatedAt + if node.updated_at and ( - (utc_now - arrow.get(node.UpdatedAt).datetime).total_seconds() + (utc_now - arrow.get(node.updated_at).datetime).total_seconds() > _DELAY_FOR_REMOVING_DISCONNECTED_NODES_S ) ] @@ -886,7 +886,7 @@ async def _find_drainable_nodes( if drainable_nodes: _logger.info( "the following nodes were found to be drainable: '%s'", - f"{[instance.node.Description.Hostname for instance in drainable_nodes if instance.node.Description]}", + f"{[instance.node.description.hostname for instance in drainable_nodes if instance.node.description]}", ) return drainable_nodes @@ -914,7 +914,7 @@ async def _deactivate_empty_nodes(app: FastAPI, cluster: Cluster) -> Cluster: if updated_nodes: _logger.info( "following nodes were set to drain: '%s'", - f"{[node.Description.Hostname for node in updated_nodes if node.Description]}", + f"{[node.description.hostname for node in updated_nodes if node.description]}", ) newly_drained_instances = [ AssociatedInstance(node=node, ec2_instance=instance.ec2_instance) @@ -964,7 +964,7 @@ async def _find_terminateable_instances( if terminateable_nodes: _logger.info( "the following nodes were found to be terminateable: '%s'", - f"{[instance.node.Description.Hostname for instance in terminateable_nodes if instance.node.Description]}", + f"{[instance.node.description.hostname for instance in terminateable_nodes if instance.node.description]}", ) return terminateable_nodes @@ -975,11 +975,11 @@ async def _try_scale_down_cluster(app: FastAPI, cluster: Cluster) -> Cluster: # instances found to be terminateable will now start the termination process. 
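# NOTE: the renames in this hunk follow the switch to the pydantic-v2 based
# docker models: generated attributes such as `node.Description.Hostname`
# become snake_case, e.g. `node.description.hostname`. A minimal sketch of
# the guard idiom used throughout this module (the variable name below is
# illustrative only, not part of the patch):
#     hostname = node.description.hostname if node.description else None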
new_terminating_instances = [] for instance in await _find_terminateable_instances(app, cluster): - assert instance.node.Description is not None # nosec + assert instance.node.description is not None # nosec with log_context( _logger, logging.INFO, - msg=f"termination process for {instance.node.Description.Hostname}:{instance.ec2_instance.id}", + msg=f"termination process for {instance.node.description.hostname}:{instance.ec2_instance.id}", ), log_catch(_logger, reraise=False): await utils_docker.set_node_begin_termination_process( get_docker_client(app), instance.node @@ -999,7 +999,7 @@ async def _try_scale_down_cluster(app: FastAPI, cluster: Cluster) -> Cluster: with log_context( _logger, logging.INFO, - msg=f"definitely terminate '{[i.node.Description.Hostname for i in instances_to_terminate if i.node.Description]}'", + msg=f"definitely terminate '{[i.node.description.hostname for i in instances_to_terminate if i.node.description]}'", ): await get_ec2_client(app).terminate_instances( [i.ec2_instance for i in instances_to_terminate] @@ -1103,7 +1103,7 @@ async def _drain_retired_nodes( if updated_nodes: _logger.info( "following nodes were set to drain: '%s'", - f"{[node.Description.Hostname for node in updated_nodes if node.Description]}", + f"{[node.description.hostname for node in updated_nodes if node.description]}", ) newly_drained_instances = [ AssociatedInstance(node=node, ec2_instance=instance.ec2_instance) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py index ecddfc5e8ec1..cc6dcef68a48 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py @@ -145,16 +145,16 @@ async def compute_node_used_resources( async def compute_cluster_used_resources( app: FastAPI, instances: list[AssociatedInstance] ) -> Resources: - list_of_used_resources = await logged_gather( + list_of_used_resources: list[Resources] = await logged_gather( *( ComputationalAutoscaling.compute_node_used_resources(app, i) for i in instances ) ) - counter = collections.Counter({k: 0 for k in Resources.__fields__}) + counter = collections.Counter({k: 0 for k in Resources.model_fields}) for result in list_of_used_resources: - counter.update(result.dict()) - return Resources.parse_obj(dict(counter)) + counter.update(result.model_dump()) + return Resources.model_validate(dict(counter)) @staticmethod async def compute_cluster_total_resources( diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py index 5e1c7e2f0c75..b547ce2bbd42 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py @@ -13,7 +13,7 @@ from dask_task_models_library.resource_constraints import DaskTaskResources from distributed.core import Status from models_library.clusters import InternalClusterAuthentication, TLSAuthentication -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from ..core.errors import ( DaskNoWorkersError, @@ -59,7 +59,7 @@ async def _scheduler_client( require_encryption=True, ) async with distributed.Client( - url, + f"{url}", asynchronous=True, 
timeout=f"{_DASK_SCHEDULER_CONNECT_TIMEOUT_S}", security=security, @@ -173,9 +173,9 @@ def _list_tasks( } async with _scheduler_client(scheduler_url, authentication) as client: - list_of_tasks: dict[dask.typing.Key, DaskTaskResources] = ( - await _wrap_client_async_routine(client.run_on_scheduler(_list_tasks)) - ) + list_of_tasks: dict[ + dask.typing.Key, DaskTaskResources + ] = await _wrap_client_async_routine(client.run_on_scheduler(_list_tasks)) _logger.debug("found unrunnable tasks: %s", list_of_tasks) return [ DaskTask( @@ -207,10 +207,10 @@ def _list_processing_tasks( return worker_to_processing_tasks async with _scheduler_client(scheduler_url, authentication) as client: - worker_to_tasks: dict[str, list[tuple[dask.typing.Key, DaskTaskResources]]] = ( - await _wrap_client_async_routine( - client.run_on_scheduler(_list_processing_tasks) - ) + worker_to_tasks: dict[ + str, list[tuple[dask.typing.Key, DaskTaskResources]] + ] = await _wrap_client_async_routine( + client.run_on_scheduler(_list_processing_tasks) ) _logger.debug("found processing tasks: %s", worker_to_tasks) tasks_per_worker = defaultdict(list) @@ -276,12 +276,12 @@ def _list_processing_tasks_on_worker( _logger.debug("looking for processing tasks for %s", f"{worker_url=}") # now get the used resources - worker_processing_tasks: list[tuple[dask.typing.Key, DaskTaskResources]] = ( - await _wrap_client_async_routine( - client.run_on_scheduler( - _list_processing_tasks_on_worker, worker_url=worker_url - ), - ) + worker_processing_tasks: list[ + tuple[dask.typing.Key, DaskTaskResources] + ] = await _wrap_client_async_routine( + client.run_on_scheduler( + _list_processing_tasks_on_worker, worker_url=worker_url + ), ) total_resources_used: collections.Counter[str] = collections.Counter() @@ -291,7 +291,9 @@ def _list_processing_tasks_on_worker( _logger.debug("found %s for %s", f"{total_resources_used=}", f"{worker_url=}") return Resources( cpus=total_resources_used.get("CPU", 0), - ram=parse_obj_as(ByteSize, total_resources_used.get("RAM", 0)), + ram=TypeAdapter(ByteSize).validate_python( + total_resources_used.get("RAM", 0) + ), ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py index d5fca4c3bb68..d7f69d50b54e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py @@ -56,8 +56,8 @@ async def associate_ec2_instances_with_nodes( non_associated_instances: list[EC2InstanceData] = [] def _find_node_with_name(node: Node) -> bool: - assert node.Description # nosec - return bool(node.Description.Hostname == docker_node_name) + assert node.description # nosec + return bool(node.description.hostname == docker_node_name) for instance_data in ec2_instances: try: diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py index 133708001ae3..dcbba97015d6 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py @@ -1,11 +1,12 @@ from collections.abc import Iterable from operator import itemgetter +from typing import Final -from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags +from aws_library.ec2 import 
AWS_TAG_VALUE_MAX_LENGTH, AWSTagKey, AWSTagValue, EC2Tags from fastapi import FastAPI from models_library.docker import DockerGenericTag from models_library.utils.json_serialization import json_dumps -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter from ..constants import ( ACTIVATED_BUFFER_MACHINE_EC2_TAGS, @@ -16,6 +17,8 @@ ) from ..modules.auto_scaling_mode_base import BaseAutoscaling +_NAME_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("Name") + def get_activated_buffer_ec2_tags( app: FastAPI, auto_scaling_mode: BaseAutoscaling @@ -29,8 +32,8 @@ def get_deactivated_buffer_ec2_tags( base_ec2_tags = ( auto_scaling_mode.get_ec2_tags(app) | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) - base_ec2_tags[AWSTagKey("Name")] = AWSTagValue( - f"{base_ec2_tags[AWSTagKey('Name')]}-buffer" + base_ec2_tags[_NAME_EC2_TAG_KEY] = AWSTagValue( + f"{base_ec2_tags[_NAME_EC2_TAG_KEY]}-buffer" ) return base_ec2_tags @@ -43,20 +46,26 @@ def dump_pre_pulled_images_as_tags(images: Iterable[DockerGenericTag]) -> EC2Tag # AWS Tag Values are limited to 256 characters so we chunk the images # into smaller chunks jsonized_images = json_dumps(images) - assert AWSTagValue.max_length # nosec - if len(jsonized_images) > AWSTagValue.max_length: + assert AWS_TAG_VALUE_MAX_LENGTH # nosec + if len(jsonized_images) > AWS_TAG_VALUE_MAX_LENGTH: # let's chunk the string - chunk_size = AWSTagValue.max_length + chunk_size = AWS_TAG_VALUE_MAX_LENGTH chunks = [ jsonized_images[i : i + chunk_size] for i in range(0, len(jsonized_images), chunk_size) ] return { - AWSTagKey(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): AWSTagValue(c) + TypeAdapter(AWSTagKey) + .validate_python(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_{i}"): TypeAdapter( + AWSTagValue + ) + .validate_python(c) for i, c in enumerate(chunks) } return { - PRE_PULLED_IMAGES_EC2_TAG_KEY: parse_obj_as(AWSTagValue, json_dumps(images)) + PRE_PULLED_IMAGES_EC2_TAG_KEY: TypeAdapter(AWSTagValue).validate_python( + json_dumps(images) + ) } @@ -64,7 +73,9 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]: # AWS Tag values are limited to 256 characters so we chunk the images if PRE_PULLED_IMAGES_EC2_TAG_KEY in tags: # read directly - return parse_raw_as(list[DockerGenericTag], tags[PRE_PULLED_IMAGES_EC2_TAG_KEY]) + return TypeAdapter(list[DockerGenericTag]).validate_json( + tags[PRE_PULLED_IMAGES_EC2_TAG_KEY] + ) assembled_json = "".join( map( @@ -80,5 +91,5 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]: ) ) if assembled_json: - return parse_raw_as(list[DockerGenericTag], assembled_json) + return TypeAdapter(list[DockerGenericTag]).validate_json(assembled_json) return [] diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py b/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py index cb1623cb476f..81781fb53469 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py @@ -42,7 +42,7 @@ async def progress_tasks_message( async def post_task_progress_message(app: FastAPI, task: Task, progress: float) -> None: with log_catch(logger, reraise=False): simcore_label_keys = StandardSimcoreDockerLabels.from_docker_task(task) - message = ProgressRabbitMessageNode.construct( + message = ProgressRabbitMessageNode.model_construct( node_id=simcore_label_keys.node_id, user_id=simcore_label_keys.user_id, project_id=simcore_label_keys.project_id, 
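Note on the API rename in this hunk: pydantic v2 renames `Model.construct()` to `Model.model_construct()`; both build an instance without running validation, so the caller must supply already-valid data. A minimal runnable sketch (the `ProgressMsg` model is hypothetical, used only to illustrate the call):

    from pydantic import BaseModel

    class ProgressMsg(BaseModel):
        node_id: str
        progress: float

    # no validation is run here, mirroring the semantics of v1's construct()
    msg = ProgressMsg.model_construct(node_id="n1", progress=0.5)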
@@ -55,7 +55,7 @@ async def post_task_progress_message(app: FastAPI, task: Task, progress: float) async def post_task_log_message(app: FastAPI, task: Task, log: str, level: int) -> None: with log_catch(logger, reraise=False): simcore_label_keys = StandardSimcoreDockerLabels.from_docker_task(task) - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( node_id=simcore_label_keys.node_id, user_id=simcore_label_keys.user_id, project_id=simcore_label_keys.project_id, @@ -79,15 +79,15 @@ async def create_autoscaling_status_message( origin = f"dynamic:node_labels={app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS}" elif app_settings.AUTOSCALING_DASK: origin = f"computational:scheduler_url={app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}" - return RabbitAutoscalingStatusMessage.construct( + return RabbitAutoscalingStatusMessage.model_construct( origin=origin, nodes_total=len(cluster.active_nodes) + len(cluster.drained_nodes) + len(cluster.buffer_drained_nodes), nodes_active=len(cluster.active_nodes), nodes_drained=len(cluster.drained_nodes) + len(cluster.buffer_drained_nodes), - cluster_total_resources=cluster_total_resources.dict(), - cluster_used_resources=cluster_used_resources.dict(), + cluster_total_resources=cluster_total_resources.model_dump(), + cluster_used_resources=cluster_used_resources.model_dump(), instances_pending=len(cluster.pending_ec2s), instances_running=len(cluster.active_nodes) + len(cluster.drained_nodes) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py b/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py index 026449759527..b5ad337c872e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/redis.py @@ -22,10 +22,10 @@ def create_lock_key_and_value(app: FastAPI) -> tuple[str, str]: elif app_settings.AUTOSCALING_DASK: lock_key_parts += [ "computational", - app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL, + f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}", ] lock_value = json.dumps( - {"scheduler_url": app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL} + {"scheduler_url": f"{app_settings.AUTOSCALING_DASK.DASK_MONITORING_URL}"} ) lock_key = ":".join(f"{k}" for k in lock_key_parts) return lock_key, lock_value diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py index 6449952decd8..4758c91a12f7 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py @@ -29,7 +29,7 @@ Task, TaskState, ) -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError from servicelib.docker_utils import to_datetime from servicelib.logging_utils import log_context from servicelib.utils import logged_gather @@ -59,25 +59,27 @@ _PENDING_DOCKER_TASK_MESSAGE: Final[str] = "pending task scheduling" _INSUFFICIENT_RESOURCES_DOCKER_TASK_ERR: Final[str] = "insufficient resources on" _NOT_SATISFIED_SCHEDULING_CONSTRAINTS_TASK_ERR: Final[str] = "no suitable node" -_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as( - DockerLabelKey, "io.simcore.osparc-services-ready" -) -_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as( - DockerLabelKey, 
f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed" +_OSPARC_SERVICE_READY_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter( + DockerLabelKey +).validate_python( + "io.simcore.osparc-services-ready", ) +_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter( + DockerLabelKey +).validate_python(f"{_OSPARC_SERVICE_READY_LABEL_KEY}-last-changed") _OSPARC_SERVICE_READY_LABEL_KEYS: Final[list[DockerLabelKey]] = [ _OSPARC_SERVICE_READY_LABEL_KEY, _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY, ] -_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as( - DockerLabelKey, "io.simcore.osparc-node-found-empty" -) +_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter( + DockerLabelKey +).validate_python("io.simcore.osparc-node-found-empty") -_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = parse_obj_as( - DockerLabelKey, "io.simcore.osparc-node-termination-started" -) +_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: Final[DockerLabelKey] = TypeAdapter( + DockerLabelKey +).validate_python("io.simcore.osparc-node-termination-started") async def get_monitored_nodes( @@ -86,15 +88,13 @@ async def get_monitored_nodes( node_label_filters = [f"{label}=true" for label in node_labels] + [ f"{label}" for label in _OSPARC_SERVICE_READY_LABEL_KEYS ] - return parse_obj_as( - list[Node], - await docker_client.nodes.list(filters={"node.label": node_label_filters}), + return TypeAdapter(list[Node]).validate_python( + await docker_client.nodes.list(filters={"node.label": node_label_filters}) ) async def get_worker_nodes(docker_client: AutoscalingDocker) -> list[Node]: - return parse_obj_as( - list[Node], + return TypeAdapter(list[Node]).validate_python( await docker_client.nodes.list( filters={ "role": ["worker"], @@ -102,7 +102,7 @@ async def get_worker_nodes(docker_client: AutoscalingDocker) -> list[Node]: f"{label}" for label in _OSPARC_SERVICE_READY_LABEL_KEYS ], } - ), + ) ) @@ -112,8 +112,8 @@ async def remove_nodes( """removes docker nodes that are in the down state (unless force is used and they will be forcibly removed)""" def _check_if_node_is_removable(node: Node) -> bool: - if node.Status and node.Status.State: - return node.Status.State in [ + if node.status and node.status.state: + return node.status.state in [ NodeState.down, NodeState.disconnected, NodeState.unknown, @@ -129,30 +129,30 @@ def _check_if_node_is_removable(node: Node) -> bool: n for n in nodes if (force is True) or _check_if_node_is_removable(n) ] for node in nodes_that_need_removal: - assert node.ID # nosec - with log_context(logger, logging.INFO, msg=f"remove {node.ID=}"): - await docker_client.nodes.remove(node_id=node.ID, force=force) + assert node.id # nosec + with log_context(logger, logging.INFO, msg=f"remove {node.id=}"): + await docker_client.nodes.remove(node_id=node.id, force=force) return nodes_that_need_removal def _is_task_waiting_for_resources(task: Task) -> bool: # NOTE: https://docs.docker.com/engine/swarm/how-swarm-mode-works/swarm-task-states/ with log_context( - logger, level=logging.DEBUG, msg=f"_is_task_waiting_for_resources: {task.ID}" + logger, level=logging.DEBUG, msg=f"_is_task_waiting_for_resources: {task.id}" ): if ( - not task.Status - or not task.Status.State - or not task.Status.Message - or not task.Status.Err + not task.status + or not task.status.state + or not task.status.message + or not task.status.err ): return False return ( - task.Status.State == TaskState.pending - and task.Status.Message == _PENDING_DOCKER_TASK_MESSAGE + 
task.status.state == TaskState.pending + and task.status.message == _PENDING_DOCKER_TASK_MESSAGE and ( - _INSUFFICIENT_RESOURCES_DOCKER_TASK_ERR in task.Status.Err - or _NOT_SATISFIED_SCHEDULING_CONSTRAINTS_TASK_ERR in task.Status.Err + _INSUFFICIENT_RESOURCES_DOCKER_TASK_ERR in task.status.err + or _NOT_SATISFIED_SCHEDULING_CONSTRAINTS_TASK_ERR in task.status.err ) ) @@ -160,21 +160,21 @@ def _is_task_waiting_for_resources(task: Task) -> bool: async def _associated_service_has_no_node_placement_contraints( docker_client: AutoscalingDocker, task: Task ) -> bool: - assert task.ServiceID # nosec - service_inspect = parse_obj_as( - Service, await docker_client.services.inspect(task.ServiceID) + assert task.service_id # nosec + service_inspect = TypeAdapter(Service).validate_python( + await docker_client.services.inspect(task.service_id) ) - assert service_inspect.Spec # nosec - assert service_inspect.Spec.TaskTemplate # nosec + assert service_inspect.spec # nosec + assert service_inspect.spec.task_template # nosec if ( - not service_inspect.Spec.TaskTemplate.Placement - or not service_inspect.Spec.TaskTemplate.Placement.Constraints + not service_inspect.spec.task_template.placement + or not service_inspect.spec.task_template.placement.constraints ): return True # parse the placement constraints service_placement_constraints = ( - service_inspect.Spec.TaskTemplate.Placement.Constraints + service_inspect.spec.task_template.placement.constraints ) for constraint in service_placement_constraints: # is of type node.id==alskjladskjs or node.hostname==thiscomputerhostname or node.role==manager, sometimes with spaces... @@ -186,15 +186,13 @@ async def _associated_service_has_no_node_placement_contraints( def _by_created_dt(task: Task) -> datetime.datetime: - # NOTE: SAFE implementation to extract task.CreatedAt as datetime for comparison - if task.CreatedAt: + # NOTE: SAFE implementation to extract task.created_at as datetime for comparison + if task.created_at: with suppress(ValueError): - created_at = to_datetime(task.CreatedAt) - created_at_utc: datetime.datetime = created_at.replace( - tzinfo=datetime.timezone.utc - ) + created_at = to_datetime(task.created_at) + created_at_utc: datetime.datetime = created_at.replace(tzinfo=datetime.UTC) return created_at_utc - return datetime.datetime.now(datetime.timezone.utc) + return datetime.datetime.now(datetime.UTC) async def pending_service_tasks_with_insufficient_resources( @@ -209,20 +207,19 @@ async def pending_service_tasks_with_insufficient_resources( - have an error message with "insufficient resources" - are not scheduled on any node """ - tasks = parse_obj_as( - list[Task], + tasks = TypeAdapter(list[Task]).validate_python( await docker_client.tasks.list( filters={ "desired-state": "running", "label": service_labels, } - ), + ) ) sorted_tasks = sorted(tasks, key=_by_created_dt) logger.debug( "found following tasks that might trigger autoscaling: %s", - [task.ID for task in tasks], + [task.id for task in tasks], ) return [ @@ -238,13 +235,13 @@ async def pending_service_tasks_with_insufficient_resources( def get_node_total_resources(node: Node) -> Resources: - assert node.Description # nosec - assert node.Description.Resources # nosec - assert node.Description.Resources.NanoCPUs # nosec - assert node.Description.Resources.MemoryBytes # nosec + assert node.description # nosec + assert node.description.resources # nosec + assert node.description.resources.nano_cp_us # nosec + assert node.description.resources.memory_bytes # nosec return Resources( - 
cpus=node.Description.Resources.NanoCPUs / _NANO_CPU, - ram=ByteSize(node.Description.Resources.MemoryBytes), + cpus=node.description.resources.nano_cp_us / _NANO_CPU, + ram=ByteSize(node.description.resources.memory_bytes), ) @@ -254,47 +251,46 @@ async def compute_cluster_total_resources(nodes: list[Node]) -> Resources: """ cluster_resources_counter = collections.Counter({"ram": 0, "cpus": 0}) for node in nodes: - assert node.Description # nosec - assert node.Description.Resources # nosec - assert node.Description.Resources.NanoCPUs # nosec + assert node.description # nosec + assert node.description.resources # nosec + assert node.description.resources.nano_cp_us # nosec cluster_resources_counter.update( { - "ram": node.Description.Resources.MemoryBytes, - "cpus": node.Description.Resources.NanoCPUs / _NANO_CPU, + "ram": node.description.resources.memory_bytes, + "cpus": node.description.resources.nano_cp_us / _NANO_CPU, } ) - return Resources.parse_obj(dict(cluster_resources_counter)) + return Resources.model_validate(dict(cluster_resources_counter)) def get_max_resources_from_docker_task(task: Task) -> Resources: """returns the highest values for resources based on both docker reservations and limits""" - assert task.Spec # nosec - if task.Spec.Resources: + assert task.spec # nosec + if task.spec.resources: return Resources( cpus=max( ( - task.Spec.Resources.Reservations - and task.Spec.Resources.Reservations.NanoCPUs + task.spec.resources.reservations + and task.spec.resources.reservations.nano_cp_us or 0 ), ( - task.Spec.Resources.Limits - and task.Spec.Resources.Limits.NanoCPUs + task.spec.resources.limits + and task.spec.resources.limits.nano_cp_us or 0 ), ) / _NANO_CPU, - ram=parse_obj_as( - ByteSize, + ram=TypeAdapter(ByteSize).validate_python( max( - task.Spec.Resources.Reservations - and task.Spec.Resources.Reservations.MemoryBytes + task.spec.resources.reservations + and task.spec.resources.reservations.memory_bytes or 0, - task.Spec.Resources.Limits - and task.Spec.Resources.Limits.MemoryBytes + task.spec.resources.limits + and task.spec.resources.limits.memory_bytes or 0, - ), + ) ), ) return Resources(cpus=0, ram=ByteSize(0)) @@ -304,21 +300,21 @@ async def get_task_instance_restriction( docker_client: AutoscalingDocker, task: Task ) -> InstanceTypeType | None: with contextlib.suppress(ValidationError): - assert task.ServiceID # nosec - service_inspect = parse_obj_as( - Service, await docker_client.services.inspect(task.ServiceID) + assert task.service_id # nosec + service_inspect = TypeAdapter(Service).validate_python( + await docker_client.services.inspect(task.service_id) ) - assert service_inspect.Spec # nosec - assert service_inspect.Spec.TaskTemplate # nosec + assert service_inspect.spec # nosec + assert service_inspect.spec.task_template # nosec if ( - not service_inspect.Spec.TaskTemplate.Placement - or not service_inspect.Spec.TaskTemplate.Placement.Constraints + not service_inspect.spec.task_template.placement + or not service_inspect.spec.task_template.placement.constraints ): return None # parse the placement constraints service_placement_constraints = ( - service_inspect.Spec.TaskTemplate.Placement.Constraints + service_inspect.spec.task_template.placement.constraints ) # should be node.labels.{} node_label_to_find = ( @@ -326,8 +322,8 @@ async def get_task_instance_restriction( ) for constraint in service_placement_constraints: if constraint.startswith(node_label_to_find): - return parse_obj_as( - InstanceTypeType, constraint.removeprefix(node_label_to_find) # 
type: ignore[arg-type] + return TypeAdapter(InstanceTypeType).validate_python( + constraint.removeprefix(node_label_to_find) ) return None @@ -347,30 +343,29 @@ async def compute_node_used_resources( service_labels: list[DockerLabelKey] | None = None, ) -> Resources: cluster_resources_counter = collections.Counter({"ram": 0, "cpus": 0}) - assert node.ID # nosec - task_filters: dict[str, str | list[DockerLabelKey]] = {"node": node.ID} + assert node.id # nosec + task_filters: dict[str, str | list[DockerLabelKey]] = {"node": node.id} if service_labels is not None: task_filters |= {"label": service_labels} - all_tasks_on_node = parse_obj_as( - list[Task], - await docker_client.tasks.list(filters=task_filters), + all_tasks_on_node = TypeAdapter(list[Task]).validate_python( + await docker_client.tasks.list(filters=task_filters) ) for task in all_tasks_on_node: - assert task.Status # nosec + assert task.status # nosec if ( - task.Status.State in _TASK_STATUS_WITH_ASSIGNED_RESOURCES - and task.Spec - and task.Spec.Resources - and task.Spec.Resources.Reservations + task.status.state in _TASK_STATUS_WITH_ASSIGNED_RESOURCES + and task.spec + and task.spec.resources + and task.spec.resources.reservations ): - task_reservations = task.Spec.Resources.Reservations.dict(exclude_none=True) cluster_resources_counter.update( { - "ram": task_reservations.get("MemoryBytes", 0), - "cpus": task_reservations.get("NanoCPUs", 0) / _NANO_CPU, + "ram": task.spec.resources.reservations.memory_bytes or 0, + "cpus": (task.spec.resources.reservations.nano_cp_us or 0) + / _NANO_CPU, } ) - return Resources.parse_obj(dict(cluster_resources_counter)) + return Resources.model_validate(dict(cluster_resources_counter)) async def compute_cluster_used_resources( @@ -380,11 +375,11 @@ async def compute_cluster_used_resources( list_of_used_resources = await logged_gather( *(compute_node_used_resources(docker_client, node) for node in nodes) ) - counter = collections.Counter({k: 0 for k in Resources.__fields__}) + counter = collections.Counter({k: 0 for k in Resources.model_fields}) for result in list_of_used_resources: - counter.update(result.dict()) + counter.update(result.model_dump()) - return Resources.parse_obj(dict(counter)) + return Resources.model_validate(dict(counter)) _COMMAND_TIMEOUT_S = 10 @@ -446,10 +441,7 @@ def write_compose_file_command( }, } compose_yaml = yaml.safe_dump(compose) - write_compose_file_cmd = " ".join( - ["echo", f'"{compose_yaml}"', ">", f"{_PRE_PULL_COMPOSE_PATH}"] - ) - return write_compose_file_cmd + return " ".join(["echo", f'"{compose_yaml}"', ">", f"{_PRE_PULL_COMPOSE_PATH}"]) def get_docker_pull_images_on_start_bash_command( @@ -504,10 +496,10 @@ async def find_node_with_name( if not list_of_nodes: return None # note that there might be several nodes with a common_prefixed name. 
so now we want exact matching - parsed_list_of_nodes = parse_obj_as(list[Node], list_of_nodes) + parsed_list_of_nodes = TypeAdapter(list[Node]).validate_python(list_of_nodes) for node in parsed_list_of_nodes: - assert node.Description # nosec - if node.Description.Hostname == name: + assert node.description # nosec + if node.description.hostname == name: return node return None @@ -521,39 +513,41 @@ async def tag_node( available: bool, ) -> Node: with log_context( - logger, logging.DEBUG, msg=f"tagging {node.ID=} with {tags=} and {available=}" + logger, logging.DEBUG, msg=f"tagging {node.id=} with {tags=} and {available=}" ): - assert node.ID # nosec + assert node.id # nosec - latest_version_node = parse_obj_as( - Node, await docker_client.nodes.inspect(node_id=node.ID) + latest_version_node = TypeAdapter(Node).validate_python( + await docker_client.nodes.inspect(node_id=node.id) ) - assert latest_version_node.Version # nosec - assert latest_version_node.Version.Index # nosec - assert latest_version_node.Spec # nosec - assert latest_version_node.Spec.Role # nosec + assert latest_version_node.version # nosec + assert latest_version_node.version.index # nosec + assert latest_version_node.spec # nosec + assert latest_version_node.spec.role # nosec # updating now should work nicely await docker_client.nodes.update( - node_id=node.ID, - version=latest_version_node.Version.Index, + node_id=node.id, + version=latest_version_node.version.index, spec={ "Availability": "active" if available else "drain", "Labels": tags, - "Role": latest_version_node.Spec.Role.value, + "Role": latest_version_node.spec.role.value, }, ) - return parse_obj_as(Node, await docker_client.nodes.inspect(node_id=node.ID)) + return TypeAdapter(Node).validate_python( + await docker_client.nodes.inspect(node_id=node.id) + ) async def set_node_availability( docker_client: AutoscalingDocker, node: Node, *, available: bool ) -> Node: - assert node.Spec # nosec + assert node.spec # nosec return await tag_node( docker_client, node, - tags=cast(dict[DockerLabelKey, str], node.Spec.Labels), + tags=cast(dict[DockerLabelKey, str], node.spec.labels), available=available, ) @@ -576,21 +570,21 @@ def get_new_node_docker_tags( def is_node_ready_and_available(node: Node, *, availability: Availability) -> bool: - assert node.Status # nosec - assert node.Spec # nosec + assert node.status # nosec + assert node.spec # nosec return bool( - node.Status.State == NodeState.ready and node.Spec.Availability == availability + node.status.state == NodeState.ready and node.spec.availability == availability ) def is_node_osparc_ready(node: Node) -> bool: if not is_node_ready_and_available(node, availability=Availability.active): return False - assert node.Spec # nosec + assert node.spec # nosec return bool( - node.Spec.Labels - and _OSPARC_SERVICE_READY_LABEL_KEY in node.Spec.Labels - and node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] == "true" + node.spec.labels + and _OSPARC_SERVICE_READY_LABEL_KEY in node.spec.labels + and node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] == "true" ) @@ -601,8 +595,8 @@ async def set_node_osparc_ready( *, ready: bool, ) -> Node: - assert node.Spec # nosec - new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.Spec.Labels)) + assert node.spec # nosec + new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.spec.labels)) new_tags[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" if ready else "false" new_tags[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = arrow.utcnow().isoformat() # NOTE: docker drain sometimes impedes 
performance when undraining see https://github.com/ITISFoundation/osparc-simcore/issues/5339 @@ -616,10 +610,10 @@ async def set_node_osparc_ready( def get_node_last_readyness_update(node: Node) -> datetime.datetime: - assert node.Spec # nosec - assert node.Spec.Labels # nosec + assert node.spec # nosec + assert node.spec.labels # nosec return arrow.get( - node.Spec.Labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] + node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] ).datetime @@ -629,8 +623,8 @@ async def set_node_found_empty( *, empty: bool, ) -> Node: - assert node.Spec # nosec - new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.Spec.Labels)) + assert node.spec # nosec + new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.spec.labels)) if empty: new_tags[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY] = arrow.utcnow().isoformat() else: @@ -639,25 +633,25 @@ async def set_node_found_empty( docker_client, node, tags=new_tags, - available=bool(node.Spec.Availability is Availability.active), + available=bool(node.spec.availability is Availability.active), ) async def get_node_empty_since(node: Node) -> datetime.datetime | None: """returns the last time when the node was found empty or None if it was not empty""" - assert node.Spec # nosec - assert node.Spec.Labels # nosec - if _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in node.Spec.Labels: + assert node.spec # nosec + assert node.spec.labels # nosec + if _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in node.spec.labels: return None - return arrow.get(node.Spec.Labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY]).datetime + return arrow.get(node.spec.labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY]).datetime async def set_node_begin_termination_process( docker_client: AutoscalingDocker, node: Node ) -> Node: """sets the node to drain and adds a docker label with the time""" - assert node.Spec # nosec - new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.Spec.Labels)) + assert node.spec # nosec + new_tags = deepcopy(cast(dict[DockerLabelKey, str], node.spec.labels)) new_tags[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] = arrow.utcnow().isoformat() return await tag_node( @@ -669,12 +663,12 @@ async def set_node_begin_termination_process( def get_node_termination_started_since(node: Node) -> datetime.datetime | None: - assert node.Spec # nosec - assert node.Spec.Labels # nosec - if _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY not in node.Spec.Labels: + assert node.spec # nosec + assert node.spec.labels # nosec + if _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY not in node.spec.labels: return None return arrow.get( - node.Spec.Labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] + node.spec.labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] ).datetime @@ -685,8 +679,8 @@ async def attach_node( *, tags: dict[DockerLabelKey, str], ) -> Node: - assert node.Spec # nosec - current_tags = cast(dict[DockerLabelKey, str], node.Spec.Labels or {}) + assert node.spec # nosec + current_tags = cast(dict[DockerLabelKey, str], node.spec.labels or {}) new_tags = current_tags | tags | {_OSPARC_SERVICE_READY_LABEL_KEY: "false"} new_tags[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = arrow.utcnow().isoformat() return await tag_node( diff --git a/services/autoscaling/tests/manual/.env-devel b/services/autoscaling/tests/manual/.env-devel index a7069054e6af..e654a4df5236 100644 --- a/services/autoscaling/tests/manual/.env-devel +++ b/services/autoscaling/tests/manual/.env-devel @@ -3,7 +3,7 @@ AUTOSCALING_DRAIN_NODES_WITH_LABELS=False AUTOSCALING_DOCKER_JOIN_DRAINED=True 
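# NOTE: under pydantic v2, a bare number of seconds (e.g. `10`) is no longer
# reliably parsed into a timedelta from the environment; use "HH:MM:SS"
# (as in the AUTOSCALING_POLL_INTERVAL change below, "00:00:10" == 10s)
# or an ISO 8601 duration such as "PT10S".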
AUTOSCALING_WAIT_FOR_CLOUD_INIT_BEFORE_WARM_BUFFER_ACTIVATION=False AUTOSCALING_LOGLEVEL=INFO -AUTOSCALING_POLL_INTERVAL=10 +AUTOSCALING_POLL_INTERVAL="00:00:10" AUTOSCALING_EC2_ACCESS_KEY_ID=XXXXXXXXXX AUTOSCALING_EC2_SECRET_ACCESS_KEY=XXXXXXXXXX AUTOSCALING_EC2_ENDPOINT=null diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index b705ea85b783..5492abb86e3c 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -43,8 +43,9 @@ ObjectVersion, ResourceObject, Service, + TaskSpec, ) -from pydantic import ByteSize, PositiveInt, parse_obj_as +from pydantic import ByteSize, PositiveInt, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.logging_tools import log_context @@ -118,7 +119,7 @@ def mocked_ec2_server_envs( # NOTE: overrides the EC2Settings with what autoscaling expects changed_envs: EnvVarsDict = { f"{AUTOSCALING_ENV_PREFIX}{k}": v - for k, v in mocked_ec2_server_settings.dict().items() + for k, v in mocked_ec2_server_settings.model_dump().items() } return setenvs_from_dict(monkeypatch, changed_envs) # type: ignore @@ -173,7 +174,8 @@ def app_with_docker_join_drained( @pytest.fixture(scope="session") def fake_ssm_settings() -> SSMSettings: - return SSMSettings(**SSMSettings.Config.schema_extra["examples"][0]) + assert "json_schema_extra" in SSMSettings.model_config + return SSMSettings(**SSMSettings.model_config["json_schema_extra"]["examples"][0]) @pytest.fixture @@ -213,7 +215,6 @@ def app_environment( external_envfile_dict: EnvVarsDict, ) -> EnvVarsDict: # SEE https://faker.readthedocs.io/en/master/providers/faker.providers.internet.html?highlight=internet#faker-providers-internet - if external_envfile_dict: delenvs_from_dict(monkeypatch, mock_env_devel_environment, raising=False) return setenvs_from_dict(monkeypatch, {**external_envfile_dict}) @@ -236,7 +237,9 @@ def app_environment( "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) for ec2_type_name in aws_allowed_ec2_instance_type_names } @@ -267,7 +270,9 @@ def mocked_ec2_instances_envs( "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) | {"ami_id": aws_ami_id} for ec2_type_name in aws_allowed_ec2_instance_type_names @@ -436,49 +441,51 @@ async def host_node( docker_swarm: None, async_docker_client: aiodocker.Docker, ) -> AsyncIterator[DockerNode]: - nodes = parse_obj_as(list[DockerNode], await async_docker_client.nodes.list()) + nodes = TypeAdapter(list[DockerNode]).validate_python( + await async_docker_client.nodes.list() + ) assert len(nodes) == 1 # keep state of node for later revert old_node = deepcopy(nodes[0]) - assert old_node.ID - assert old_node.Spec - assert old_node.Spec.Role - assert old_node.Spec.Availability - assert old_node.Version - assert old_node.Version.Index - labels = old_node.Spec.Labels or {} + assert old_node.id + assert old_node.spec + assert old_node.spec.role + assert old_node.spec.availability + assert old_node.version + assert old_node.version.index + labels = old_node.spec.labels or {} # ensure we have the necessary labels await 
async_docker_client.nodes.update( - node_id=old_node.ID, - version=old_node.Version.Index, + node_id=old_node.id, + version=old_node.version.index, spec={ - "Availability": old_node.Spec.Availability.value, + "Availability": old_node.spec.availability.value, "Labels": labels | { _OSPARC_SERVICE_READY_LABEL_KEY: "true", _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: arrow.utcnow().isoformat(), }, - "Role": old_node.Spec.Role.value, + "Role": old_node.spec.role.value, }, ) - modified_host_node = parse_obj_as( - DockerNode, await async_docker_client.nodes.inspect(node_id=old_node.ID) + modified_host_node = TypeAdapter(DockerNode).validate_python( + await async_docker_client.nodes.inspect(node_id=old_node.id) ) yield modified_host_node # revert state - current_node = parse_obj_as( - DockerNode, await async_docker_client.nodes.inspect(node_id=old_node.ID) + current_node = TypeAdapter(DockerNode).validate_python( + await async_docker_client.nodes.inspect(node_id=old_node.id) ) - assert current_node.ID - assert current_node.Version - assert current_node.Version.Index + assert current_node.id + assert current_node.version + assert current_node.version.index await async_docker_client.nodes.update( - node_id=current_node.ID, - version=current_node.Version.Index, + node_id=current_node.id, + version=current_node.version.index, spec={ - "Availability": old_node.Spec.Availability.value, - "Labels": old_node.Spec.Labels, - "Role": old_node.Spec.Role.value, + "Availability": old_node.spec.availability.value, + "Labels": old_node.spec.labels, + "Role": old_node.spec.role.value, }, ) @@ -596,46 +603,54 @@ async def _creator( labels=base_labels, # type: ignore ) assert service - service = parse_obj_as( - Service, await async_docker_client.services.inspect(service["ID"]) + service = TypeAdapter(Service).validate_python( + await async_docker_client.services.inspect(service["ID"]) ) - assert service.Spec + assert service.spec ctx.logger.info( "%s", - f"service {service.ID} with {service.Spec.Name} created", + f"service {service.id} with {service.spec.name} created", ) - assert service.Spec.Labels == base_labels + assert service.spec.labels == base_labels created_services.append(service) # get more info on that service - assert service.Spec.Name == service_name + assert service.spec.name == service_name + + original_task_template_model = TypeAdapter(TaskSpec).validate_python( + task_template + ) + excluded_paths = { - "ForceUpdate", - "Runtime", - "root['ContainerSpec']['Isolation']", + "force_update", + "runtime", + "root['container_spec']['isolation']", } if not base_labels: - excluded_paths.add("root['ContainerSpec']['Labels']") - for reservation in ["MemoryBytes", "NanoCPUs"]: + excluded_paths.add("root['container_spec']['labels']") + for reservation in ["memory_bytes", "nano_cp_us"]: if ( - task_template.get("Resources", {}) - .get("Reservations", {}) - .get(reservation, 0) + original_task_template_model.resources + and original_task_template_model.resources.reservations + and getattr( + original_task_template_model.resources.reservations, reservation + ) == 0 ): # NOTE: if a 0 memory reservation is done, docker removes it from the task inspection excluded_paths.add( - f"root['Resources']['Reservations']['{reservation}']" + f"root['resources']['reservations']['{reservation}']" ) - assert service.Spec.TaskTemplate + + assert service.spec.task_template diff = DeepDiff( - task_template, - service.Spec.TaskTemplate.dict(exclude_unset=True), + original_task_template_model.model_dump(exclude_unset=True), + 
service.spec.task_template.model_dump(exclude_unset=True), exclude_paths=list(excluded_paths), ) assert not diff, f"{diff}" - assert service.Spec.Labels == base_labels + assert service.spec.labels == base_labels await _assert_wait_for_service_state( async_docker_client, service, [wait_for_service_state] ) @@ -644,7 +659,7 @@ async def _creator( yield _creator await asyncio.gather( - *(async_docker_client.services.delete(s.ID) for s in created_services), + *(async_docker_client.services.delete(s.id) for s in created_services), return_exceptions=True, ) @@ -656,15 +671,15 @@ async def _creator( stop=stop_after_delay(30), ) async def _check_service_task_gone(service: Service) -> None: - assert service.Spec + assert service.spec with log_context( logging.INFO, - msg=f"check service {service.ID}:{service.Spec.Name} is really gone", + msg=f"check service {service.id}:{service.spec.name} is really gone", ): assert not await async_docker_client.containers.list( all=True, filters={ - "label": [f"com.docker.swarm.service.id={service.ID}"], + "label": [f"com.docker.swarm.service.id={service.id}"], }, ) @@ -680,7 +695,7 @@ async def _assert_wait_for_service_state( async_docker_client: aiodocker.Docker, service: Service, expected_states: list[str] ) -> None: with log_context( - logging.INFO, msg=f"wait for service {service.ID} to become {expected_states}" + logging.INFO, msg=f"wait for service {service.id} to become {expected_states}" ) as ctx: number_of_success = {"count": 0} @@ -694,9 +709,9 @@ async def _assert_wait_for_service_state( ) async def _() -> None: services = await async_docker_client.services.list( - filters={"id": service.ID} + filters={"id": service.id} ) - assert services, f"no service with {service.ID}!" + assert services, f"no service with {service.id}!" 
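# NOTE (editor illustration, not part of the patch): the recurring change in
# these fixtures is the pydantic v1 -> v2 validation API, where the free
# function `parse_obj_as(T, data)` is replaced by
# `TypeAdapter(T).validate_python(data)`. A minimal sketch of the
# equivalence, assuming only a plain pydantic v2 installation:
from pydantic import ByteSize, TypeAdapter

_BYTESIZE_ADAPTER = TypeAdapter(ByteSize)  # building an adapter compiles its validator once

ram = _BYTESIZE_ADAPTER.validate_python("128GiB")  # v1 spelling: parse_obj_as(ByteSize, "128GiB")
assert int(ram) == 128 * 1024**3
# The same adapter works for container types, e.g.
# TypeAdapter(list[int]).validate_python(["1", 2]) returns [1, 2].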
assert len(services) == 1 found_service = services[0] @@ -762,7 +777,7 @@ def host_memory_total() -> ByteSize: def osparc_docker_label_keys( faker: Faker, ) -> StandardSimcoreDockerLabels: - return StandardSimcoreDockerLabels.parse_obj( + return StandardSimcoreDockerLabels.model_validate( { "user_id": faker.pyint(), "project_id": faker.uuid4(), @@ -838,11 +853,11 @@ async def _fake_set_node_availability( docker_client: AutoscalingDocker, node: DockerNode, *, available: bool ) -> DockerNode: returned_node = deepcopy(node) - assert returned_node.Spec - returned_node.Spec.Availability = ( + assert returned_node.spec + returned_node.spec.availability = ( Availability.active if available else Availability.drain ) - returned_node.UpdatedAt = datetime.datetime.now( + returned_node.updated_at = datetime.datetime.now( tz=datetime.timezone.utc ).isoformat() return returned_node @@ -864,9 +879,9 @@ async def fake_tag_node( available: bool, ) -> DockerNode: updated_node = deepcopy(node) - assert updated_node.Spec - updated_node.Spec.Labels = deepcopy(cast(dict[str, str], tags)) - updated_node.Spec.Availability = ( + assert updated_node.spec + updated_node.spec.labels = deepcopy(cast(dict[str, str], tags)) + updated_node.spec.availability = ( Availability.active if available else Availability.drain ) return updated_node diff --git a/services/autoscaling/tests/unit/test_api_health.py b/services/autoscaling/tests/unit/test_api_health.py index 353aabf31a41..e3c22afddac1 100644 --- a/services/autoscaling/tests/unit/test_api_health.py +++ b/services/autoscaling/tests/unit/test_api_health.py @@ -42,7 +42,7 @@ async def test_status_no_rabbit( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is False @@ -66,7 +66,7 @@ async def test_status_no_ssm( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is False @@ -94,7 +94,7 @@ async def test_status( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True @@ -114,7 +114,7 @@ async def test_status( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py index 9315c8fcfd1e..10050a565949 100644 --- a/services/autoscaling/tests/unit/test_core_settings.py +++ b/services/autoscaling/tests/unit/test_core_settings.py @@ -1,3 +1,4 @@ +# pylint: disable=no-member # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable diff --git a/services/autoscaling/tests/unit/test_models.py 
b/services/autoscaling/tests/unit/test_models.py index f859ff591d65..f2271889ddb2 100644 --- a/services/autoscaling/tests/unit/test_models.py +++ b/services/autoscaling/tests/unit/test_models.py @@ -10,7 +10,7 @@ import pytest from models_library.docker import DockerLabelKey, StandardSimcoreDockerLabels from models_library.generated_models.docker_rest_api import Service, Task -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError async def test_get_simcore_service_docker_labels_from_task_with_missing_labels_raises( @@ -19,12 +19,11 @@ async def test_get_simcore_service_docker_labels_from_task_with_missing_labels_r task_template: dict[str, Any], ): service_missing_osparc_labels = await create_service(task_template, {}, "running") - assert service_missing_osparc_labels.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_missing_osparc_labels.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( - filters={"service": service_missing_osparc_labels.Spec.Name} - ), + filters={"service": service_missing_osparc_labels.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -45,12 +44,11 @@ async def test_get_simcore_service_docker_labels( osparc_docker_label_keys.to_simcore_runtime_docker_labels(), "running", ) - assert service_with_labels.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_with_labels.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( - filters={"service": service_with_labels.Spec.Name} - ), + filters={"service": service_with_labels.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index 5811b43b2f06..f9e0e4c416df 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -31,7 +31,7 @@ from models_library.generated_models.docker_rest_api import Node as DockerNode from models_library.generated_models.docker_rest_api import NodeState, NodeStatus from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.aws_ec2 import assert_autoscaled_computational_ec2_instances from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict @@ -109,12 +109,12 @@ def _assert_rabbit_autoscaling_message_sent( nodes_total=0, nodes_active=0, nodes_drained=0, - cluster_total_resources=Resources.create_as_empty().dict(), - cluster_used_resources=Resources.create_as_empty().dict(), + cluster_total_resources=Resources.create_as_empty().model_dump(), + cluster_used_resources=Resources.create_as_empty().model_dump(), instances_pending=0, instances_running=0, ) - expected_message = default_message.copy(update=message_update_kwargs) + expected_message = default_message.model_copy(update=message_update_kwargs) mock_rabbitmq_post_message.assert_called_once_with( app, expected_message, @@ -241,7 +241,9 @@ async def test_cluster_scaling_with_task_with_too_much_resources_starts_nothing( dask_spec_local_cluster: distributed.SpecCluster, ): # create a task that needs too much power - dask_future = create_dask_task({"RAM": 
int(parse_obj_as(ByteSize, "12800GiB"))}) + dask_future = create_dask_task( + {"RAM": int(TypeAdapter(ByteSize).validate_python("12800GiB"))} + ) assert dask_future await auto_scale_cluster( @@ -317,8 +319,7 @@ async def _create_task_with_resources( assert instance_types["InstanceTypes"] assert "MemoryInfo" in instance_types["InstanceTypes"][0] assert "SizeInMiB" in instance_types["InstanceTypes"][0]["MemoryInfo"] - dask_ram = parse_obj_as( - ByteSize, + dask_ram = TypeAdapter(ByteSize).validate_python( f"{instance_types['InstanceTypes'][0]['MemoryInfo']['SizeInMiB']}MiB", ) dask_task_resources = create_dask_task_resources( @@ -335,7 +336,7 @@ async def _create_task_with_resources( [ pytest.param( None, - parse_obj_as(ByteSize, "128Gib"), + TypeAdapter(ByteSize).validate_python("128Gib"), "r5n.4xlarge", id="No explicit instance defined", ), @@ -347,7 +348,7 @@ async def _create_task_with_resources( ), pytest.param( "r5n.8xlarge", - parse_obj_as(ByteSize, "116Gib"), + TypeAdapter(ByteSize).validate_python("116Gib"), "r5n.8xlarge", id="Explicitely ask for r5n.8xlarge and set the resources", ), @@ -458,22 +459,22 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: expected_ec2_type } assert mock_docker_tag_node.call_count == 2 - assert fake_node.Spec - assert fake_node.Spec.Labels + assert fake_node.spec + assert fake_node.spec.labels fake_attached_node = deepcopy(fake_node) - assert fake_attached_node.Spec - fake_attached_node.Spec.Availability = ( + assert fake_attached_node.spec + fake_attached_node.spec.availability = ( Availability.active if with_drain_nodes_labelled else Availability.drain ) - assert fake_attached_node.Spec.Labels - fake_attached_node.Spec.Labels |= expected_docker_node_tags | { + assert fake_attached_node.spec.labels + fake_attached_node.spec.labels |= expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "false", } # check attach call assert mock_docker_tag_node.call_args_list[0] == mock.call( get_docker_client(initialized_app), fake_node, - tags=fake_node.Spec.Labels + tags=fake_node.spec.labels | expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "false", @@ -482,7 +483,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 available=with_drain_nodes_labelled, ) # update our fake node - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = mock_docker_tag_node.call_args_list[0][1]["tags"][ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY @@ -502,7 +503,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 assert mock_docker_tag_node.call_args_list[1] == mock.call( get_docker_client(initialized_app), fake_attached_node, - tags=fake_node.Spec.Labels + tags=fake_node.spec.labels | expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "true", @@ -511,7 +512,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 available=True, ) # update our fake node - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = mock_docker_tag_node.call_args_list[1][1]["tags"][ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY @@ -522,13 +523,13 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_rabbitmq_post_message.reset_mock() # now we have 1 monitored node that needs to be mocked - fake_attached_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" - fake_attached_node.Status = NodeStatus( + 
fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" + fake_attached_node.status = NodeStatus( State=NodeState.ready, Message=None, Addr=None ) - fake_attached_node.Spec.Availability = Availability.active - assert fake_attached_node.Description - fake_attached_node.Description.Hostname = internal_dns_name + fake_attached_node.spec.availability = Availability.active + assert fake_attached_node.description + fake_attached_node.description.hostname = internal_dns_name auto_scaling_mode = ComputationalAutoscaling() mocker.patch.object( @@ -591,7 +592,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: mock.ANY, }, @@ -601,7 +602,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 # now update the fake node to have the required label as expected assert app_settings.AUTOSCALING_EC2_INSTANCES - fake_attached_node.Spec.Labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY] = ( + fake_attached_node.spec.labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY] = ( arrow.utcnow() .shift( seconds=-app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_DRAINING.total_seconds() @@ -625,7 +626,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: mock.ANY, _OSPARC_SERVICE_READY_LABEL_KEY: "false", @@ -639,7 +640,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] ) > arrow.get( - fake_attached_node.Spec.Labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] ) mock_docker_tag_node.reset_mock() @@ -653,9 +654,9 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 ) # we artifically set the node to drain - fake_attached_node.Spec.Availability = Availability.drain - fake_attached_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.availability = Availability.drain + fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = datetime.datetime.now(tz=datetime.timezone.utc).isoformat() @@ -682,7 +683,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 ) # now changing the last update timepoint will trigger the node removal and shutdown the ec2 instance - fake_attached_node.Spec.Labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( datetime.datetime.now(tz=datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - datetime.timedelta(seconds=1) @@ -701,7 +702,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: mock.ANY, }, @@ -709,8 +710,8 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 ) mock_docker_tag_node.reset_mock() # set the fake node to drain - 
fake_attached_node.Spec.Availability = Availability.drain - fake_attached_node.Spec.Labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] = ( + fake_attached_node.spec.availability = Availability.drain + fake_attached_node.spec.labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] = ( arrow.utcnow() .shift( seconds=-app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_FINAL_TERMINATION.total_seconds() @@ -751,7 +752,7 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_allowed( # create a task that needs more power dask_task_resources = create_dask_task_resources( - faker.pystr(), parse_obj_as(ByteSize, "128GiB") + faker.pystr(), TypeAdapter(ByteSize).validate_python("128GiB") ) dask_future = create_dask_task(dask_task_resources) assert dask_future @@ -787,7 +788,7 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_fitting_reso # create a task that needs more power dask_task_resources = create_dask_task_resources( - "t2.xlarge", parse_obj_as(ByteSize, "128GiB") + "t2.xlarge", TypeAdapter(ByteSize).validate_python("128GiB") ) dask_future = create_dask_task(dask_task_resources) assert dask_future @@ -817,7 +818,8 @@ class _ScaleUpParams: def _dask_task_resources_from_resources(resources: Resources) -> DaskTaskResources: return { - res_key.upper(): res_value for res_key, res_value in resources.dict().items() + res_key.upper(): res_value + for res_key, res_value in resources.model_dump().items() } @@ -847,7 +849,9 @@ async def _change_parameters(*args, **kwargs) -> list[EC2InstanceData]: [ pytest.param( _ScaleUpParams( - task_resources=Resources(cpus=5, ram=parse_obj_as(ByteSize, "36Gib")), + task_resources=Resources( + cpus=5, ram=TypeAdapter(ByteSize).validate_python("36Gib") + ), num_tasks=10, expected_instance_type="g3.4xlarge", expected_num_instances=4, @@ -1106,7 +1110,7 @@ async def test_cluster_scaling_up_more_than_allowed_with_multiple_types_max_star [ pytest.param( None, - parse_obj_as(ByteSize, "128Gib"), + TypeAdapter(ByteSize).validate_python("128Gib"), "r5n.4xlarge", id="No explicit instance defined", ), diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py index 3a79a11c853c..461baee21fa8 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py @@ -35,7 +35,7 @@ Task, ) from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockType from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.aws_ec2 import assert_autoscaled_dynamic_ec2_instances @@ -130,51 +130,51 @@ def with_valid_time_before_termination( ) -> datetime.timedelta: time = "00:11:00" monkeypatch.setenv("EC2_INSTANCES_TIME_BEFORE_TERMINATION", time) - return parse_obj_as(datetime.timedelta, time) + return TypeAdapter(datetime.timedelta).validate_python(time) @pytest.fixture async def drained_host_node( host_node: Node, async_docker_client: aiodocker.Docker ) -> AsyncIterator[Node]: - assert host_node.ID - assert host_node.Version - assert host_node.Version.Index - assert host_node.Spec - assert host_node.Spec.Availability - assert host_node.Spec.Role - - old_availability = host_node.Spec.Availability + assert host_node.id + assert host_node.version + assert host_node.version.index + assert host_node.spec + assert 
host_node.spec.availability + assert host_node.spec.role + + old_availability = host_node.spec.availability await async_docker_client.nodes.update( - node_id=host_node.ID, - version=host_node.Version.Index, + node_id=host_node.id, + version=host_node.version.index, spec={ "Availability": "drain", - "Labels": host_node.Spec.Labels, - "Role": host_node.Spec.Role.value, + "Labels": host_node.spec.labels, + "Role": host_node.spec.role.value, }, ) - drained_node = parse_obj_as( - Node, await async_docker_client.nodes.inspect(node_id=host_node.ID) + drained_node = TypeAdapter(Node).validate_python( + await async_docker_client.nodes.inspect(node_id=host_node.id) ) yield drained_node # revert # NOTE: getting the node again as the version might have changed - drained_node = parse_obj_as( - Node, await async_docker_client.nodes.inspect(node_id=host_node.ID) - ) - assert drained_node.ID - assert drained_node.Version - assert drained_node.Version.Index - assert drained_node.Spec - assert drained_node.Spec.Role + drained_node = TypeAdapter(Node).validate_python( + await async_docker_client.nodes.inspect(node_id=host_node.id) + ) + assert drained_node.id + assert drained_node.version + assert drained_node.version.index + assert drained_node.spec + assert drained_node.spec.role await async_docker_client.nodes.update( - node_id=drained_node.ID, - version=drained_node.Version.Index, + node_id=drained_node.id, + version=drained_node.version.index, spec={ "Availability": old_availability.value, - "Labels": drained_node.Spec.Labels, - "Role": drained_node.Spec.Role.value, + "Labels": drained_node.spec.labels, + "Role": drained_node.spec.role.value, }, ) @@ -208,12 +208,12 @@ def _assert_rabbit_autoscaling_message_sent( nodes_total=0, nodes_active=0, nodes_drained=0, - cluster_total_resources=Resources.create_as_empty().dict(), - cluster_used_resources=Resources.create_as_empty().dict(), + cluster_total_resources=Resources.create_as_empty().model_dump(), + cluster_used_resources=Resources.create_as_empty().model_dump(), instances_pending=0, instances_running=0, ) - expected_message = default_message.copy(update=message_update_kwargs) + expected_message = default_message.model_copy(update=message_update_kwargs) assert mock_rabbitmq_post_message.call_args == mock.call(app, expected_message) @@ -322,10 +322,10 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect expected_additional_tag_keys=list(ec2_instance_custom_tags), instance_filters=instance_type_filters, ) - assert fake_node.Description - assert fake_node.Description.Resources - assert fake_node.Description.Resources.NanoCPUs - assert fake_node.Description.Resources.MemoryBytes + assert fake_node.description + assert fake_node.description.resources + assert fake_node.description.resources.nano_cp_us + assert fake_node.description.resources.memory_bytes _assert_rabbit_autoscaling_message_sent( mock_rabbitmq_post_message, app_settings, @@ -335,9 +335,9 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect instances_running=mock_machines_buffer, cluster_total_resources={ "cpus": mock_machines_buffer - * fake_node.Description.Resources.NanoCPUs + * fake_node.description.resources.nano_cp_us / 1e9, - "ram": mock_machines_buffer * fake_node.Description.Resources.MemoryBytes, + "ram": mock_machines_buffer * fake_node.description.resources.memory_bytes, }, ) @@ -533,11 +533,11 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: ) fake_attached_node = deepcopy(fake_node) - 
assert fake_attached_node.Spec - fake_attached_node.Spec.Availability = ( + assert fake_attached_node.spec + fake_attached_node.spec.availability = ( Availability.active if with_drain_nodes_labelled else Availability.drain ) - assert fake_attached_node.Spec.Labels + assert fake_attached_node.spec.labels assert app_settings.AUTOSCALING_NODES_MONITORING expected_docker_node_tags = { tag_key: "true" @@ -548,7 +548,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: } | { DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: scale_up_params.expected_instance_type } - fake_attached_node.Spec.Labels |= expected_docker_node_tags | { + fake_attached_node.spec.labels |= expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "false" } @@ -557,13 +557,13 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_find_node_with_name_returns_fake_node.reset_mock() assert mock_docker_tag_node.call_count == 2 - assert fake_node.Spec - assert fake_node.Spec.Labels + assert fake_node.spec + assert fake_node.spec.labels # check attach call assert mock_docker_tag_node.call_args_list[0] == mock.call( get_docker_client(initialized_app), fake_node, - tags=fake_node.Spec.Labels + tags=fake_node.spec.labels | expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "false", @@ -572,7 +572,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: available=with_drain_nodes_labelled, ) # update our fake node - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = mock_docker_tag_node.call_args_list[0][1]["tags"][ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY @@ -596,7 +596,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: assert mock_docker_tag_node.call_args_list[1] == mock.call( get_docker_client(initialized_app), fake_attached_node, - tags=fake_node.Spec.Labels + tags=fake_node.spec.labels | expected_docker_node_tags | { _OSPARC_SERVICE_READY_LABEL_KEY: "true", @@ -605,7 +605,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: available=True, ) # update our fake node - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = mock_docker_tag_node.call_args_list[1][1]["tags"][ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY @@ -629,9 +629,9 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: internal_dns_name = instances[0]["PrivateDnsName"].removesuffix(".ec2.internal") # check rabbit messages were sent, we do have worker - assert fake_attached_node.Description - assert fake_attached_node.Description.Resources - assert fake_attached_node.Description.Resources.NanoCPUs + assert fake_attached_node.description + assert fake_attached_node.description.resources + assert fake_attached_node.description.resources.nano_cp_us _assert_rabbit_autoscaling_message_sent( mock_rabbitmq_post_message, app_settings, @@ -639,8 +639,8 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: nodes_total=scale_up_params.expected_num_instances, nodes_active=scale_up_params.expected_num_instances, cluster_total_resources={ - "cpus": fake_attached_node.Description.Resources.NanoCPUs / 1e9, - "ram": fake_attached_node.Description.Resources.MemoryBytes, + "cpus": fake_attached_node.description.resources.nano_cp_us / 1e9, + "ram": fake_attached_node.description.resources.memory_bytes, }, cluster_used_resources={ "cpus": float(0), @@ 
-651,12 +651,12 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_rabbitmq_post_message.reset_mock() # now we have 1 monitored node that needs to be mocked - fake_attached_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" - fake_attached_node.Status = NodeStatus( + fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" + fake_attached_node.status = NodeStatus( State=NodeState.ready, Message=None, Addr=None ) - fake_attached_node.Spec.Availability = Availability.active - fake_attached_node.Description.Hostname = internal_dns_name + fake_attached_node.spec.availability = Availability.active + fake_attached_node.description.hostname = internal_dns_name auto_scaling_mode = DynamicAutoscaling() mocker.patch.object( @@ -700,9 +700,9 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: # await asyncio.gather( *( - async_docker_client.services.delete(d.ID) + async_docker_client.services.delete(d.id) for d in created_docker_services - if d.ID + if d.id ) ) @@ -723,7 +723,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY: mock.ANY, }, @@ -733,7 +733,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: # now update the fake node to have the required label as expected assert app_settings.AUTOSCALING_EC2_INSTANCES - fake_attached_node.Spec.Labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY] = ( + fake_attached_node.spec.labels[_OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY] = ( arrow.utcnow() .shift( seconds=-app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_DRAINING.total_seconds() @@ -748,7 +748,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_SERVICE_READY_LABEL_KEY: "false", _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: mock.ANY, @@ -761,7 +761,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] ) > arrow.get( - fake_attached_node.Spec.Labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] ) mock_docker_tag_node.reset_mock() @@ -771,7 +771,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_SERVICE_READY_LABEL_KEY: "false", _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY: mock.ANY, @@ -793,9 +793,9 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: # we artifically set the node to drain if not with_drain_nodes_labelled: - fake_attached_node.Spec.Availability = Availability.drain - fake_attached_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" - fake_attached_node.Spec.Labels[ + fake_attached_node.spec.availability = Availability.drain + fake_attached_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" + fake_attached_node.spec.labels[ _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY ] = 
datetime.datetime.now(tz=datetime.UTC).isoformat() @@ -824,7 +824,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: assert created_instances == instances # now changing the last update timepoint will trigger the node removal process - fake_attached_node.Spec.Labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( + fake_attached_node.spec.labels[_OSPARC_SERVICES_READY_DATETIME_LABEL_KEY] = ( datetime.datetime.now(tz=datetime.UTC) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - datetime.timedelta(seconds=1) @@ -845,7 +845,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, - tags=fake_attached_node.Spec.Labels + tags=fake_attached_node.spec.labels | { _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY: mock.ANY, }, @@ -853,8 +853,8 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: ) mock_docker_tag_node.reset_mock() # set the fake node to drain - fake_attached_node.Spec.Availability = Availability.drain - fake_attached_node.Spec.Labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] = ( + fake_attached_node.spec.availability = Availability.drain + fake_attached_node.spec.labels[_OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY] = ( arrow.utcnow() .shift( seconds=-app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_FINAL_TERMINATION.total_seconds() @@ -907,7 +907,7 @@ async def _assert_wait_for_ec2_instances_terminated() -> None: _ScaleUpParams( imposed_instance_type=None, service_resources=Resources( - cpus=4, ram=parse_obj_as(ByteSize, "128Gib") + cpus=4, ram=TypeAdapter(ByteSize).validate_python("128Gib") ), num_services=1, expected_instance_type="r5n.4xlarge", @@ -918,7 +918,9 @@ async def _assert_wait_for_ec2_instances_terminated() -> None: pytest.param( _ScaleUpParams( imposed_instance_type="t2.xlarge", - service_resources=Resources(cpus=4, ram=parse_obj_as(ByteSize, "4Gib")), + service_resources=Resources( + cpus=4, ram=TypeAdapter(ByteSize).validate_python("4Gib") + ), num_services=1, expected_instance_type="t2.xlarge", expected_num_instances=1, @@ -929,7 +931,7 @@ async def _assert_wait_for_ec2_instances_terminated() -> None: _ScaleUpParams( imposed_instance_type="r5n.8xlarge", service_resources=Resources( - cpus=4, ram=parse_obj_as(ByteSize, "128Gib") + cpus=4, ram=TypeAdapter(ByteSize).validate_python("128Gib") ), num_services=1, expected_instance_type="r5n.8xlarge", @@ -998,7 +1000,7 @@ async def test_cluster_scaling_up_and_down( _ScaleUpParams( imposed_instance_type=None, service_resources=Resources( - cpus=4, ram=parse_obj_as(ByteSize, "62Gib") + cpus=4, ram=TypeAdapter(ByteSize).validate_python("62Gib") ), num_services=1, expected_instance_type="r6a.2xlarge", @@ -1084,7 +1086,7 @@ async def test_cluster_scaling_up_and_down_against_aws( _ScaleUpParams( imposed_instance_type=None, service_resources=Resources( - cpus=5, ram=parse_obj_as(ByteSize, "36Gib") + cpus=5, ram=TypeAdapter(ByteSize).validate_python("36Gib") ), num_services=10, expected_instance_type="g3.4xlarge", # 1 GPU, 16 CPUs, 122GiB @@ -1096,7 +1098,7 @@ async def test_cluster_scaling_up_and_down_against_aws( _ScaleUpParams( imposed_instance_type="g4dn.8xlarge", service_resources=Resources( - cpus=5, ram=parse_obj_as(ByteSize, "20480MB") + cpus=5, ram=TypeAdapter(ByteSize).validate_python("20480MB") ), num_services=7, expected_instance_type="g4dn.8xlarge", # 1 GPU, 32 CPUs, 128GiB @@ -1190,7 +1192,7 @@ 
async def test_cluster_scaling_up_starts_multiple_instances( [ pytest.param( None, - parse_obj_as(ByteSize, "128Gib"), + TypeAdapter(ByteSize).validate_python("128Gib"), "r5n.4xlarge", id="No explicit instance defined", ), @@ -1451,12 +1453,11 @@ async def test__activate_drained_nodes_with_no_drained_nodes( service_with_no_reservations = await create_service( task_template_that_runs, {}, "running" ) - assert service_with_no_reservations.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_with_no_reservations.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_no_reservations.Spec.Name} - ), + filters={"service": service_with_no_reservations.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -1495,12 +1496,11 @@ async def test__activate_drained_nodes_with_drained_node( service_with_no_reservations = await create_service( task_template_that_runs, {}, "pending" ) - assert service_with_no_reservations.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_with_no_reservations.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_no_reservations.Spec.Name} - ), + filters={"service": service_with_no_reservations.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -1518,7 +1518,7 @@ async def test__activate_drained_nodes_with_drained_node( initialized_app, cluster_with_drained_nodes, DynamicAutoscaling() ) assert updated_cluster.active_nodes == cluster_with_drained_nodes.drained_nodes - assert drained_host_node.Spec + assert drained_host_node.spec mock_docker_tag_node.assert_called_once_with( mock.ANY, drained_host_node, diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py index 4c4037e51ab8..4a3d3e85baef 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py @@ -5,15 +5,18 @@ import asyncio +import datetime +from typing import Final from unittest import mock import pytest from fastapi import FastAPI +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_autoscaling.core.settings import ApplicationSettings -_FAST_POLL_INTERVAL = 1 +_FAST_POLL_INTERVAL: Final[int] = 1 @pytest.fixture @@ -26,7 +29,10 @@ def app_environment( monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: # fast interval - monkeypatch.setenv("AUTOSCALING_POLL_INTERVAL", f"{_FAST_POLL_INTERVAL}") + monkeypatch.setenv( + "AUTOSCALING_POLL_INTERVAL", + f"{TypeAdapter(datetime.timedelta).validate_python(_FAST_POLL_INTERVAL)}", + ) app_environment["AUTOSCALING_POLL_INTERVAL"] = f"{_FAST_POLL_INTERVAL}" return app_environment diff --git a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py index 26ac271db298..8135ac9d2c40 100644 --- a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py +++ b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py @@ -22,7 +22,7 @@ from fastapi.encoders import jsonable_encoder from models_library.docker import DockerGenericTag from models_library.utils.json_serialization import json_dumps -from pydantic import parse_obj_as +from pydantic import TypeAdapter from 
pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.aws_ec2 import ( assert_autoscaled_dynamic_warm_pools_ec2_instances, @@ -47,14 +47,13 @@ @pytest.fixture def fake_pre_pull_images() -> list[DockerGenericTag]: - return parse_obj_as( - list[DockerGenericTag], + return TypeAdapter(list[DockerGenericTag]).validate_python( [ "nginx:latest", "itisfoundation/my-very-nice-service:latest", "simcore/services/dynamic/another-nice-one:2.4.5", "asd", - ], + ] ) @@ -90,7 +89,7 @@ def ec2_instances_allowed_types_with_only_1_buffered( len(allowed_ec2_types_with_buffer_defined) == 1 ), "more than one type with buffer is disallowed in this test!" return { - parse_obj_as(InstanceTypeType, k): v + TypeAdapter(InstanceTypeType).validate_python(k): v for k, v in allowed_ec2_types_with_buffer_defined.items() } @@ -450,7 +449,11 @@ class _BufferMachineParams: _BufferMachineParams( "stopped", [], - [parse_obj_as(AWSTagKey, "io.simcore.autoscaling.pre_pulled_images")], + [ + TypeAdapter(AWSTagKey).validate_python( + "io.simcore.autoscaling.pre_pulled_images" + ) + ], ), ], ) @@ -589,7 +592,11 @@ def unneeded_instance_type( _BufferMachineParams( "stopped", [], - [parse_obj_as(AWSTagKey, "io.simcore.autoscaling.pre_pulled_images")], + [ + TypeAdapter(AWSTagKey).validate_python( + "io.simcore.autoscaling.pre_pulled_images" + ) + ], ), ], ) diff --git a/services/autoscaling/tests/unit/test_modules_dask.py b/services/autoscaling/tests/unit/test_modules_dask.py index 76dab6883e05..ae2ed0c5f15b 100644 --- a/services/autoscaling/tests/unit/test_modules_dask.py +++ b/services/autoscaling/tests/unit/test_modules_dask.py @@ -17,7 +17,7 @@ NoAuthentication, TLSAuthentication, ) -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_simcore.helpers.host import get_localhost_ip from simcore_service_autoscaling.core.errors import ( DaskNoWorkersError, @@ -42,7 +42,9 @@ _authentication_types = [ NoAuthentication(), - TLSAuthentication.construct(**TLSAuthentication.Config.schema_extra["examples"][0]), + TLSAuthentication.model_construct( + **TLSAuthentication.model_config["json_schema_extra"]["examples"][0] + ), ] @@ -54,7 +56,9 @@ async def test__scheduler_client_with_wrong_url( ): with pytest.raises(DaskSchedulerNotFoundError): async with _scheduler_client( - parse_obj_as(AnyUrl, f"tcp://{faker.ipv4()}:{faker.port_number()}"), + TypeAdapter(AnyUrl).validate_python( + f"tcp://{faker.ipv4()}:{faker.port_number()}" + ), authentication, ): ... 
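# NOTE (editor illustration, not part of the patch): the hunk above shows two
# more v1 -> v2 renames: the class-based `Config.schema_extra` moves into the
# `model_config` dict under the "json_schema_extra" key, and `.construct()`
# becomes `.model_construct()` (both skip validation). A sketch with a
# hypothetical model standing in for TLSAuthentication:
from pydantic import BaseModel, ConfigDict

class _FakeAuth(BaseModel):  # hypothetical stand-in, not a model from this repo
    model_config = ConfigDict(
        json_schema_extra={"examples": [{"tls_ca_file": "ca.pem", "tls_client_cert": "cert.pem"}]}
    )
    tls_ca_file: str
    tls_client_cert: str

_example = _FakeAuth.model_config["json_schema_extra"]["examples"][0]
_auth = _FakeAuth.model_construct(**_example)  # v1 spelling: _FakeAuth.construct(**_FakeAuth.Config.schema_extra["examples"][0])
assert _auth.tls_ca_file == "ca.pem"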
@@ -62,7 +66,9 @@ async def test__scheduler_client_with_wrong_url( @pytest.fixture def scheduler_url(dask_spec_local_cluster: distributed.SpecCluster) -> AnyUrl: - return parse_obj_as(AnyUrl, dask_spec_local_cluster.scheduler_address) + return TypeAdapter(AnyUrl).validate_python( + dask_spec_local_cluster.scheduler_address + ) @pytest.fixture @@ -95,8 +101,8 @@ async def test__scheduler_client( async def test_list_unrunnable_tasks_with_no_workers( dask_local_cluster_without_workers: distributed.SpecCluster, ): - scheduler_url = parse_obj_as( - AnyUrl, dask_local_cluster_without_workers.scheduler_address + scheduler_url = TypeAdapter(AnyUrl).validate_python( + dask_local_cluster_without_workers.scheduler_address ) assert await list_unrunnable_tasks(scheduler_url, NoAuthentication()) == [] @@ -199,8 +205,8 @@ async def test_get_worker_still_has_results_in_memory_with_no_workers_raises( dask_local_cluster_without_workers: distributed.SpecCluster, fake_localhost_ec2_instance_data: EC2InstanceData, ): - scheduler_url = parse_obj_as( - AnyUrl, dask_local_cluster_without_workers.scheduler_address + scheduler_url = TypeAdapter(AnyUrl).validate_python( + dask_local_cluster_without_workers.scheduler_address ) with pytest.raises(DaskNoWorkersError): await get_worker_still_has_results_in_memory( @@ -300,8 +306,8 @@ async def test_worker_used_resources_with_no_workers_raises( dask_local_cluster_without_workers: distributed.SpecCluster, fake_localhost_ec2_instance_data: EC2InstanceData, ): - scheduler_url = parse_obj_as( - AnyUrl, dask_local_cluster_without_workers.scheduler_address + scheduler_url = TypeAdapter(AnyUrl).validate_python( + dask_local_cluster_without_workers.scheduler_address ) with pytest.raises(DaskNoWorkersError): await get_worker_used_resources( diff --git a/services/autoscaling/tests/unit/test_modules_rabbitmq.py b/services/autoscaling/tests/unit/test_modules_rabbitmq.py index 2cc76d1465f3..9aab8d68e348 100644 --- a/services/autoscaling/tests/unit/test_modules_rabbitmq.py +++ b/services/autoscaling/tests/unit/test_modules_rabbitmq.py @@ -131,7 +131,7 @@ async def test_post_message( f"--> checking for message in rabbit exchange {rabbit_message.channel_name}, {attempt.retry_state.retry_object.statistics}" ) mocked_message_handler.assert_called_once_with( - rabbit_message.json().encode() + rabbit_message.model_dump_json().encode() ) print("... 
message received") diff --git a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py index dd0983f39862..f576292ec6b5 100644 --- a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py +++ b/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py @@ -16,7 +16,7 @@ from faker import Faker from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import Node as DockerNode -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_autoscaling.core.errors import Ec2InvalidDnsNameError @@ -129,10 +129,10 @@ async def test_associate_ec2_instances_with_corresponding_nodes( assert len(associated_instances) == len(ec2_instances) assert len(associated_instances) == len(nodes) for associated_instance in associated_instances: - assert associated_instance.node.Description - assert associated_instance.node.Description.Hostname + assert associated_instance.node.description + assert associated_instance.node.description.hostname assert ( - associated_instance.node.Description.Hostname + associated_instance.node.description.hostname in associated_instance.ec2_instance.aws_private_dns ) @@ -211,14 +211,13 @@ def ec2_instances_boot_ami_scripts( def ec2_instances_boot_ami_pre_pull( app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, faker: Faker ) -> EnvVarsDict: - images = parse_obj_as( - list[DockerGenericTag], + images = TypeAdapter(list[DockerGenericTag]).validate_python( [ "nginx:latest", "itisfoundation/my-very-nice-service:latest", "simcore/services/dynamic/another-nice-one:2.4.5", "asd", - ], + ] ) envs = setenvs_from_dict( monkeypatch, @@ -364,9 +363,9 @@ def test_sort_drained_nodes( for _ in range(_NUM_NODES_TERMINATING): fake_node = create_fake_node() - assert fake_node.Spec - assert fake_node.Spec.Labels - fake_node.Spec.Labels[ + assert fake_node.spec + assert fake_node.spec.labels + fake_node.spec.labels[ _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY ] = arrow.utcnow().datetime.isoformat() fake_associated_instance = create_associated_instance( @@ -404,6 +403,6 @@ def test_sort_drained_nodes( ) assert len(terminating_nodes) == _NUM_NODES_TERMINATING for n in terminating_nodes: - assert n.node.Spec - assert n.node.Spec.Labels - assert _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY in n.node.Spec.Labels + assert n.node.spec + assert n.node.spec.labels + assert _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY in n.node.spec.labels diff --git a/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py b/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py index 84ca4baa4742..19cc33c2575d 100644 --- a/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py +++ b/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py @@ -6,7 +6,7 @@ from faker import Faker from fastapi import FastAPI from models_library.docker import DockerGenericTag -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_autoscaling.constants import ( ACTIVATED_BUFFER_MACHINE_EC2_TAGS, @@ -65,8 +65,8 @@ def test_get_deactivated_buffer_ec2_tags_dynamic( | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) assert "Name" in expected_tags - expected_tags[AWSTagKey("Name")] = parse_obj_as( - 
AWSTagValue, str(expected_tags[AWSTagKey("Name")]) + "-buffer" + expected_tags[AWSTagKey("Name")] = TypeAdapter(AWSTagValue).validate_python( + str(expected_tags[AWSTagKey("Name")]) + "-buffer" ) assert expected_tags == deactivated_buffer_tags @@ -107,8 +107,8 @@ def test_get_deactivated_buffer_ec2_tags_computational( | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) assert "Name" in expected_tags - expected_tags[AWSTagKey("Name")] = parse_obj_as( - AWSTagValue, str(expected_tags[AWSTagKey("Name")]) + "-buffer" + expected_tags[AWSTagKey("Name")] = TypeAdapter(AWSTagValue).validate_python( + str(expected_tags[AWSTagKey("Name")]) + "-buffer" ) assert expected_tags == deactivated_buffer_tags @@ -144,10 +144,10 @@ def test_is_buffer_machine(tags: EC2Tags, expected_is_buffer: bool): "registry.pytest.com/simcore/services/dynamic/sym-server-8-0-0-dy:3.2.34", ], { - f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_(0)": '["itisfoundation/dynamic-sidecar:latest","itisfoundation/agent:latest","registry.pytest.com/simcore/services/dynamic/ti-postpro:2.0.34","registry.pytest.com/simcore/services/dynamic/ti-simu:1.0.12","registry.pytest.com/simcore/services/dynamic/ti-pers:1.0.', - f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_(1)": '19","registry.pytest.com/simcore/services/dynamic/sim4life-postpro:2.0.106","registry.pytest.com/simcore/services/dynamic/s4l-core-postpro:2.0.106","registry.pytest.com/simcore/services/dynamic/s4l-core-stream:2.0.106","registry.pytest.com/simcore/services', - f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_(2)": '/dynamic/sym-server-8-0-0-dy:2.0.106","registry.pytest.com/simcore/services/dynamic/sim4life-8-0-0-modeling:3.2.34","registry.pytest.com/simcore/services/dynamic/s4l-core-8-0-0-modeling:3.2.34","registry.pytest.com/simcore/services/dynamic/s4l-stream-8-0-0', - f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_(3)": '-dy:3.2.34","registry.pytest.com/simcore/services/dynamic/sym-server-8-0-0-dy:3.2.34"]', + f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_0": '["itisfoundation/dynamic-sidecar:latest","itisfoundation/agent:latest","registry.pytest.com/simcore/services/dynamic/ti-postpro:2.0.34","registry.pytest.com/simcore/services/dynamic/ti-simu:1.0.12","registry.pytest.com/simcore/services/dynamic/ti-pers:1.0.', + f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_1": '19","registry.pytest.com/simcore/services/dynamic/sim4life-postpro:2.0.106","registry.pytest.com/simcore/services/dynamic/s4l-core-postpro:2.0.106","registry.pytest.com/simcore/services/dynamic/s4l-core-stream:2.0.106","registry.pytest.com/simcore/services', + f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_2": '/dynamic/sym-server-8-0-0-dy:2.0.106","registry.pytest.com/simcore/services/dynamic/sim4life-8-0-0-modeling:3.2.34","registry.pytest.com/simcore/services/dynamic/s4l-core-8-0-0-modeling:3.2.34","registry.pytest.com/simcore/services/dynamic/s4l-stream-8-0-0', + f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_3": '-dy:3.2.34","registry.pytest.com/simcore/services/dynamic/sym-server-8-0-0-dy:3.2.34"]', }, id="many images that get chunked to AWS Tag max length", ), diff --git a/services/autoscaling/tests/unit/test_utils_computational_scaling.py b/services/autoscaling/tests/unit/test_utils_computational_scaling.py index 97cf493b6b96..b5744f170535 100644 --- a/services/autoscaling/tests/unit/test_utils_computational_scaling.py +++ b/services/autoscaling/tests/unit/test_utils_computational_scaling.py @@ -6,7 +6,7 @@ import pytest from aws_library.ec2 import Resources -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from simcore_service_autoscaling.models import DaskTask, 
DaskTaskResources from simcore_service_autoscaling.utils.computational_scaling import ( _DEFAULT_MAX_CPU, @@ -21,13 +21,16 @@ pytest.param( DaskTask(task_id="fake", required_resources=DaskTaskResources()), Resources( - cpus=_DEFAULT_MAX_CPU, ram=parse_obj_as(ByteSize, _DEFAULT_MAX_RAM) + cpus=_DEFAULT_MAX_CPU, + ram=TypeAdapter(ByteSize).validate_python(_DEFAULT_MAX_RAM), ), id="missing resources returns defaults", ), pytest.param( DaskTask(task_id="fake", required_resources={"CPU": 2.5}), - Resources(cpus=2.5, ram=parse_obj_as(ByteSize, _DEFAULT_MAX_RAM)), + Resources( + cpus=2.5, ram=TypeAdapter(ByteSize).validate_python(_DEFAULT_MAX_RAM) + ), id="only cpus defined", ), pytest.param( @@ -35,7 +38,7 @@ task_id="fake", required_resources={"CPU": 2.5, "RAM": 2 * 1024 * 1024 * 1024}, ), - Resources(cpus=2.5, ram=parse_obj_as(ByteSize, "2GiB")), + Resources(cpus=2.5, ram=TypeAdapter(ByteSize).validate_python("2GiB")), id="cpu and ram defined", ), pytest.param( @@ -43,7 +46,9 @@ task_id="fake", required_resources={"CPU": 2.5, "ram": 2 * 1024 * 1024 * 1024}, ), - Resources(cpus=2.5, ram=parse_obj_as(ByteSize, _DEFAULT_MAX_RAM)), + Resources( + cpus=2.5, ram=TypeAdapter(ByteSize).validate_python(_DEFAULT_MAX_RAM) + ), id="invalid naming", ), ], diff --git a/services/autoscaling/tests/unit/test_utils_docker.py b/services/autoscaling/tests/unit/test_utils_docker.py index 8e5b8cd90a82..3f9677112bb5 100644 --- a/services/autoscaling/tests/unit/test_utils_docker.py +++ b/services/autoscaling/tests/unit/test_utils_docker.py @@ -1,3 +1,4 @@ +# pylint: disable=no-member # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable @@ -30,7 +31,7 @@ Service, Task, ) -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from servicelib.docker_utils import to_datetime @@ -81,23 +82,23 @@ async def create_node_labels( host_node: Node, async_docker_client: aiodocker.Docker, ) -> AsyncIterator[Callable[[list[str]], Awaitable[None]]]: - assert host_node.Spec - old_labels = deepcopy(host_node.Spec.Labels) + assert host_node.spec + old_labels = deepcopy(host_node.spec.labels) async def _creator(labels: list[str]) -> None: - assert host_node.ID - assert host_node.Version - assert host_node.Version.Index - assert host_node.Spec - assert host_node.Spec.Role - assert host_node.Spec.Availability + assert host_node.id + assert host_node.version + assert host_node.version.index + assert host_node.spec + assert host_node.spec.role + assert host_node.spec.availability await async_docker_client.nodes.update( - node_id=host_node.ID, - version=host_node.Version.Index, + node_id=host_node.id, + version=host_node.version.index, spec={ "Name": "foo", - "Availability": host_node.Spec.Availability.value, - "Role": host_node.Spec.Role.value, + "Availability": host_node.spec.availability.value, + "Role": host_node.spec.role.value, "Labels": {f"{label}": "true" for label in labels}, }, ) @@ -158,12 +159,12 @@ async def test_get_monitored_nodes_with_valid_label( # this is the host node with some keys slightly changed EXCLUDED_KEYS = { - "Index": True, - "UpdatedAt": True, - "Version": True, - "Spec": {"Labels", "Name"}, + "index": True, + "updated_at": True, + "version": True, + "spec": {"labels", "name"}, } - assert host_node.dict(exclude=EXCLUDED_KEYS) == monitored_nodes[0].dict( + assert host_node.model_dump(exclude=EXCLUDED_KEYS) == 
monitored_nodes[0].model_dump( exclude=EXCLUDED_KEYS ) @@ -191,10 +192,10 @@ async def test_remove_monitored_down_nodes_of_non_down_node_does_nothing( @pytest.fixture def fake_docker_node(host_node: Node, faker: Faker) -> Node: - fake_node = host_node.copy(deep=True) - fake_node.ID = faker.uuid4() + fake_node = host_node.model_copy(deep=True) + fake_node.id = faker.uuid4(cast_to=str) assert ( - host_node.ID != fake_node.ID + host_node.id != fake_node.id ), "this should never happen, or you are really unlucky" return fake_node @@ -205,15 +206,15 @@ async def test_remove_monitored_down_nodes_of_down_node( mocker: MockerFixture, ): mocked_aiodocker = mocker.patch.object(autoscaling_docker, "nodes", autospec=True) - assert fake_docker_node.Status - fake_docker_node.Status.State = NodeState.down - assert fake_docker_node.Status.State == NodeState.down + assert fake_docker_node.status + fake_docker_node.status.state = NodeState.down + assert fake_docker_node.status.state == NodeState.down assert await remove_nodes(autoscaling_docker, nodes=[fake_docker_node]) == [ fake_docker_node ] # NOTE: this is the same as calling with aiodocker.Docker() as docker: docker.nodes.remove() mocked_aiodocker.remove.assert_called_once_with( - node_id=fake_docker_node.ID, force=False + node_id=fake_docker_node.id, force=False ) @@ -221,9 +222,9 @@ async def test_remove_monitored_down_node_with_unexpected_state_does_nothing( autoscaling_docker: AutoscalingDocker, fake_docker_node: Node, ): - assert fake_docker_node.Status - fake_docker_node.Status = None - assert not fake_docker_node.Status + assert fake_docker_node.status + fake_docker_node.status = None + assert not fake_docker_node.status assert await remove_nodes(autoscaling_docker, nodes=[fake_docker_node]) == [] @@ -276,7 +277,7 @@ async def test_pending_service_task_with_placement_constrain_is_skipped( service_with_too_many_resources = await create_service( task_template_with_too_many_resource, {}, "pending" ) - assert service_with_too_many_resources.Spec + assert service_with_too_many_resources.spec pending_tasks = await pending_service_tasks_with_insufficient_resources( autoscaling_docker, service_labels=[] @@ -312,13 +313,12 @@ async def test_pending_service_task_with_insufficient_resources_with_service_lac service_with_too_many_resources = await create_service( task_template_with_too_many_resource, {}, "pending" ) - assert service_with_too_many_resources.Spec + assert service_with_too_many_resources.spec - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_too_many_resources.Spec.Name} - ), + filters={"service": service_with_too_many_resources.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -382,16 +382,15 @@ async def test_pending_service_task_with_insufficient_resources_with_labelled_se service_with_labels = await create_service( task_template_with_too_many_resource, service_labels, "pending" ) - assert service_with_labels.Spec + assert service_with_labels.spec pending_tasks = await pending_service_tasks_with_insufficient_resources( autoscaling_docker, service_labels=list(service_labels) ) - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_labels.Spec.Name} - ), + filters={"service": service_with_labels.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -438,21 
+437,16 @@ async def test_pending_service_task_with_insufficient_resources_properly_sorts_t assert len(pending_tasks) == len(services) # check sorting is done by creation date - last_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta( - days=1 - ) + last_date = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=1) for task in pending_tasks: - assert task.CreatedAt # NOTE: in this case they are but they might be None - assert ( - to_datetime(task.CreatedAt).replace(tzinfo=datetime.timezone.utc) - > last_date - ) - last_date = to_datetime(task.CreatedAt).replace(tzinfo=datetime.timezone.utc) + assert task.created_at # NOTE: in this case they are but they might be None + assert to_datetime(task.created_at).replace(tzinfo=datetime.UTC) > last_date + last_date = to_datetime(task.created_at).replace(tzinfo=datetime.UTC) def test_safe_sort_key_callback(): tasks_with_faulty_timestamp = [ - Task(ID=n, CreatedAt=value) # type: ignore + Task(ID=f"{n}", CreatedAt=value) for n, value in enumerate( [ # SEE test_to_datetime_conversion_known_errors @@ -460,7 +454,7 @@ def test_safe_sort_key_callback(): "2023-03-15 09:20:58.123456", "2023-03-15T09:20:58.123456", "2023-03-15T09:20:58.123456Z", - f"{datetime.datetime.now(datetime.timezone.utc)}", + f"{datetime.datetime.now(datetime.UTC)}", "corrupted string", ] ) @@ -468,16 +462,16 @@ def test_safe_sort_key_callback(): sorted_tasks = sorted(tasks_with_faulty_timestamp, key=_by_created_dt) assert len(sorted_tasks) == len(tasks_with_faulty_timestamp) - assert {t.ID for t in sorted_tasks} == {t.ID for t in tasks_with_faulty_timestamp} + assert {t.id for t in sorted_tasks} == {t.id for t in tasks_with_faulty_timestamp} def test_get_node_total_resources(host_node: Node): resources = get_node_total_resources(host_node) - assert host_node.Description - assert host_node.Description.Resources - assert host_node.Description.Resources.NanoCPUs - assert resources.cpus == (host_node.Description.Resources.NanoCPUs / 10**9) - assert resources.ram == host_node.Description.Resources.MemoryBytes + assert host_node.description + assert host_node.description.resources + assert host_node.description.resources.nano_cp_us + assert resources.cpus == (host_node.description.resources.nano_cp_us / 10**9) + assert resources.ram == host_node.description.resources.memory_bytes async def test_compute_cluster_total_resources_with_no_nodes_returns_0( @@ -502,12 +496,11 @@ async def test_get_resources_from_docker_task_with_no_reservation_returns_0( task_template: dict[str, Any], ): service_with_no_resources = await create_service(task_template, {}, "running") - assert service_with_no_resources.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_with_no_resources.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_no_resources.Spec.Name} - ), + filters={"service": service_with_no_resources.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 @@ -531,10 +524,9 @@ async def test_get_resources_from_docker_task_with_reservations( NUM_CPUS, 0 ) service = await create_service(task_template_with_reservations, {}, "running") - assert service.Spec - service_tasks = parse_obj_as( - list[Task], - await async_docker_client.tasks.list(filters={"service": service.Spec.Name}), + assert service.spec + service_tasks = TypeAdapter(list[Task]).validate_python( + await async_docker_client.tasks.list(filters={"service": service.spec.name}) ) assert service_tasks 
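# NOTE (editor illustration, not part of the patch): the method renames
# applied throughout these tests follow the same v1 -> v2 table:
# .dict() -> .model_dump(), .copy() -> .model_copy(),
# .json() -> .model_dump_json(), .parse_obj() -> .model_validate().
# A minimal sketch, assuming only a plain pydantic v2 installation:
from pydantic import BaseModel

class _FakeMessage(BaseModel):  # hypothetical stand-in, not a model from this repo
    channel_name: str = "autoscaling"
    count: int = 0

_msg = _FakeMessage.model_validate({"count": 0})  # v1: _FakeMessage.parse_obj(...)
_payload = _msg.model_dump()                      # v1: _msg.dict()
_clone = _msg.model_copy(update={"count": 1})     # v1: _msg.copy(update=...)
_wire = _msg.model_dump_json().encode()           # v1: _msg.json().encode()
assert _payload["count"] == 0 and _clone.count == 1 and _wire.startswith(b"{")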
assert len(service_tasks) == 1 @@ -559,19 +551,18 @@ async def test_get_resources_from_docker_task_with_reservations_and_limits_retur NUM_CPUS, 0 ) task_template_with_reservations["Resources"] |= create_task_limits( - host_cpu_count, parse_obj_as(ByteSize, "100Mib") + host_cpu_count, TypeAdapter(ByteSize).validate_python("100Mib") )["Resources"] service = await create_service(task_template_with_reservations, {}, "running") - assert service.Spec - service_tasks = parse_obj_as( - list[Task], - await async_docker_client.tasks.list(filters={"service": service.Spec.Name}), + assert service.spec + service_tasks = TypeAdapter(list[Task]).validate_python( + await async_docker_client.tasks.list(filters={"service": service.spec.name}) ) assert service_tasks assert len(service_tasks) == 1 assert get_max_resources_from_docker_task(service_tasks[0]) == Resources( - cpus=host_cpu_count, ram=parse_obj_as(ByteSize, "100Mib") + cpus=host_cpu_count, ram=TypeAdapter(ByteSize).validate_python("100Mib") ) @@ -619,10 +610,9 @@ async def test_get_task_instance_restriction( "pending" if placement_constraints else "running", placement_constraints, ) - assert service.Spec - service_tasks = parse_obj_as( - list[Task], - await autoscaling_docker.tasks.list(filters={"service": service.Spec.Name}), + assert service.spec + service_tasks = TypeAdapter(list[Task]).validate_python( + await autoscaling_docker.tasks.list(filters={"service": service.spec.name}) ) instance_type_or_none = await get_task_instance_restriction( autoscaling_docker, service_tasks[0] @@ -642,12 +632,11 @@ async def test_compute_tasks_needed_resources( faker: Faker, ): service_with_no_resources = await create_service(task_template, {}, "running") - assert service_with_no_resources.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_with_no_resources.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await autoscaling_docker.tasks.list( - filters={"service": service_with_no_resources.Spec.Name} - ), + filters={"service": service_with_no_resources.spec.name} + ) ) assert compute_tasks_needed_resources(service_tasks) == Resources.create_as_empty() @@ -662,10 +651,9 @@ async def test_compute_tasks_needed_resources( ) all_tasks = service_tasks for s in services: - assert s.Spec - service_tasks = parse_obj_as( - list[Task], - await autoscaling_docker.tasks.list(filters={"service": s.Spec.Name}), + assert s.spec + service_tasks = TypeAdapter(list[Task]).validate_python( + await autoscaling_docker.tasks.list(filters={"service": s.spec.name}) ) assert compute_tasks_needed_resources(service_tasks) == Resources( cpus=1, ram=ByteSize(0) @@ -872,7 +860,7 @@ async def test_get_docker_swarm_join_script_returning_unexpected_command_raises( def test_get_docker_login_on_start_bash_command(): registry_settings = RegistrySettings( - **RegistrySettings.Config.schema_extra["examples"][0] + **RegistrySettings.model_config["json_schema_extra"]["examples"][0] ) returned_command = get_docker_login_on_start_bash_command(registry_settings) assert ( @@ -884,11 +872,11 @@ def test_get_docker_login_on_start_bash_command(): async def test_try_get_node_with_name( autoscaling_docker: AutoscalingDocker, host_node: Node ): - assert host_node.Description - assert host_node.Description.Hostname + assert host_node.description + assert host_node.description.hostname received_node = await find_node_with_name( - autoscaling_docker, host_node.Description.Hostname + autoscaling_docker, host_node.description.hostname ) assert received_node == host_node @@ -896,11 
+884,11 @@ async def test_try_get_node_with_name( async def test_try_get_node_with_name_fake( autoscaling_docker: AutoscalingDocker, fake_node: Node ): - assert fake_node.Description - assert fake_node.Description.Hostname + assert fake_node.description + assert fake_node.description.hostname received_node = await find_node_with_name( - autoscaling_docker, fake_node.Description.Hostname + autoscaling_docker, fake_node.description.hostname ) assert received_node is None @@ -921,8 +909,8 @@ async def test_find_node_with_name_with_common_prefixed_nodes( needed_host_name = f"{common_prefix}11" found_node = await find_node_with_name(autoscaling_docker, needed_host_name) assert found_node - assert found_node.Description - assert found_node.Description.Hostname == needed_host_name + assert found_node.description + assert found_node.description.hostname == needed_host_name async def test_find_node_with_smaller_name_with_common_prefixed_nodes_returns_none( @@ -946,53 +934,53 @@ async def test_find_node_with_smaller_name_with_common_prefixed_nodes_returns_no async def test_tag_node( autoscaling_docker: AutoscalingDocker, host_node: Node, faker: Faker ): - assert host_node.Description - assert host_node.Description.Hostname + assert host_node.description + assert host_node.description.hostname tags = faker.pydict(allowed_types=(str,)) await tag_node(autoscaling_docker, host_node, tags=tags, available=False) updated_node = await find_node_with_name( - autoscaling_docker, host_node.Description.Hostname + autoscaling_docker, host_node.description.hostname ) assert updated_node - assert updated_node.Spec - assert updated_node.Spec.Availability == Availability.drain - assert updated_node.Spec.Labels == tags + assert updated_node.spec + assert updated_node.spec.availability == Availability.drain + assert updated_node.spec.labels == tags await tag_node(autoscaling_docker, updated_node, tags={}, available=True) updated_node = await find_node_with_name( - autoscaling_docker, host_node.Description.Hostname + autoscaling_docker, host_node.description.hostname ) assert updated_node - assert updated_node.Spec - assert updated_node.Spec.Availability == Availability.active - assert updated_node.Spec.Labels == {} + assert updated_node.spec + assert updated_node.spec.availability == Availability.active + assert updated_node.spec.labels == {} async def test_tag_node_out_of_sequence_error( autoscaling_docker: AutoscalingDocker, host_node: Node, faker: Faker ): - assert host_node.Description - assert host_node.Description.Hostname + assert host_node.description + assert host_node.description.hostname tags = faker.pydict(allowed_types=(str,)) # this works updated_node = await tag_node( autoscaling_docker, host_node, tags=tags, available=False ) assert updated_node - assert host_node.Version - assert host_node.Version.Index - assert updated_node.Version - assert updated_node.Version.Index - assert host_node.Version.Index < updated_node.Version.Index + assert host_node.version + assert host_node.version.index + assert updated_node.version + assert updated_node.version.index + assert host_node.version.index < updated_node.version.index # running the same call with the old node should not raise an out of sequence error updated_node2 = await tag_node( autoscaling_docker, host_node, tags=tags, available=True ) assert updated_node2 - assert updated_node2.Version - assert updated_node2.Version.Index - assert updated_node2.Version.Index > updated_node.Version.Index + assert updated_node2.version + assert 
updated_node2.version.index + assert updated_node2.version.index > updated_node.version.index async def test_set_node_availability( @@ -1132,25 +1120,25 @@ def test_is_node_ready_and_available(create_fake_node: Callable[..., Node]): def test_is_node_osparc_ready(create_fake_node: Callable[..., Node], faker: Faker): fake_node = create_fake_node() - assert fake_node.Spec - assert fake_node.Spec.Availability is Availability.drain + assert fake_node.spec + assert fake_node.spec.availability is Availability.drain # no labels, not ready and drained assert not is_node_osparc_ready(fake_node) # no labels, not ready, but active - fake_node.Spec.Availability = Availability.active + fake_node.spec.availability = Availability.active assert not is_node_osparc_ready(fake_node) # no labels, ready and active - fake_node.Status = NodeStatus(State=NodeState.ready, Message=None, Addr=None) + fake_node.status = NodeStatus(State=NodeState.ready, Message=None, Addr=None) assert not is_node_osparc_ready(fake_node) # add some random labels - assert fake_node.Spec - fake_node.Spec.Labels = faker.pydict(allowed_types=(str,)) + assert fake_node.spec + fake_node.spec.labels = faker.pydict(allowed_types=(str,)) assert not is_node_osparc_ready(fake_node) # add the expected label - fake_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" + fake_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "false" assert not is_node_osparc_ready(fake_node) # make it ready - fake_node.Spec.Labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" + fake_node.spec.labels[_OSPARC_SERVICE_READY_LABEL_KEY] = "true" assert is_node_osparc_ready(fake_node) @@ -1209,9 +1197,9 @@ async def test_set_node_found_empty( ): # initial state assert is_node_ready_and_available(host_node, availability=Availability.active) - assert host_node.Spec - assert host_node.Spec.Labels - assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in host_node.Spec.Labels + assert host_node.spec + assert host_node.spec.labels + assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in host_node.spec.labels # the date does not exist as nothing was done node_empty_since = await get_node_empty_since(host_node) @@ -1219,9 +1207,9 @@ async def test_set_node_found_empty( # now we set it to empty updated_node = await set_node_found_empty(autoscaling_docker, host_node, empty=True) - assert updated_node.Spec - assert updated_node.Spec.Labels - assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY in updated_node.Spec.Labels + assert updated_node.spec + assert updated_node.spec.labels + assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY in updated_node.spec.labels # we can get that empty date back node_empty_since = await get_node_empty_since(updated_node) @@ -1232,9 +1220,9 @@ async def test_set_node_found_empty( updated_node = await set_node_found_empty( autoscaling_docker, host_node, empty=False ) - assert updated_node.Spec - assert updated_node.Spec.Labels - assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in updated_node.Spec.Labels + assert updated_node.spec + assert updated_node.spec.labels + assert _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY not in updated_node.spec.labels # we can't get a date anymore node_empty_since = await get_node_empty_since(updated_node) @@ -1253,9 +1241,9 @@ async def test_set_node_begin_termination_process( ): # initial state assert is_node_ready_and_available(host_node, availability=Availability.active) - assert host_node.Spec - assert host_node.Spec.Labels - assert _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY not in host_node.Spec.Labels + assert host_node.spec + assert 
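The hunks above show the two most common pydantic v1 -> v2 rewrites in this PR: `parse_obj_as(list[Task], ...)` becomes `TypeAdapter(list[Task]).validate_python(...)`, and `.copy(deep=True)` becomes `.model_copy(deep=True)`. A minimal, self-contained sketch of the pattern; the `Task` model below is an illustrative stand-in, not the docker-api model these tests import:

from pydantic import BaseModel, TypeAdapter

class Task(BaseModel):  # stand-in for the generated docker Task model
    ID: str
    CreatedAt: str | None = None

raw = [{"ID": "abc", "CreatedAt": None}]

# pydantic v1: tasks = parse_obj_as(list[Task], raw)
tasks = TypeAdapter(list[Task]).validate_python(raw)  # pydantic v2 equivalent
assert tasks[0].ID == "abc"

clone = tasks[0].model_copy(deep=True)  # was: tasks[0].copy(deep=True)
assert clone == tasks[0]

Building the `TypeAdapter` once and reusing it (e.g. as a module constant) avoids re-compiling the validator on every call, which is why the repo often hoists it.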
diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
index 1c5920f9dc74..93924bf9a5a3 100644
--- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py
+++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
@@ -19,7 +19,7 @@
     ProgressRabbitMessageNode,
     ProgressType,
 )
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from servicelib.rabbitmq import BIND_TO_ALL_TOPICS, RabbitMQClient
 from settings_library.rabbit import RabbitSettings
@@ -78,12 +78,11 @@ async def test_post_task_log_message(
         osparc_docker_label_keys.to_simcore_runtime_docker_labels(),
         "running",
     )
-    assert service_with_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    assert service_with_labels.spec
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
-            filters={"service": service_with_labels.Spec.Name}
-        ),
+            filters={"service": service_with_labels.spec.name}
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -104,7 +103,7 @@ async def test_post_task_log_message(
             messages=[f"[cluster] {log_message}"],
             log_level=0,
         )
-        .json()
+        .model_dump_json()
         .encode()
     )
     print("... message received")
@@ -125,12 +124,11 @@ async def test_post_task_log_message_does_not_raise_if_service_has_no_labels(
     faker: Faker,
 ):
     service_without_labels = await create_service(task_template, {}, "running")
-    assert service_without_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    assert service_without_labels.spec
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
-            filters={"service": service_without_labels.Spec.Name}
-        ),
+            filters={"service": service_without_labels.spec.name}
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -170,12 +168,11 @@ async def test_post_task_progress_message(
         osparc_docker_label_keys.to_simcore_runtime_docker_labels(),
         "running",
     )
-    assert service_with_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    assert service_with_labels.spec
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
-            filters={"service": service_with_labels.Spec.Name}
-        ),
+            filters={"service": service_with_labels.spec.name}
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
@@ -196,7 +193,7 @@ async def test_post_task_progress_message(
             progress_type=ProgressType.CLUSTER_UP_SCALING,
             report=ProgressReport(actual_value=progress_value, total=1),
         )
-        .json()
+        .model_dump_json()
         .encode()
     )
     print("... message received")
@@ -217,12 +214,11 @@ async def test_post_task_progress_does_not_raise_if_service_has_no_labels(
     faker: Faker,
 ):
     service_without_labels = await create_service(task_template, {}, "running")
-    assert service_without_labels.Spec
-    service_tasks = parse_obj_as(
-        list[Task],
+    assert service_without_labels.spec
+    service_tasks = TypeAdapter(list[Task]).validate_python(
         await async_docker_client.tasks.list(
-            filters={"service": service_without_labels.Spec.Name}
-        ),
+            filters={"service": service_without_labels.spec.name}
+        )
     )
     assert service_tasks
     assert len(service_tasks) == 1
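These rabbitmq tests also exercise the v2 serialization renames: `.json()` -> `.model_dump_json()`, `.dict()` -> `.model_dump()`, `parse_obj`/`parse_raw` -> `model_validate`/`model_validate_json`. A minimal round-trip sketch with a stand-in message model (the real `LoggerRabbitMessage` has more fields):

from pydantic import BaseModel

class LoggerRabbitMessage(BaseModel):  # illustrative stand-in only
    node_id: str
    messages: list[str]
    log_level: int = 0

msg = LoggerRabbitMessage(node_id="n1", messages=["[cluster] hello"])
payload = msg.model_dump_json().encode()  # was: msg.json().encode()
assert LoggerRabbitMessage.model_validate_json(payload) == msg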
message received") @@ -217,12 +214,11 @@ async def test_post_task_progress_does_not_raise_if_service_has_no_labels( faker: Faker, ): service_without_labels = await create_service(task_template, {}, "running") - assert service_without_labels.Spec - service_tasks = parse_obj_as( - list[Task], + assert service_without_labels.spec + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( - filters={"service": service_without_labels.Spec.Name} - ), + filters={"service": service_without_labels.spec.name} + ) ) assert service_tasks assert len(service_tasks) == 1 diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in index ef6f55597c3f..1394dd65e5ed 100644 --- a/services/catalog/requirements/_base.in +++ b/services/catalog/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ../../../requirements/constraints.txt --constraint constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/catalog/requirements/ci.txt b/services/catalog/requirements/ci.txt index 56552c181bf3..68ad56caa9a7 100644 --- a/services/catalog/requirements/ci.txt +++ b/services/catalog/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/catalog/requirements/dev.txt b/services/catalog/requirements/dev.txt index dccc4f79f396..c9df003398e1 100644 --- a/services/catalog/requirements/dev.txt +++ b/services/catalog/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore/ diff --git a/services/clusters-keeper/requirements/_base.in b/services/clusters-keeper/requirements/_base.in index dc3b222d6dba..558d68b67cc7 100644 --- a/services/clusters-keeper/requirements/_base.in +++ b/services/clusters-keeper/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ../../../services/dask-sidecar/requirements/_dask-distributed.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/aws-library/requirements/_base.in diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 6c53105c403e..5b72b81e357c 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -46,6 +46,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -129,20 +131,8 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -351,7 +341,7 @@ psutil==6.0.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -362,7 +352,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -379,6 +368,26 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -389,6 +398,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -465,7 +476,7 @@ sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -531,6 +542,7 @@ typing-extensions==4.11.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 00a7437644c0..a43369668cc5 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -11,6 +11,10 @@ aiosignal==1.3.1 # via # -c requirements/_base.txt # aiohttp +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -201,11 +205,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.4 # via moto pytest==8.3.3 @@ -231,7 +239,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../requirements/constraints.txt @@ -306,6 +316,7 @@ typing-extensions==4.11.0 # aws-sam-translator # cfn-lint # pydantic + # 
pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/clusters-keeper/requirements/ci.txt b/services/clusters-keeper/requirements/ci.txt index 9adfcb62d50f..7bb3a4afb29f 100644 --- a/services/clusters-keeper/requirements/ci.txt +++ b/services/clusters-keeper/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/clusters-keeper/requirements/dev.txt b/services/clusters-keeper/requirements/dev.txt index 5324f4c79f7c..faf4378c83db 100644 --- a/services/clusters-keeper/requirements/dev.txt +++ b/services/clusters-keeper/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/_meta.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/_meta.py index 828216222aa9..58d79f3b9ba7 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/_meta.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/_meta.py @@ -9,17 +9,21 @@ from models_library.basic_types import VersionStr, VersionTag from packaging.version import Version -from pydantic import parse_obj_as +from pydantic import TypeAdapter _current_distribution = distribution("simcore-service-clusters-keeper") __version__: str = version("simcore-service-clusters-keeper") APP_NAME: Final[str] = _current_distribution.metadata["Name"] -API_VERSION: Final[VersionStr] = parse_obj_as(VersionStr, __version__) +API_VERSION: Final[VersionStr] = TypeAdapter(VersionStr).validate_python(__version__) VERSION: Final[Version] = Version(__version__) -API_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, f"v{VERSION.major}") -RPC_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, f"v{VERSION.major}") +API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python( + f"v{VERSION.major}" +) +RPC_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python( + f"v{VERSION.major}" +) def get_summary() -> str: diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py index 7f970665f25e..a5d4f3636da8 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/constants.py @@ -1,15 +1,21 @@ from typing import Final from aws_library.ec2._models import AWSTagKey, AWSTagValue -from pydantic import parse_obj_as +from pydantic import TypeAdapter DOCKER_STACK_DEPLOY_COMMAND_NAME: Final[str] = "private cluster docker deploy" -DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.clusters-keeper.private_cluster_docker_deploy" -) +DOCKER_STACK_DEPLOY_COMMAND_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter( + AWSTagKey +).validate_python("io.simcore.clusters-keeper.private_cluster_docker_deploy") -USER_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "user_id") -WALLET_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, 
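As in _meta.py above, the next hunks (constants.py) validate module-level constants once at import time through `TypeAdapter`. A sketch of the pattern under the assumption that `AWSTagKey` is an `Annotated[str, ...]` alias with string constraints (the actual constraint in aws-library may differ):

from typing import Annotated, Final
from pydantic import StringConstraints, TypeAdapter

# assumed shape of the aws-library alias, for illustration only
AWSTagKey = Annotated[str, StringConstraints(min_length=1, max_length=128)]

# v1: USER_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "user_id")
USER_ID_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("user_id")

Validating at import time keeps the fail-fast behavior of the v1 `parse_obj_as` constants: a constraint violation aborts service startup instead of surfacing later.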
"wallet_id") -ROLE_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "role") -WORKER_ROLE_TAG_VALUE: Final[AWSTagValue] = parse_obj_as(AWSTagValue, "worker") -MANAGER_ROLE_TAG_VALUE: Final[AWSTagValue] = parse_obj_as(AWSTagValue, "manager") +USER_ID_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("user_id") +WALLET_ID_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "wallet_id" +) +ROLE_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("role") +WORKER_ROLE_TAG_VALUE: Final[AWSTagValue] = TypeAdapter(AWSTagValue).validate_python( + "worker" +) +MANAGER_ROLE_TAG_VALUE: Final[AWSTagValue] = TypeAdapter(AWSTagValue).validate_python( + "manager" +) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py index 5948715b0813..ac3955a3f253 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py @@ -27,7 +27,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - logger.info("app settings: %s", settings.json(indent=1)) + logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( debug=settings.CLUSTERS_KEEPER_DEBUG, diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/errors.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/errors.py index 068a13f702e6..02824102d435 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/errors.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ClustersKeeperRuntimeError(PydanticErrorMixin, RuntimeError): +class ClustersKeeperRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "clusters-keeper unexpected error" diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index ff2e74bbedc9..019d66cea9e3 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Any, ClassVar, Final, Literal, cast +from typing import Final, Literal, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -12,14 +12,16 @@ ) from models_library.clusters import InternalClusterAuthentication from pydantic import ( + AliasChoices, Field, NonNegativeFloat, NonNegativeInt, PositiveInt, SecretStr, - parse_obj_as, - validator, + TypeAdapter, + field_validator, ) +from pydantic_settings import SettingsConfigDict from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -36,10 +38,9 @@ class ClustersKeeperEC2Settings(EC2Settings): - class Config(EC2Settings.Config): - env_prefix = CLUSTERS_KEEPER_ENV_PREFIX - - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + env_prefix=CLUSTERS_KEEPER_ENV_PREFIX, + json_schema_extra={ "examples": [ { f"{CLUSTERS_KEEPER_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -48,22 
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py
index ff2e74bbedc9..019d66cea9e3 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py
@@ -1,6 +1,6 @@
 import datetime
 from functools import cached_property
-from typing import Any, ClassVar, Final, Literal, cast
+from typing import Final, Literal, cast

 from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags
 from fastapi import FastAPI
@@ -12,14 +12,16 @@
 )
 from models_library.clusters import InternalClusterAuthentication
 from pydantic import (
+    AliasChoices,
     Field,
     NonNegativeFloat,
     NonNegativeInt,
     PositiveInt,
     SecretStr,
-    parse_obj_as,
-    validator,
+    TypeAdapter,
+    field_validator,
 )
+from pydantic_settings import SettingsConfigDict
 from settings_library.base import BaseCustomSettings
 from settings_library.docker_registry import RegistrySettings
 from settings_library.ec2 import EC2Settings
@@ -36,10 +38,9 @@


 class ClustersKeeperEC2Settings(EC2Settings):
-    class Config(EC2Settings.Config):
-        env_prefix = CLUSTERS_KEEPER_ENV_PREFIX
-
-        schema_extra: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+    model_config = SettingsConfigDict(
+        env_prefix=CLUSTERS_KEEPER_ENV_PREFIX,
+        json_schema_extra={
             "examples": [
                 {
                     f"{CLUSTERS_KEEPER_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id",
@@ -48,22 +49,27 @@ class Config(EC2Settings.Config):
                     f"{CLUSTERS_KEEPER_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key",
                 }
             ],
-        }
+        },
+    )


 class ClustersKeeperSSMSettings(SSMSettings):
-    class Config(SSMSettings.Config):
-        env_prefix = CLUSTERS_KEEPER_ENV_PREFIX
-
-        schema_extra: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+    model_config = SettingsConfigDict(
+        env_prefix=CLUSTERS_KEEPER_ENV_PREFIX,
+        json_schema_extra={
             "examples": [
                 {
                     f"{CLUSTERS_KEEPER_ENV_PREFIX}{key}": var
-                    for key, var in example.items()
+                    for key, var in example.items()  # type:ignore[union-attr]
                 }
-                for example in SSMSettings.Config.schema_extra["examples"]
+                for example in SSMSettings.model_config[  # type:ignore[union-attr,index]
+                    "json_schema_extra"
+                ][
+                    "examples"
+                ]
             ],
-        }
+        },
+    )
@@ -94,7 +100,7 @@ class WorkersEC2InstancesSettings(BaseCustomSettings):
     # NAME PREFIX is not exposed since we override it anyway
     WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field(
         ...,
-        min_items=1,
+        min_length=1,
        description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic"
        " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), "
        " this is required to start a new EC2 instance",
@@ -125,14 +131,14 @@ class WorkersEC2InstancesSettings(BaseCustomSettings):
         "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]",
     )

-    @validator("WORKERS_EC2_INSTANCES_ALLOWED_TYPES")
+    @field_validator("WORKERS_EC2_INSTANCES_ALLOWED_TYPES")
     @classmethod
     def check_valid_instance_names(
         cls, value: dict[str, EC2InstanceBootSpecific]
     ) -> dict[str, EC2InstanceBootSpecific]:
         # NOTE: needed because of a flaw in BaseCustomSettings
         # issubclass raises TypeError if used on Aliases
-        parse_obj_as(list[InstanceTypeType], list(value))
+        TypeAdapter(list[InstanceTypeType]).validate_python(list(value))
         return value
@@ -147,7 +153,7 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings):
     )
     PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field(
         ...,
-        min_items=1,
+        min_length=1,
        description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic"
        " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), "
        " this is required to start a new EC2 instance",
@@ -205,17 +211,17 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings):
         "(see https://docs.docker.com/reference/cli/docker/swarm/init/)",
     )

-    @validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES")
+    @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES")
     @classmethod
     def check_valid_instance_names(
         cls, value: dict[str, EC2InstanceBootSpecific]
     ) -> dict[str, EC2InstanceBootSpecific]:
         # NOTE: needed because of a flaw in BaseCustomSettings
         # issubclass raises TypeError if used on Aliases
-        parse_obj_as(list[InstanceTypeType], list(value))
+        TypeAdapter(list[InstanceTypeType]).validate_python(list(value))
         return value

-    @validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES")
+    @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES")
     @classmethod
     def check_only_one_value(
         cls, value: dict[str, EC2InstanceBootSpecific]
@@ -254,34 +260,39 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):

     # RUNTIME  -----------------------------------------------------------
     CLUSTERS_KEEPER_DEBUG: bool = Field(
-        default=False, description="Debug mode", env=["CLUSTERS_KEEPER_DEBUG", "DEBUG"]
+        default=False,
+        description="Debug mode",
+        validation_alias=AliasChoices("CLUSTERS_KEEPER_DEBUG", "DEBUG"),
     )
     CLUSTERS_KEEPER_LOGLEVEL: LogLevel = Field(
-        LogLevel.INFO, env=["CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"]
+        LogLevel.INFO,
+        validation_alias=AliasChoices(
+            "CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
+        ),
     )
     CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=[
+        validation_alias=AliasChoices(
             "CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED",
             "LOG_FORMAT_LOCAL_DEV_ENABLED",
-        ],
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
     CLUSTERS_KEEPER_EC2_ACCESS: ClustersKeeperEC2Settings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )

     CLUSTERS_KEEPER_SSM_ACCESS: ClustersKeeperSSMSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )

     CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: PrimaryEC2InstancesSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )

     CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES: WorkersEC2InstancesSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )

     CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: str = Field(
@@ -289,14 +300,18 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
         description="set a prefix to all machines created (useful for testing)",
     )

-    CLUSTERS_KEEPER_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True)
+    CLUSTERS_KEEPER_RABBITMQ: RabbitSettings | None = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )

     CLUSTERS_KEEPER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True

-    CLUSTERS_KEEPER_REDIS: RedisSettings = Field(auto_default_from_env=True)
+    CLUSTERS_KEEPER_REDIS: RedisSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )

     CLUSTERS_KEEPER_REGISTRY: RegistrySettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
     )

     CLUSTERS_KEEPER_TASK_INTERVAL: datetime.timedelta = Field(
@@ -339,7 +354,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
         ", see https://selectfrom.dev/deep-dive-into-dask-distributed-scheduler-9fdb3b36b7c7",
     )
     CLUSTERS_KEEPER_TRACING: TracingSettings | None = Field(
-        auto_default_from_env=True, description="settings for opentelemetry tracing"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for opentelemetry tracing",
     )

     SWARM_STACK_NAME: str = Field(
@@ -350,11 +366,22 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
     def LOG_LEVEL(self) -> LogLevel:  # noqa: N802
         return self.CLUSTERS_KEEPER_LOGLEVEL

-    @validator("CLUSTERS_KEEPER_LOGLEVEL", pre=True)
+    @field_validator("CLUSTERS_KEEPER_LOGLEVEL", mode="before")
     @classmethod
-    def valid_log_level(cls, value: str) -> str:
+    def _valid_log_level(cls, value: str) -> str:
         return cls.validate_log_level(value)

+    @field_validator(
+        "CLUSTERS_KEEPER_TASK_INTERVAL", "SERVICE_TRACKING_HEARTBEAT", mode="before"
+    )
+    @classmethod
+    def _validate_interval(
+        cls, value: str | datetime.timedelta
+    ) -> int | datetime.timedelta:
+        if isinstance(value, str):
+            return int(value)
+        return value
+

 def get_application_settings(app: FastAPI) -> ApplicationSettings:
     return cast(ApplicationSettings, app.state.settings)
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml
index dc76ded446fb..d0e829c151f5 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml
@@ -90,7 +90,7 @@ services:
       AUTOSCALING_EC2_REGION_NAME: ${CLUSTERS_KEEPER_EC2_REGION_NAME}
       AUTOSCALING_EC2_SECRET_ACCESS_KEY: ${CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY}
       AUTOSCALING_NODES_MONITORING: null
-      AUTOSCALING_POLL_INTERVAL: 10
+      AUTOSCALING_POLL_INTERVAL: 00:00:10
       DASK_MONITORING_URL: tls://dask-scheduler:8786
       DASK_SCHEDULER_AUTH: '{"type":"tls","tls_ca_file":"${DASK_TLS_CA_FILE}","tls_client_cert":"${DASK_TLS_CERT}","tls_client_key":"${DASK_TLS_KEY}"}'
       EC2_INSTANCES_ALLOWED_TYPES: ${WORKERS_EC2_INSTANCES_ALLOWED_TYPES}
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py
index 871ad8bd242b..f3ebe712b9a3 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_core.py
@@ -9,7 +9,7 @@
 from fastapi import FastAPI
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from servicelib.logging_utils import log_catch
 from servicelib.utils import limited_gather
@@ -53,8 +53,10 @@ def _get_instance_last_heartbeat(instance: EC2InstanceData) -> datetime.datetime
     return None


-_USER_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "user_id")
-_WALLET_ID_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "wallet_id")
+_USER_ID_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("user_id")
+_WALLET_ID_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python(
+    "wallet_id"
+)


 async def _get_all_associated_worker_instances(
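The dask hunks that follow wrap the scheduler URL in an f-string before handing it to `distributed.Client`. The reason: in pydantic v2, `AnyUrl` is a dedicated URL type rather than a `str` subclass, so callers expecting a plain string must cast explicitly. A short sketch (the address is made up):

from pydantic import AnyUrl, TypeAdapter

url = TypeAdapter(AnyUrl).validate_python("tls://10.0.0.1:8786")
assert not isinstance(url, str)  # v2: no longer a str subclass
assert str(url).startswith("tls://10.0.0.1:8786")  # f"{url}" / str(url) for APIs wanting str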
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/dask.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/dask.py
index de585dc654fe..af1d0df0e663 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/dask.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/dask.py
@@ -34,7 +34,7 @@ async def ping_scheduler(
         require_encryption=True,
     )
     async with distributed.Client(
-        url, asynchronous=True, timeout=_CONNECTION_TIMEOUT, security=security
+        f"{url}", asynchronous=True, timeout=_CONNECTION_TIMEOUT, security=security
     ):
         ...
     return True
@@ -59,7 +59,7 @@ async def is_scheduler_busy(
         require_encryption=True,
     )
     async with distributed.Client(
-        url, asynchronous=True, timeout=_CONNECTION_TIMEOUT, security=security
+        f"{url}", asynchronous=True, timeout=_CONNECTION_TIMEOUT, security=security
     ) as client:
         datasets_on_scheduler = await _wrap_client_async_routine(client.list_datasets())
         _logger.info("cluster currently has %s datasets", len(datasets_on_scheduler))
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py
index c9b4a32f4afa..d91a6b3df78f 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py
@@ -95,7 +95,7 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str:
         f"EC2_INSTANCES_NAME_PREFIX={cluster_machines_name_prefix}",
         f"LOG_LEVEL={app_settings.LOG_LEVEL}",
         f"WORKERS_EC2_INSTANCES_ALLOWED_TYPES={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_ALLOWED_TYPES)}",
-        f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}",  # type: ignore[arg-type]
+        f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}",
         f"WORKERS_EC2_INSTANCES_KEY_NAME={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_KEY_NAME}",
         f"WORKERS_EC2_INSTANCES_MAX_INSTANCES={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_MAX_INSTANCES}",
         f"WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS={_convert_to_env_list(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS)}",
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/dask.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/dask.py
index 957644f63463..266557358b7f 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/dask.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/dask.py
@@ -1,13 +1,15 @@
 from aws_library.ec2 import EC2InstanceData
 from fastapi import FastAPI
 from models_library.clusters import InternalClusterAuthentication
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter

 from ..core.settings import get_application_settings


 def get_scheduler_url(ec2_instance: EC2InstanceData) -> AnyUrl:
-    url: AnyUrl = parse_obj_as(AnyUrl, f"tls://{ec2_instance.aws_private_dns}:8786")
+    url: AnyUrl = TypeAdapter(AnyUrl).validate_python(
+        f"tls://{ec2_instance.aws_private_dns}:8786"
+    )
     return url
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py
index b48e1076e59d..1d4534ff0258 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/ec2.py
@@ -4,7 +4,7 @@
 from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter

 from .._meta import VERSION
 from ..constants import (
@@ -16,11 +16,13 @@ from ..core.settings import ApplicationSettings

 _APPLICATION_TAG_KEY: Final[str] = "io.simcore.clusters-keeper"
-_APPLICATION_VERSION_TAG: Final[EC2Tags] = parse_obj_as(
-    EC2Tags, {f"{_APPLICATION_TAG_KEY}.version": f"{VERSION}"}
+_APPLICATION_VERSION_TAG: Final[EC2Tags] = TypeAdapter(EC2Tags).validate_python(
+    {f"{_APPLICATION_TAG_KEY}.version": f"{VERSION}"}
 )

-HEARTBEAT_TAG_KEY: Final[AWSTagKey] = parse_obj_as(AWSTagKey, "last_heartbeat")
+HEARTBEAT_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python(
+    "last_heartbeat"
+)

 CLUSTER_NAME_PREFIX: Final[str] = "osparc-computational-cluster-"
diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py
index 43805123c30e..da94a9886f39 100644
--- a/services/clusters-keeper/tests/unit/conftest.py
+++ b/services/clusters-keeper/tests/unit/conftest.py
@@ -83,7 +83,7 @@ def mocked_ec2_server_envs(
     # NOTE: overrides the EC2Settings with what clusters-keeper expects
     changed_envs: EnvVarsDict = {
         f"{CLUSTERS_KEEPER_ENV_PREFIX}{k}": v
-        for k, v in mocked_ec2_server_settings.dict().items()
+        for k, v in mocked_ec2_server_settings.model_dump().items()
     }
     return setenvs_from_dict(monkeypatch, changed_envs)
@@ -98,7 +98,7 @@ def mocked_ssm_server_envs(
         f"{CLUSTERS_KEEPER_ENV_PREFIX}{k}": (
             v.get_secret_value() if isinstance(v, SecretStr) else v
         )
-        for k, v in mocked_ssm_server_settings.dict().items()
+        for k, v in mocked_ssm_server_settings.model_dump().items()
     }
     return setenvs_from_dict(monkeypatch, changed_envs)
@@ -139,7 +139,9 @@ def app_environment(
                 {
                     random.choice(  # noqa: S311
                         ec2_instances
-                    ): EC2InstanceBootSpecific.Config.schema_extra["examples"][
+                    ): EC2InstanceBootSpecific.model_config["json_schema_extra"][
+                        "examples"
+                    ][
                         1
                     ]  # NOTE: we use example with custom script
                 }
@@ -157,7 +159,9 @@ def app_environment(
             "WORKERS_EC2_INSTANCES_ALLOWED_TYPES": json.dumps(
                 {
                     ec2_type_name: random.choice(  # noqa: S311
-                        EC2InstanceBootSpecific.Config.schema_extra["examples"]
+                        EC2InstanceBootSpecific.model_config["json_schema_extra"][
+                            "examples"
+                        ]
                     )
                     for ec2_type_name in ec2_instances
                 }
diff --git a/services/clusters-keeper/tests/unit/test_api_health.py b/services/clusters-keeper/tests/unit/test_api_health.py
index 5bf72ccae8ef..e1a5de4c6ced 100644
--- a/services/clusters-keeper/tests/unit/test_api_health.py
+++ b/services/clusters-keeper/tests/unit/test_api_health.py
@@ -41,7 +41,7 @@ async def test_status_no_rabbit(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response

     assert status_response.rabbitmq.is_enabled is False
@@ -61,7 +61,7 @@ async def test_status(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response

     assert status_response.rabbitmq.is_enabled is True
@@ -79,7 +79,7 @@ async def test_status(
     response = await async_client.get("/status")
     response.raise_for_status()
     assert response.status_code == status.HTTP_200_OK
-    status_response = _StatusGet.parse_obj(response.json())
+    status_response = _StatusGet.model_validate(response.json())
     assert status_response

     assert status_response.rabbitmq.is_enabled is True
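The conftest hunks above read schema examples through `model_config["json_schema_extra"]["examples"]`, the v2 home of what used to live in `Config.schema_extra`. A runnable sketch with a single-field stand-in model:

from pydantic import BaseModel, ConfigDict

class EC2InstanceBootSpecific(BaseModel):  # stand-in with one illustrative example
    ami_id: str
    model_config = ConfigDict(json_schema_extra={"examples": [{"ami_id": "ami-123456"}]})

example = EC2InstanceBootSpecific.model_config["json_schema_extra"]["examples"][0]
boot_spec = EC2InstanceBootSpecific.model_validate(example)  # was: parse_obj(...)

Because `json_schema_extra` is typed loosely, type checkers cannot narrow these lookups; that is why the repo sprinkles `# type: ignore[union-attr,index]` around them.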
diff --git a/services/clusters-keeper/tests/unit/test_core_settings.py b/services/clusters-keeper/tests/unit/test_core_settings.py
index 0e467dc1e67b..d734bf32cffa 100644
--- a/services/clusters-keeper/tests/unit/test_core_settings.py
+++ b/services/clusters-keeper/tests/unit/test_core_settings.py
@@ -45,7 +45,9 @@ def test_multiple_primary_ec2_instances_raises(
             "PRIMARY_EC2_INSTANCES_ALLOWED_TYPES": json.dumps(
                 {
                     ec2_type_name: random.choice(  # noqa: S311
-                        EC2InstanceBootSpecific.Config.schema_extra["examples"]
+                        EC2InstanceBootSpecific.model_config["json_schema_extra"][
+                            "examples"
+                        ]
                    )
                    for ec2_type_name in ec2_instances
                }
diff --git a/services/clusters-keeper/tests/unit/test_modules_dask.py b/services/clusters-keeper/tests/unit/test_modules_dask.py
index db1833ffd91f..7f0408d7057c 100644
--- a/services/clusters-keeper/tests/unit/test_modules_dask.py
+++ b/services/clusters-keeper/tests/unit/test_modules_dask.py
@@ -12,7 +12,7 @@
     NoAuthentication,
     TLSAuthentication,
 )
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter
 from simcore_service_clusters_keeper.modules.dask import (
     is_scheduler_busy,
     ping_scheduler,
@@ -24,7 +24,9 @@

 _authentication_types = [
     NoAuthentication(),
-    TLSAuthentication.construct(**TLSAuthentication.Config.schema_extra["examples"][0]),
+    TLSAuthentication.model_construct(
+        **TLSAuthentication.model_config["json_schema_extra"]["examples"][0]
+    ),
 ]
@@ -36,7 +38,7 @@ async def test_ping_scheduler_non_existing_scheduler(
 ):
     assert (
         await ping_scheduler(
-            parse_obj_as(AnyUrl, f"tcp://{faker.ipv4()}:{faker.port_number()}"),
+            TypeAdapter(AnyUrl).validate_python(f"tcp://{faker.ipv4()}:{faker.port_number()}"),
             authentication,
         )
         is False
@@ -46,7 +48,7 @@ async def test_ping_scheduler_non_existing_scheduler(
 async def test_ping_scheduler(dask_spec_local_cluster: SpecCluster):
     assert (
         await ping_scheduler(
-            parse_obj_as(AnyUrl, dask_spec_local_cluster.scheduler_address),
+            TypeAdapter(AnyUrl).validate_python(dask_spec_local_cluster.scheduler_address),
             NoAuthentication(),
         )
         is True
@@ -69,7 +71,7 @@ async def test_is_scheduler_busy(
     dask_spec_cluster_client: distributed.Client,
 ):
     # nothing runs right now
-    scheduler_address = parse_obj_as(AnyUrl, dask_spec_local_cluster.scheduler_address)
+    scheduler_address = TypeAdapter(AnyUrl).validate_python(dask_spec_local_cluster.scheduler_address)
     assert await is_scheduler_busy(scheduler_address, NoAuthentication()) is False

     _SLEEP_TIME = 5
@@ -84,5 +86,5 @@ def _some_long_running_fct(sleep_time: int) -> str:
         busy=True,
     )

-    result = await future.result(timeout=2 * _SLEEP_TIME)  # type: ignore
+    result = await future.result(timeout=2 * _SLEEP_TIME)
     assert "seconds" in result
diff --git a/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py b/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py
index 1bbd5683c769..e1ef5f850dc5 100644
--- a/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py
+++ b/services/clusters-keeper/tests/unit/test_modules_rabbitmq.py
@@ -119,7 +119,7 @@ async def test_post_message(
             f"--> checking for message in rabbit exchange {rabbit_message.channel_name}, {attempt.retry_state.retry_object.statistics}"
         )
         mocked_message_handler.assert_called_once_with(
-            rabbit_message.json().encode()
+            rabbit_message.model_dump_json().encode()
         )
         print("... message received")
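test_modules_dask.py above also picks up the `construct` -> `model_construct` rename: build an instance from already-validated data without re-running validation. A sketch with an illustrative stand-in for the models-library class:

from pydantic import BaseModel

class TLSAuthentication(BaseModel):  # illustrative stand-in only
    type: str = "tls"
    tls_ca_file: str
    tls_client_cert: str
    tls_client_key: str

auth = TLSAuthentication.model_construct(  # was: TLSAuthentication.construct(...)
    tls_ca_file="ca.pem", tls_client_cert="cert.pem", tls_client_key="key.pem"
)
assert auth.type == "tls"

Skipping validation is what makes `model_construct` suitable for feeding schema examples into parametrized tests without tripping on strict field checks.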
message received") diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index 1c4a7760d5fb..20cde00c0476 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -23,7 +23,7 @@ TLSAuthentication, ) from models_library.utils.json_serialization import json_dumps -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_clusters_keeper.core.settings import ApplicationSettings from simcore_service_clusters_keeper.utils.clusters import ( @@ -69,9 +69,9 @@ def app_environment( monkeypatch, { "CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": json_dumps( - TLSAuthentication.Config.schema_extra["examples"][0] + TLSAuthentication.model_config["json_schema_extra"]["examples"][0] if isinstance(backend_cluster_auth, TLSAuthentication) - else NoAuthentication.Config.schema_extra["examples"][0] + else NoAuthentication.model_config["json_schema_extra"]["examples"][0] ) }, ) @@ -223,7 +223,7 @@ def test_create_startup_script_script_size_below_16kb( script_size_in_bytes = len(startup_script.encode("utf-8")) print( - f"current script size is {parse_obj_as(ByteSize, script_size_in_bytes).human_readable()}" + f"current script size is {TypeAdapter(ByteSize).validate_python(script_size_in_bytes).human_readable()}" ) # NOTE: EC2 user data cannot be above 16KB, we keep some margin here assert script_size_in_bytes < 15 * 1024 @@ -285,7 +285,9 @@ def test__prepare_environment_variables_defines_all_envs_for_docker_compose( "authentication", [ NoAuthentication(), - TLSAuthentication(**TLSAuthentication.Config.schema_extra["examples"][0]), + TLSAuthentication( + **TLSAuthentication.model_config["json_schema_extra"]["examples"][0] + ), ], ) def test_create_cluster_from_ec2_instance( diff --git a/services/dask-sidecar/requirements/_base.in b/services/dask-sidecar/requirements/_base.in index 4224aaecd881..2352652e4a01 100644 --- a/services/dask-sidecar/requirements/_base.in +++ b/services/dask-sidecar/requirements/_base.in @@ -11,6 +11,7 @@ # - Added as constraints instead of requirements in order to avoid polluting base.txt # - Will be installed when prod.txt or dev.txt # +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/dask-task-models-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/service-library/requirements/_base.in @@ -24,6 +25,6 @@ dask[distributed, diagnostics] dask-gateway # needed for the osparc-dask-gateway to preload the module fsspec[http, s3] # sub types needed as we acces http and s3 here lz4 # for compression -pydantic[email,dotenv] +pydantic prometheus_client repro-zipfile diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index 6d776591b157..9c36e1d2e6ea 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -16,13 +16,22 @@ aiofiles==23.2.1 # -r requirements/_base.in aiohttp==3.9.5 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index 6d776591b157..9c36e1d2e6ea 100644
--- a/services/dask-sidecar/requirements/_base.txt
+++ b/services/dask-sidecar/requirements/_base.txt
@@ -16,13 +16,22 @@ aiofiles==23.2.1
     #   -r requirements/_base.in
 aiohttp==3.9.5
     # via
+    #   -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../requirements/constraints.txt
    #   aiobotocore
@@ -36,6 +45,8 @@ aiormq==6.8.0
     # via aio-pika
 aiosignal==1.3.1
     # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.3.0
     # via
     #   fast-depends
@@ -59,13 +70,22 @@ botocore==1.34.106
     # via aiobotocore
 certifi==2024.7.4
     # via
+    #   -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../requirements/constraints.txt
    #   requests
@@ -137,13 +157,22 @@ importlib-metadata==7.1.0
     #   opentelemetry-api
 jinja2==3.1.4
     # via
+    #   -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../requirements/constraints.txt
    #   bokeh
@@ -222,13 +251,22 @@ opentelemetry-util-http==0.47b0
     # via opentelemetry-instrumentation-requests
 orjson==3.10.3
     # via
+    #   -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -257,27 +295,62 @@ psutil==6.0.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.6.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -288,18 +361,27 @@ python-dateutil==2.9.0.post0 # botocore # pandas python-dotenv==1.0.1 - # via pydantic + # via pydantic-settings pytz==2024.1 # via pandas pyyaml==6.0.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -309,13 +391,22 @@ pyyaml==6.0.1 # distributed redis==5.0.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -384,18 +475,28 @@ typing-extensions==4.11.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer tzdata==2024.1 # via pandas urllib3==2.2.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # botocore diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index 7f13a97ad899..92130c87b772 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto attrs==23.2.0 @@ -141,11 +145,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyftpdlib==2.0.0 # via pytest-localftpserver pyopenssl==24.2.1 @@ -244,6 +252,7 @@ typing-extensions==4.11.0 # aws-sam-translator # cfn-lint # pydantic + # pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/dask-sidecar/requirements/ci.txt b/services/dask-sidecar/requirements/ci.txt index 6f79fbbaec1e..343964753b09 100644 --- a/services/dask-sidecar/requirements/ci.txt +++ b/services/dask-sidecar/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-dask-task-models-library @ ../../packages/dask-task-models-library/ simcore-models-library @ ../../packages/models-library/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/dask-sidecar/requirements/dev.txt b/services/dask-sidecar/requirements/dev.txt index 82fbeaefec68..6ad6237135be 100644 --- a/services/dask-sidecar/requirements/dev.txt +++ b/services/dask-sidecar/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library/ --editable ../../packages/dask-task-models-library/ --editable ../../packages/models-library/ --editable ../../packages/pytest-simcore/ diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index 63e9bc97a1b2..b6ae0b256118 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -71,7 +71,7 @@ async def 
_write_input_data( if isinstance(input_params, FileUrl): file_name = ( input_params.file_mapping - or Path(URL(input_params.url).path.strip("/")).name + or Path(URL(f"{input_params.url}").path.strip("/")).name ) destination_path = task_volumes.inputs_folder / file_name @@ -114,7 +114,7 @@ async def _retrieve_output_data( ) _logger.debug( "following outputs will be searched for:\n%s", - self.task_parameters.output_data_keys.json(indent=1), + self.task_parameters.output_data_keys.model_dump_json(indent=1), ) output_data = TaskOutputData.from_task_output( @@ -132,7 +132,7 @@ async def _retrieve_output_data( if isinstance(output_params, FileUrl): assert ( # nosec output_params.file_mapping - ), f"{output_params.json(indent=1)} expected resolved in TaskOutputData.from_task_output" + ), f"{output_params.model_dump_json(indent=1)} expected resolved in TaskOutputData.from_task_output" src_path = task_volumes.outputs_folder / output_params.file_mapping upload_tasks.append( @@ -146,7 +146,9 @@ async def _retrieve_output_data( await asyncio.gather(*upload_tasks) await self._publish_sidecar_log("All the output data were uploaded.") - _logger.info("retrieved outputs data:\n%s", output_data.json(indent=1)) + _logger.info( + "retrieved outputs data:\n%s", output_data.model_dump_json(indent=1) + ) return output_data except (ValueError, ValidationError) as exc: diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py index 0a2d9e3e9d3e..289f5df9169a 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py @@ -26,7 +26,7 @@ from models_library.services_resources import BootMode from models_library.utils.labels_annotations import OSPARC_LABEL_PREFIXES, from_labels from packaging import version -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.logging_utils import ( LogLevelInt, LogMessageStr, @@ -95,7 +95,7 @@ async def create_container_config( NanoCPUs=nano_cpus_limit, ), ) - logger.debug("Container configuration: \n%s", pformat(config.dict())) + logger.debug("Container configuration: \n%s", pformat(config.model_dump())) return config @@ -109,7 +109,7 @@ async def managed_container( logger, logging.DEBUG, msg=f"managing container {name} for {config.image}" ): container = await docker_client.containers.create( - config.dict(by_alias=True), name=name + config.model_dump(by_alias=True), name=name ) yield container except asyncio.CancelledError: @@ -443,7 +443,7 @@ async def get_image_labels( data = from_labels( image_labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - return parse_obj_as(ImageLabels, data) + return TypeAdapter(ImageLabels).validate_python(data) return ImageLabels() diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py index eabe5f00d033..8e5d1e8794ff 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class 
ComputationalSidecarRuntimeError(PydanticErrorMixin, RuntimeError): +class ComputationalSidecarRuntimeError(OsparcErrorMixin, RuntimeError): ... diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py index 691192716e9e..ee270aeb8882 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py @@ -3,7 +3,14 @@ from models_library.basic_regex import SIMPLE_VERSION_RE from models_library.services import ServiceMetaDataPublished from packaging import version -from pydantic import BaseModel, ByteSize, Extra, Field, validator +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + Field, + field_validator, + model_validator, +) LEGACY_INTEGRATION_VERSION = version.Version("0") PROGRESS_REGEXP: re.Pattern[str] = re.compile( @@ -41,21 +48,15 @@ class ContainerHostConfig(BaseModel): ..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs" ) - @validator("memory_swap", pre=True, always=True) - @classmethod - def ensure_no_memory_swap_means_no_swap(cls, v, values): - if v is None: - # if not set it will be the same value as memory to ensure swap is disabled - return values["memory"] - return v + @model_validator(mode="after") + def ensure_memory_swap_is_not_unlimited(self) -> "ContainerHostConfig": + if self.memory_swap is None: + self.memory_swap = self.memory - @validator("memory_swap") - @classmethod - def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values): - if v < values["memory"]: + if self.memory_swap < self.memory: msg = "Memory swap cannot be set to a smaller value than memory" raise ValueError(msg) - return v + return self class DockerContainerConfig(BaseModel): @@ -71,7 +72,7 @@ class ImageLabels(BaseModel): default=str(LEGACY_INTEGRATION_VERSION), alias="integration-version", description="integration version number", - regex=SIMPLE_VERSION_RE, + pattern=SIMPLE_VERSION_RE, examples=["1.0.0"], ) progress_regexp: str = Field( @@ -79,18 +80,16 @@ class ImageLabels(BaseModel): alias="progress_regexp", description="regexp pattern for detecting computational service's progress", ) + model_config = ConfigDict(extra="ignore") - class Config: - extra = Extra.ignore - - @validator("integration_version", pre=True) + @field_validator("integration_version", mode="before") @classmethod def default_integration_version(cls, v): if v is None: return ImageLabels().integration_version return v - @validator("progress_regexp", pre=True) + @field_validator("progress_regexp", mode="before") @classmethod def default_progress_regexp(cls, v): if v is None: @@ -104,6 +103,6 @@ def get_progress_regexp(self) -> re.Pattern[str]: return re.compile(self.progress_regexp) -assert set(ImageLabels.__fields__).issubset( - ServiceMetaDataPublished.__fields__ +assert set(ImageLabels.model_fields).issubset( + ServiceMetaDataPublished.model_fields ), "ImageLabels must be compatible with ServiceDockerData" diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py index e042c5c022a2..d04682dac075 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py @@ -153,4 +153,4 @@ async def periodicaly_check_if_aborted(task_name: str) -> 
None: def publish_event(dask_pub: distributed.Pub, event: BaseTaskEvent) -> None: """never reraises, only CancellationError""" with log_catch(_logger, reraise=False): - dask_pub.put(event.json()) + dask_pub.put(event.model_dump_json()) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py index b14b5db657f4..e5b78bd286a5 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py @@ -13,7 +13,7 @@ import aiofiles.tempfile import fsspec # type: ignore[import-untyped] import repro_zipfile # type: ignore[import-untyped] -from pydantic import ByteSize, FileUrl, parse_obj_as +from pydantic import ByteSize, FileUrl, TypeAdapter from pydantic.networks import AnyUrl from servicelib.logging_utils import LogLevelInt, LogMessageStr from settings_library.s3 import S3Settings @@ -96,9 +96,9 @@ async def _copy_file( ): src_storage_kwargs = src_storage_cfg or {} dst_storage_kwargs = dst_storage_cfg or {} - with fsspec.open(src_url, mode="rb", **src_storage_kwargs) as src_fp, fsspec.open( - dst_url, "wb", **dst_storage_kwargs - ) as dst_fp: + with fsspec.open( + f"{src_url}", mode="rb", **src_storage_kwargs + ) as src_fp, fsspec.open(f"{dst_url}", "wb", **dst_storage_kwargs) as dst_fp: assert isinstance(src_fp, IOBase) # nosec assert isinstance(dst_fp, IOBase) # nosec file_size = getattr(src_fp, "size", None) @@ -148,7 +148,7 @@ async def pull_file_from_remote( storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) await _copy_file( src_url, - parse_obj_as(FileUrl, dst_path.as_uri()), + TypeAdapter(FileUrl).validate_python(dst_path.as_uri()), src_storage_cfg=cast(dict[str, Any], storage_kwargs), log_publishing_cb=log_publishing_cb, text_prefix=f"Downloading '{src_url.path.strip('/')}':", @@ -218,7 +218,7 @@ async def _push_file_to_remote( storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) await _copy_file( - parse_obj_as(FileUrl, file_to_upload.as_uri()), + TypeAdapter(FileUrl).validate_python(file_to_upload.as_uri()), dst_url, dst_storage_cfg=cast(dict[str, Any], storage_kwargs), log_publishing_cb=log_publishing_cb, @@ -246,7 +246,7 @@ async def push_file_to_remote( src_mime_type, _ = mimetypes.guess_type(src_path) if dst_mime_type == _ZIP_MIME_TYPE and src_mime_type != _ZIP_MIME_TYPE: - archive_file_path = Path(tmp_dir) / Path(URL(dst_url).path).name + archive_file_path = Path(tmp_dir) / Path(URL(f"{dst_url}").path).name await log_publishing_cb( f"Compressing '{src_path.name}' to '{archive_file_path.name}'...", logging.INFO, diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py index 80661c7ecb26..7073f2a4caaa 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py @@ -2,7 +2,7 @@ from typing import Any from models_library.basic_types import LogLevel -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.utils_logging import MixinLoggingSettings @@ -14,7 +14,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: str | None = None LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + 
validation_alias=AliasChoices( + "DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) # sidecar config --- @@ -37,7 +39,10 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices( + "DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -50,7 +55,7 @@ def as_worker(self) -> bool: assert self.DASK_SCHEDULER_HOST is not None # nosec return as_worker - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: Any) -> str: return cls.validate_log_level(f"{value}") diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py index 79dfd08cbdb5..e818a3301b63 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/tasks.py @@ -67,7 +67,7 @@ async def dask_setup(worker: distributed.Worker) -> None: ) logger.info("Setting up worker...") - logger.info("Settings: %s", pformat(settings.dict())) + logger.info("Settings: %s", pformat(settings.model_dump())) print_dask_sidecar_banner() @@ -94,7 +94,7 @@ async def _run_computational_sidecar_async( _logger.info( "run_computational_sidecar %s", - f"{task_parameters.dict()=}, {docker_auth=}, {log_file_url=}, {s3_settings=}", + f"{task_parameters.model_dump()=}, {docker_auth=}, {log_file_url=}, {s3_settings=}", ) current_task = asyncio.current_task() assert current_task # nosec diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py index 936d54a33777..61481d32c0a3 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils.py @@ -6,7 +6,7 @@ import aiodocker from aiodocker.containers import DockerContainer -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter logger = logging.getLogger(__name__) @@ -57,7 +57,7 @@ async def async_num_available_gpus() -> int: if container_data.setdefault("StatusCode", 127) == 0 else 0 ) - except asyncio.TimeoutError as err: + except TimeoutError as err: logger.warning( "num_gpus timedout while check-run %s: %s", spec_config, err ) @@ -100,14 +100,14 @@ async def async_video_memory() -> int: Coroutine, container.log(stdout=True, stderr=True, follow=False), ) - video_ram = parse_obj_as(ByteSize, 0) + video_ram = TypeAdapter(ByteSize).validate_python(0) if container_data.setdefault("StatusCode", 127) == 0: for line in container_logs: - video_ram = parse_obj_as( - ByteSize, video_ram + parse_obj_as(ByteSize, line) + video_ram = TypeAdapter(ByteSize).validate_python( + video_ram + TypeAdapter(ByteSize).validate_python(line) ) - except asyncio.TimeoutError as err: + except TimeoutError as err: logger.warning( "num_gpus timedout while check-run %s: %s", spec_config, err ) diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index edc92c87969c..4d4801752d94 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ b/services/dask-sidecar/tests/unit/conftest.py @@ -6,6 +6,7 @@ from collections.abc import 
AsyncIterator, Callable, Iterator from pathlib import Path from pprint import pformat +from typing import cast import dask import dask.config @@ -19,7 +20,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_localftpserver.servers import ProcessFTPServer from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -169,8 +170,7 @@ def s3_settings(mocked_s3_server_envs: None) -> S3Settings: @pytest.fixture def s3_endpoint_url(s3_settings: S3Settings) -> AnyUrl: assert s3_settings.S3_ENDPOINT - return parse_obj_as( - AnyUrl, + return TypeAdapter(AnyUrl).validate_python( f"{s3_settings.S3_ENDPOINT}", ) @@ -203,9 +203,7 @@ async def bucket( response = await aiobotocore_s3_client.list_buckets() assert response["Buckets"] assert len(response["Buckets"]) == 1 - bucket_name = response["Buckets"][0]["Name"] - return bucket_name - # await _clean_bucket_content(aiobotocore_s3_client, bucket_name) + return response["Buckets"][0]["Name"] @pytest.fixture @@ -214,7 +212,7 @@ def creator(file_path: Path | None = None) -> AnyUrl: file_path_with_bucket = Path(s3_settings.S3_BUCKET_NAME) / ( file_path or faker.file_name() ) - return parse_obj_as(AnyUrl, f"s3://{file_path_with_bucket}") + return TypeAdapter(AnyUrl).validate_python(f"s3://{file_path_with_bucket}") return creator @@ -230,7 +228,7 @@ def file_on_s3_server( def creator() -> AnyUrl: new_remote_file = s3_remote_file_url() - open_file = fsspec.open(new_remote_file, mode="wt", **s3_storage_kwargs) + open_file = fsspec.open(f"{new_remote_file}", mode="wt", **s3_storage_kwargs) with open_file as fp: fp.write( # type: ignore f"This is the file contents of file #'{(len(list_of_created_files)+1):03}'\n" @@ -245,7 +243,7 @@ def creator() -> AnyUrl: # cleanup fs = fsspec.filesystem("s3", **s3_storage_kwargs) for file in list_of_created_files: - fs.delete(file.partition(f"{file.scheme}://")[2]) + fs.delete(f"{file}".partition(f"{file.scheme}://")[2]) @pytest.fixture @@ -255,12 +253,12 @@ def job_id() -> str: @pytest.fixture def project_id(faker: Faker) -> ProjectID: - return faker.uuid4(cast_to=None) + return cast(ProjectID, faker.uuid4(cast_to=None)) @pytest.fixture def node_id(faker: Faker) -> NodeID: - return faker.uuid4(cast_to=None) + return cast(NodeID, faker.uuid4(cast_to=None)) @pytest.fixture(params=["no_parent_node", "with_parent_node"]) @@ -276,9 +274,13 @@ def task_owner( project_id=project_id, node_id=node_id, parent_project_id=( - None if request.param == "no_parent_node" else faker.uuid4(cast_to=None) + None + if request.param == "no_parent_node" + else cast(ProjectID, faker.uuid4(cast_to=None)) ), parent_node_id=( - None if request.param == "no_parent_node" else faker.uuid4(cast_to=None) + None + if request.param == "no_parent_node" + else cast(NodeID, faker.uuid4(cast_to=None)) ), ) diff --git a/services/dask-sidecar/tests/unit/test_cli.py b/services/dask-sidecar/tests/unit/test_cli.py index 4af796ec69bb..101b0e4bcdcd 100644 --- a/services/dask-sidecar/tests/unit/test_cli.py +++ b/services/dask-sidecar/tests/unit/test_cli.py @@ -29,5 +29,5 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): assert result.exit_code == os.EX_OK, result.output print(result.output) - settings = Settings.parse_raw(result.output) + settings = Settings.model_validate_json(result.output) assert 
settings == Settings.create_from_envs() diff --git a/services/dask-sidecar/tests/unit/test_dask_utils.py b/services/dask-sidecar/tests/unit/test_dask_utils.py index a12ee06e2113..214a95502009 100644 --- a/services/dask-sidecar/tests/unit/test_dask_utils.py +++ b/services/dask-sidecar/tests/unit/test_dask_utils.py @@ -52,7 +52,8 @@ def test_publish_event( # hence the long time out message = dask_sub.get(timeout=DASK_TESTING_TIMEOUT_S) assert message is not None - received_task_log_event = TaskLogEvent.parse_raw(message) # type: ignore + assert isinstance(message, str) + received_task_log_event = TaskLogEvent.model_validate_json(message) assert received_task_log_event == event_to_publish @@ -73,7 +74,7 @@ async def test_publish_event_async( assert isinstance(message, Coroutine) message = await message assert message is not None - received_task_log_event = TaskLogEvent.parse_raw(message) # type: ignore + received_task_log_event = TaskLogEvent.model_validate_json(message) assert received_task_log_event == event_to_publish diff --git a/services/dask-sidecar/tests/unit/test_docker_utils.py b/services/dask-sidecar/tests/unit/test_docker_utils.py index 41e801b70bb9..4bc154edd95d 100644 --- a/services/dask-sidecar/tests/unit/test_docker_utils.py +++ b/services/dask-sidecar/tests/unit/test_docker_utils.py @@ -91,7 +91,7 @@ async def test_create_container_config( envs=task_envs, labels=task_labels, ) - assert container_config.dict(by_alias=True) == ( + assert container_config.model_dump(by_alias=True) == ( { "Env": [ "INPUT_FOLDER=/inputs", @@ -221,7 +221,7 @@ async def test_managed_container_always_removes_container( call() .__aenter__() .containers.create( - container_config.dict(by_alias=True), name=None + container_config.model_dump(by_alias=True), name=None ), ] ) diff --git a/services/dask-sidecar/tests/unit/test_file_utils.py b/services/dask-sidecar/tests/unit/test_file_utils.py index 5c51f5f5b007..b31980b46a50 100644 --- a/services/dask-sidecar/tests/unit/test_file_utils.py +++ b/services/dask-sidecar/tests/unit/test_file_utils.py @@ -15,7 +15,7 @@ import fsspec import pytest from faker import Faker -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_localftpserver.servers import ProcessFTPServer from pytest_mock.plugin import MockerFixture from settings_library.s3 import S3Settings @@ -28,7 +28,6 @@ @pytest.fixture() async def mocked_log_publishing_cb( - event_loop: asyncio.AbstractEventLoop, mocker: MockerFixture, ) -> AsyncIterable[mock.AsyncMock]: async with mocker.AsyncMock() as mocked_callback: @@ -46,8 +45,8 @@ def s3_presigned_link_storage_kwargs(s3_settings: S3Settings) -> dict[str, Any]: @pytest.fixture def ftp_remote_file_url(ftpserver: ProcessFTPServer, faker: Faker) -> AnyUrl: - return parse_obj_as( - AnyUrl, f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}" + return TypeAdapter(AnyUrl).validate_python( + f"{ftpserver.get_login_data(style='url')}/{faker.file_name()}" ) @@ -57,8 +56,7 @@ async def s3_presigned_link_remote_file_url( aiobotocore_s3_client, faker: Faker, ) -> AnyUrl: - return parse_obj_as( - AnyUrl, + return TypeAdapter(AnyUrl).validate_python( await aiobotocore_s3_client.generate_presigned_url( "put_object", Params={"Bucket": s3_settings.S3_BUCKET_NAME, "Key": faker.file_name()}, @@ -69,7 +67,9 @@ async def s3_presigned_link_remote_file_url( @pytest.fixture def s3_remote_file_url(s3_settings: S3Settings, faker: Faker) -> AnyUrl: - return parse_obj_as(AnyUrl, 
f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}") + return TypeAdapter(AnyUrl).validate_python( + f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}" + ) @dataclass(frozen=True) @@ -122,7 +122,7 @@ async def test_push_file_to_remote( with cast( fsspec.core.OpenFile, fsspec.open( - remote_parameters.remote_file_url, + f"{remote_parameters.remote_file_url}", mode="rt", **storage_kwargs, ), @@ -153,15 +153,14 @@ async def test_push_file_to_remote_s3_http_presigned_link( ) # check the remote is actually having the file in, but we need s3 access now - s3_remote_file_url = parse_obj_as( - AnyUrl, + s3_remote_file_url = TypeAdapter(AnyUrl).validate_python( f"s3:/{s3_presigned_link_remote_file_url.path}", ) storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) with cast( fsspec.core.OpenFile, - fsspec.open(s3_remote_file_url, mode="rt", **storage_kwargs), + fsspec.open(f"{s3_remote_file_url}", mode="rt", **storage_kwargs), ) as fp: assert fp.read() == TEXT_IN_FILE mocked_log_publishing_cb.assert_called() @@ -173,7 +172,9 @@ async def test_push_file_to_remote_compresses_if_zip_destination( faker: Faker, mocked_log_publishing_cb: mock.AsyncMock, ): - destination_url = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}.zip") + destination_url = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}.zip" + ) src_path = tmp_path / faker.file_name() TEXT_IN_FILE = faker.text() src_path.write_text(TEXT_IN_FILE) @@ -214,7 +215,7 @@ async def test_pull_file_from_remote( with cast( fsspec.core.OpenFile, fsspec.open( - remote_parameters.remote_file_url, + f"{remote_parameters.remote_file_url}", mode="wt", **storage_kwargs, ), @@ -250,7 +251,7 @@ async def test_pull_file_from_remote_s3_presigned_link( with cast( fsspec.core.OpenFile, fsspec.open( - s3_remote_file_url, + f"{s3_remote_file_url}", mode="wt", **storage_kwargs, ), @@ -259,8 +260,7 @@ async def test_pull_file_from_remote_s3_presigned_link( # create a corresponding presigned get link assert s3_remote_file_url.path - remote_file_url = parse_obj_as( - AnyUrl, + remote_file_url = TypeAdapter(AnyUrl).validate_python( await aiobotocore_s3_client.generate_presigned_url( "get_object", Params={ @@ -303,7 +303,9 @@ async def test_pull_compressed_zip_file_from_remote( zfp.write(local_test_file, local_test_file.name) file_names_within_zip_file.add(local_test_file.name) - destination_url = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}.zip") + destination_url = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}.zip" + ) storage_kwargs = {} if remote_parameters.s3_settings: storage_kwargs = _s3fs_settings_from_s3_settings(remote_parameters.s3_settings) @@ -311,7 +313,7 @@ async def test_pull_compressed_zip_file_from_remote( with cast( fsspec.core.OpenFile, fsspec.open( - destination_url, + f"{destination_url}", mode="wb", **storage_kwargs, ), @@ -395,8 +397,12 @@ async def test_push_file_to_remote_creates_reproducible_zip_archive( faker: Faker, mocked_log_publishing_cb: mock.AsyncMock, ): - destination_url1 = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}1.zip") - destination_url2 = parse_obj_as(AnyUrl, f"{remote_parameters.remote_file_url}2.zip") + destination_url1 = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}1.zip" + ) + destination_url2 = TypeAdapter(AnyUrl).validate_python( + f"{remote_parameters.remote_file_url}2.zip" + ) src_path = tmp_path / faker.file_name() TEXT_IN_FILE = faker.text() src_path.write_text(TEXT_IN_FILE) diff 
--git a/services/dask-sidecar/tests/unit/test_models.py b/services/dask-sidecar/tests/unit/test_models.py index 65ec5304631f..f9e80f67fa45 100644 --- a/services/dask-sidecar/tests/unit/test_models.py +++ b/services/dask-sidecar/tests/unit/test_models.py @@ -10,7 +10,7 @@ def test_container_host_config_sets_swap_same_as_memory_if_not_set(faker: Faker) instance = ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint()), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), ) assert instance.memory == instance.memory_swap @@ -22,7 +22,7 @@ def test_container_host_config_raises_if_set_negative( ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=-84654, max_value=-1)), ) @@ -34,14 +34,14 @@ def test_container_host_config_raises_if_set_smaller_than_memory( ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(0), ) with pytest.raises(ValidationError): ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=1, max_value=233)), ) @@ -52,7 +52,7 @@ def test_container_host_config_sets_swap_if_set_bigger_than_memory( instance = ContainerHostConfig( Binds=[faker.pystr() for _ in range(5)], Memory=ByteSize(faker.pyint(min_value=234, max_value=434234)), - NanoCPUs=faker.pyfloat(min_value=0.1), + NanoCPUs=faker.pyint(min_value=1), MemorySwap=ByteSize(faker.pyint(min_value=434235, max_value=12343424234)), ) assert instance.memory_swap diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 4aff3a1fd3d3..5beebe2e37fb 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -41,7 +41,7 @@ from models_library.services import ServiceMetaDataPublished from models_library.services_resources import BootMode from packaging import version -from pydantic import AnyUrl, SecretStr, parse_obj_as +from pydantic import AnyUrl, SecretStr, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.s3 import S3Settings @@ -178,7 +178,9 @@ def integration_version(request: pytest.FixtureRequest) -> version.Version: @pytest.fixture def additional_envs(faker: Faker) -> dict[EnvVarKey, str]: - return parse_obj_as(dict[EnvVarKey, str], faker.pydict(allowed_types=(str,))) + return TypeAdapter(dict[EnvVarKey, str]).validate_python( + faker.pydict(allowed_types=(str,)) + ) @pytest.fixture @@ -198,7 +200,7 @@ def sleeper_task( list_of_files = [file_on_s3_server() for _ in range(NUM_FILES)] # defines the inputs of the task - input_data = TaskInputData.parse_obj( + input_data = TaskInputData.model_validate( { "input_1": 23, "input_23": "a string input", @@ -276,7 +278,7 @@ def sleeper_task( "pytest_bool": False, } output_file_url = s3_remote_file_url(file_path="output_file") - expected_output_keys = TaskOutputDataSchema.parse_obj( + expected_output_keys = TaskOutputDataSchema.model_validate( { **( {k: {"required": True} for k in jsonable_outputs} @@ -295,7 +297,7 @@ def sleeper_task( ), } ) - 
expected_output_data = TaskOutputData.parse_obj( + expected_output_data = TaskOutputData.model_validate( { **( jsonable_outputs @@ -395,10 +397,10 @@ def _creator(command: list[str] | None = None) -> ServiceExampleParam: service_version="latest", command=command or ["/bin/bash", "-c", "echo 'hello I'm an empty ubuntu task!"], - input_data=TaskInputData.parse_obj({}), - output_data_keys=TaskOutputDataSchema.parse_obj({}), + input_data=TaskInputData.model_validate({}), + output_data_keys=TaskOutputDataSchema.model_validate({}), log_file_url=s3_remote_file_url(file_path="log.dat"), - expected_output_data=TaskOutputData.parse_obj({}), + expected_output_data=TaskOutputData.model_validate({}), expected_logs=[], integration_version=integration_version, task_envs={}, @@ -433,12 +435,16 @@ def caplog_info_level( yield caplog +# from pydantic.json_schema import JsonDict + + @pytest.fixture def mocked_get_image_labels( integration_version: version.Version, mocker: MockerFixture ) -> mock.Mock: - labels: ImageLabels = parse_obj_as( - ImageLabels, ServiceMetaDataPublished.Config.schema_extra["examples"][0] + assert "json_schema_extra" in ServiceMetaDataPublished.model_config + labels: ImageLabels = TypeAdapter(ImageLabels).validate_python( + ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"][0], ) labels.integration_version = f"{integration_version}" return mocker.patch( @@ -580,7 +586,8 @@ async def test_run_computational_sidecar_dask( # check that the task produces expected logs worker_progresses = [ - TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + TaskProgressEvent.model_validate_json(msg).progress + for msg in progress_sub.buffer ] # check ordering assert worker_progresses == sorted( @@ -588,7 +595,7 @@ async def test_run_computational_sidecar_dask( ), "ordering of progress values incorrectly sorted!" 
assert worker_progresses[0] == 0, "missing/incorrect initial progress value" assert worker_progresses[-1] == 1, "missing/incorrect final progress value" - worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + worker_logs = [TaskLogEvent.model_validate_json(msg).log for msg in log_sub.buffer] print(f"<-- we got {len(worker_logs)} lines of logs") for log in sleeper_task.expected_logs: @@ -649,7 +656,8 @@ async def test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub # check that the task produces expected logs worker_progresses = [ - TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + TaskProgressEvent.model_validate_json(msg).progress + for msg in progress_sub.buffer ] # check length assert len(worker_progresses) == len( @@ -659,7 +667,7 @@ async def test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub assert worker_progresses[0] == 0, "missing/incorrect initial progress value" assert worker_progresses[-1] == 1, "missing/incorrect final progress value" - worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + worker_logs = [TaskLogEvent.model_validate_json(msg).log for msg in log_sub.buffer] # check all the awaited logs are in there filtered_worker_logs = filter(lambda log: "This is iteration" in log, worker_logs) assert len(list(filtered_worker_logs)) == NUMBER_OF_LOGS diff --git a/services/dask-sidecar/tests/unit/test_utils.py b/services/dask-sidecar/tests/unit/test_utils.py index 5ee6f9156e54..f3d162952ff8 100644 --- a/services/dask-sidecar/tests/unit/test_utils.py +++ b/services/dask-sidecar/tests/unit/test_utils.py @@ -13,12 +13,11 @@ from simcore_service_dask_sidecar.utils import num_available_gpus -@pytest.fixture(scope="function") +@pytest.fixture def mock_aiodocker(mocker: MockerFixture) -> mock.MagicMock: - mock_docker = mocker.patch( + return mocker.patch( "simcore_service_dask_sidecar.utils.aiodocker.Docker", autospec=True ) - return mock_docker def test_num_available_gpus_returns_0_when_container_not_created( @@ -74,7 +73,7 @@ def test_num_available_gpus_returns_0_when_container_wait_timesout( mock_aiodocker: mock.MagicMock, ): mock_aiodocker.return_value.__aenter__.return_value.containers.run.return_value.wait.side_effect = ( - asyncio.TimeoutError() + TimeoutError() ) assert num_available_gpus() == 0 @@ -91,6 +90,9 @@ def test_num_available_gpus( mock_aiodocker: mock.MagicMock, ): # default with mock should return 0 gpus + mock_aiodocker.return_value.__aenter__.return_value.containers.run.return_value.wait.return_value = { + "StatusCode": 0 + } assert num_available_gpus() == 0 # add the correct log diff --git a/services/datcore-adapter/requirements/_base.in b/services/datcore-adapter/requirements/_base.in index de131dd64305..791d139123ad 100644 --- a/services/datcore-adapter/requirements/_base.in +++ b/services/datcore-adapter/requirements/_base.in @@ -4,6 +4,7 @@ # NOTE: ALL version constraints MUST be commented --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in # service-library[fastapi] @@ -17,6 +18,6 @@ aiofiles fastapi fastapi-pagination httpx[http2] -pydantic[email] +pydantic python-multipart # for fastapi multipart uploads uvicorn[standard] diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index 
9cb77e4a359d..b572351f2e2f 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -14,10 +14,16 @@ aiofiles==23.2.1 # -r requirements/_base.in aiohttp==3.9.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiodocker @@ -25,6 +31,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -52,10 +60,16 @@ botocore==1.34.75 # s3transfer certifi==2024.2.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # httpcore @@ -79,19 +93,12 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.3 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # fastapi-pagination # prometheus-fastapi-instrumentator -fastapi-pagination==0.12.21 +fastapi-pagination==0.12.31 # via -r requirements/_base.in faststream==0.5.10 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -119,10 +126,16 @@ httptools==0.6.1 # via uvicorn httpx==0.27.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in @@ -211,10 +224,16 @@ opentelemetry-util-http==0.47b0 # opentelemetry-instrumentation-requests orjson==3.10.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in @@ -233,24 +252,49 @@ protobuf==4.25.4 # opentelemetry-proto psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -pydantic==1.10.14 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends # fastapi # fastapi-pagination + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.6.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -260,25 +304,39 @@ python-dateutil==2.9.0.post0 # arrow # botocore python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn python-multipart==0.0.9 # via -r requirements/_base.in pyyaml==6.0.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in # uvicorn redis==5.0.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -312,12 +370,18 @@ sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.41.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi @@ -343,13 +407,20 @@ typing-extensions==4.10.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer urllib3==2.2.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # botocore diff --git a/services/datcore-adapter/requirements/ci.txt b/services/datcore-adapter/requirements/ci.txt index 8d9e5ba16b92..95484d40524b 100644 --- a/services/datcore-adapter/requirements/ci.txt +++ b/services/datcore-adapter/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/datcore-adapter/requirements/dev.txt b/services/datcore-adapter/requirements/dev.txt index 73afce79c614..04e2ca590251 100644 --- a/services/datcore-adapter/requirements/dev.txt +++ b/services/datcore-adapter/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/_meta.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/_meta.py index 673b3bec726e..db004a8a9d3b 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/_meta.py +++ 
b/services/datcore-adapter/src/simcore_service_datcore_adapter/_meta.py @@ -6,12 +6,12 @@ from typing import Final from models_library.basic_types import VersionStr -from pydantic import parse_obj_as +from pydantic import TypeAdapter current_distribution = distribution("simcore_service_datcore_adapter") __version__ = version("simcore_service_datcore_adapter") -API_VERSION: Final[VersionStr] = parse_obj_as(VersionStr, __version__) +API_VERSION: Final[VersionStr] = TypeAdapter(VersionStr).validate_python(__version__) MAJOR, MINOR, PATCH = __version__.split(".") API_VTAG: Final[str] = f"v{MAJOR}" APP_NAME: Final[str] = current_distribution.metadata["Name"] diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py index 6b8dcd0796e6..bcf8cdec9c65 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/http_error.py @@ -1,4 +1,4 @@ -from typing import Callable, Optional +from typing import Callable from fastapi import HTTPException from fastapi.encoders import jsonable_encoder @@ -6,7 +6,8 @@ from starlette.responses import JSONResponse -async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: +async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: + assert isinstance(exc, HTTPException) # nosec return JSONResponse( content=jsonable_encoder({"errors": [exc.detail]}), status_code=exc.status_code ) @@ -16,7 +17,7 @@ def make_http_error_handler_for_exception( status_code: int, exception_cls: type[BaseException], *, - override_detail_message: Optional[str] = None, + override_detail_message: str | None = None, ) -> Callable: """ Produces a handler for BaseException-type exceptions which converts them diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py index 79c16ebaa63d..c1101961b34a 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/pennsieve_error.py @@ -7,8 +7,11 @@ async def botocore_exceptions_handler( _: Request, - exc: ClientError, + exc: Exception, ) -> JSONResponse: + assert isinstance(exc, ClientError) # nosec + assert "Error" in exc.response # nosec + assert "Code" in exc.response["Error"] # nosec if exc.response["Error"]["Code"] == "NotAuthorizedException": return JSONResponse( content=jsonable_encoder({"errors": exc.response["Error"]}), diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py index fb70f6791ac9..3770d62cb23d 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/errors/validation_error.py @@ -1,5 +1,3 @@ -from typing import Union - from fastapi.encoders import jsonable_encoder from fastapi.exceptions import RequestValidationError from fastapi.openapi.constants import REF_PREFIX @@ -12,8 +10,9 @@ async def http422_error_handler( _: Request, - exc: Union[RequestValidationError, ValidationError], + exc: Exception, ) -> JSONResponse: + assert isinstance(exc, 
RequestValidationError | ValidationError) # nosec return JSONResponse( content=jsonable_encoder({"errors": exc.errors()}), status_code=HTTP_422_UNPROCESSABLE_ENTITY, diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py index 44fdfb965991..2234c17d3dc1 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/files.py @@ -2,7 +2,7 @@ from typing import Annotated, Any from fastapi import APIRouter, Depends, Header, Request -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from servicelib.fastapi.requests_decorators import cancel_on_disconnect from starlette import status @@ -34,7 +34,9 @@ async def download_file( api_secret=x_datcore_api_secret, package_id=file_id, ) - return FileDownloadOut(link=parse_obj_as(AnyUrl, f"{presigned_download_link}")) + return FileDownloadOut( + link=TypeAdapter(AnyUrl).validate_python(f"{presigned_download_link}") + ) @router.delete( diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index 8c4fb44e8e91..7d308fbd3b1a 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -50,7 +50,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: for name in NOISY_LOGGERS: logging.getLogger(name).setLevel(quiet_level) - logger.debug("App settings:\n%s", settings.json(indent=2)) + logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) app = FastAPI( debug=settings.debug, diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index 68e879807abd..784ee5bc7938 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,7 +1,7 @@ from functools import cached_property from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import Field, parse_obj_as, validator +from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl from settings_library.base import BaseCustomSettings from settings_library.tracing import TracingSettings @@ -11,7 +11,9 @@ class PennsieveSettings(BaseCustomSettings): PENNSIEVE_ENABLED: bool = True - PENNSIEVE_API_URL: AnyUrl = parse_obj_as(AnyUrl, "https://api.pennsieve.io") + PENNSIEVE_API_URL: AnyUrl = TypeAdapter(AnyUrl).validate_python( + "https://api.pennsieve.io" + ) PENNSIEVE_API_GENERAL_TIMEOUT: float = 20.0 PENNSIEVE_HEALTCHCHECK_TIMEOUT: float = 1.0 @@ -21,28 +23,31 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: BootModeEnum | None LOG_LEVEL: LogLevel = Field( - LogLevel.INFO.value, - env=[ + default=LogLevel.INFO.value, + validation_alias=AliasChoices( "DATCORE_ADAPTER_LOGLEVEL", "DATCORE_ADAPTER_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL", - ], + ), ) - PENNSIEVE: PennsieveSettings = Field(auto_default_from_env=True) + PENNSIEVE: PennsieveSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = 
Field( - False, - env=[ + default=False, + validation_alias=AliasChoices( "DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) DATCORE_ADAPTER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DATCORE_ADAPTER_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, ) @cached_property @@ -54,7 +59,7 @@ def debug(self) -> bool: BootModeEnum.LOCAL, ] - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod - def _validate_loglevel(cls, value) -> str: + def _validate_loglevel(cls, value: str) -> str: return cls.validate_log_level(value) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py index 4d5190c5512c..5a10a88dfcb1 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/models/schemas/datasets.py @@ -45,14 +45,16 @@ def from_pennsieve_package( return cls( dataset_id=package["content"]["datasetNodeId"], package_id=package["content"]["nodeId"], - id=package["content"]["id"], + id=f"{package['content']['id']}", name=pck_name, path=base_path / pck_name, type=package["content"]["packageType"], size=file_size, created_at=package["content"]["createdAt"], last_modified_at=package["content"]["updatedAt"], - data_type=DataType.FOLDER - if package["content"]["packageType"] == "Collection" - else DataType.FILE, + data_type=( + DataType.FOLDER + if package["content"]["packageType"] == "Collection" + else DataType.FILE + ), ) diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index e4fa08204f5c..0130fda027a5 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -3,8 +3,9 @@ # pylint:disable=redefined-outer-name import json +from collections.abc import AsyncIterator, Callable from pathlib import Path -from typing import Any, AsyncIterator, Callable +from typing import Any from uuid import uuid4 import faker @@ -14,7 +15,9 @@ import simcore_service_datcore_adapter from asgi_lifespan import LifespanManager from fastapi.applications import FastAPI +from models_library.basic_types import BootModeEnum from pytest_mock import MockFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from simcore_service_datcore_adapter.modules.pennsieve import ( PennsieveAuthorizationHeaders, ) @@ -61,7 +64,9 @@ def pennsieve_mock_dataset_packages(mocks_dir: Path) -> dict[str, Any]: @pytest.fixture() -def minimal_app() -> FastAPI: +def minimal_app( + app_envs: None, +) -> FastAPI: from simcore_service_datcore_adapter.main import the_app return the_app @@ -76,7 +81,7 @@ def client(minimal_app: FastAPI) -> TestClient: @pytest.fixture def app_envs(monkeypatch: pytest.MonkeyPatch): # disable tracing as together with LifespanManager, it does not remove itself nicely - ... 
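The changes to core/settings.py above follow the pydantic-settings v2 equivalents used throughout this diff: a list passed to env= becomes validation_alias=AliasChoices(...), the repo-specific auto_default_from_env marker moves into json_schema_extra, and @validator(..., pre=True) becomes @field_validator(..., mode="before"). A minimal sketch of the new field style, assuming plain pydantic-settings v2 and a hypothetical ExampleSettings class (the repo's BaseCustomSettings layers extra behavior on top):

from pydantic import AliasChoices, Field, field_validator
from pydantic_settings import BaseSettings


class ExampleSettings(BaseSettings):  # hypothetical stand-in for BaseCustomSettings
    LOG_LEVEL: str = Field(
        default="INFO",
        # v1: env=["EXAMPLE_LOGLEVEL", "LOG_LEVEL"]
        validation_alias=AliasChoices("EXAMPLE_LOGLEVEL", "LOG_LEVEL"),
    )

    # v1: @validator("LOG_LEVEL", pre=True)
    @field_validator("LOG_LEVEL", mode="before")
    @classmethod
    def _normalize(cls, value: str) -> str:
        return value.upper()

AliasChoices keeps v1's lookup semantics: the listed environment variables are tried in order and the first one found wins.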
+ return setenvs_from_dict(monkeypatch, {"SC_BOOT_MODE": BootModeEnum.DEBUG}) @pytest.fixture() @@ -87,7 +92,7 @@ async def initialized_app( yield minimal_app -@pytest.fixture(scope="function") +@pytest.fixture async def async_client(initialized_app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: async with httpx.AsyncClient( app=initialized_app, @@ -215,14 +220,13 @@ def pennsieve_api_headers( def pennsieve_random_fake_datasets( create_pennsieve_fake_dataset_id: Callable, ) -> dict[str, Any]: - datasets = { + return { "datasets": [ {"content": {"id": create_pennsieve_fake_dataset_id(), "name": fake.text()}} for _ in range(10) ], "totalCount": 20, } - return datasets @pytest.fixture diff --git a/services/datcore-adapter/tests/unit/test_route_datasets.py b/services/datcore-adapter/tests/unit/test_route_datasets.py index 2c9c98b20f40..2a0d7dc85d6f 100644 --- a/services/datcore-adapter/tests/unit/test_route_datasets.py +++ b/services/datcore-adapter/tests/unit/test_route_datasets.py @@ -3,12 +3,10 @@ # pylint:disable=redefined-outer-name -from typing import Optional - import httpx import respx from fastapi_pagination import Page -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_service_datcore_adapter.models.schemas.datasets import ( DatasetMetaData, FileMetaData, @@ -18,7 +16,7 @@ async def test_list_datasets_entrypoint( async_client: httpx.AsyncClient, - pennsieve_subsystem_mock: Optional[respx.MockRouter], + pennsieve_subsystem_mock: respx.MockRouter | None, pennsieve_api_headers: dict[str, str], ): response = await async_client.get( @@ -29,7 +27,7 @@ async def test_list_datasets_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[DatasetMetaData], data) + TypeAdapter(Page[DatasetMetaData]).validate_python(data) async def test_list_dataset_files_legacy_entrypoint( @@ -47,7 +45,7 @@ async def test_list_dataset_files_legacy_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(list[FileMetaData], data) + TypeAdapter(list[FileMetaData]).validate_python(data) async def test_list_dataset_top_level_files_entrypoint( @@ -65,7 +63,7 @@ async def test_list_dataset_top_level_files_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[FileMetaData], data) + TypeAdapter(Page[FileMetaData]).validate_python(data) async def test_list_dataset_collection_files_entrypoint( @@ -85,4 +83,4 @@ async def test_list_dataset_collection_files_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[FileMetaData], data) + TypeAdapter(Page[FileMetaData]).validate_python(data) diff --git a/services/datcore-adapter/tests/unit/test_route_files.py b/services/datcore-adapter/tests/unit/test_route_files.py index 840a7edf79f7..cbaa09704faf 100644 --- a/services/datcore-adapter/tests/unit/test_route_files.py +++ b/services/datcore-adapter/tests/unit/test_route_files.py @@ -5,7 +5,7 @@ from unittest.mock import Mock import httpx -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_service_datcore_adapter.models.domains.files import FileDownloadOut from starlette import status @@ -23,7 +23,7 @@ async def test_download_file_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(FileDownloadOut, data) + TypeAdapter(FileDownloadOut).validate_python(data) async def 
test_delete_file_entrypoint( diff --git a/services/datcore-adapter/tests/unit/test_route_health.py b/services/datcore-adapter/tests/unit/test_route_health.py index 7ab697612c41..3f0b1712f7ea 100644 --- a/services/datcore-adapter/tests/unit/test_route_health.py +++ b/services/datcore-adapter/tests/unit/test_route_health.py @@ -31,7 +31,7 @@ async def test_check_subsystem_health(async_client: httpx.AsyncClient): assert pennsieve_health_route.called assert response.status_code == status.HTTP_200_OK - app_status = AppStatusCheck.parse_obj(response.json()) + app_status = AppStatusCheck.model_validate(response.json()) assert app_status assert app_status.app_name == "simcore-service-datcore-adapter" assert app_status.services == {"pennsieve": True} @@ -43,7 +43,7 @@ async def test_check_subsystem_health(async_client: httpx.AsyncClient): assert pennsieve_health_route.called assert response.status_code == status.HTTP_200_OK - app_status = AppStatusCheck.parse_obj(response.json()) + app_status = AppStatusCheck.model_validate(response.json()) assert app_status assert app_status.app_name == "simcore-service-datcore-adapter" assert app_status.services == {"pennsieve": False} diff --git a/services/director-v2/requirements/_base.in b/services/director-v2/requirements/_base.in index 2198739ef709..dc173e2c2b68 100644 --- a/services/director-v2/requirements/_base.in +++ b/services/director-v2/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # NOTE: Make sure they are added in setup.install_requires +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/dask-task-models-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/director-v2/requirements/ci.txt b/services/director-v2/requirements/ci.txt index f2d316f73fd6..17eacb4cfdaa 100644 --- a/services/director-v2/requirements/ci.txt +++ b/services/director-v2/requirements/ci.txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-dask-task-models-library @ ../../packages/dask-task-models-library/ simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database diff --git a/services/director-v2/requirements/dev.txt b/services/director-v2/requirements/dev.txt index 6d932514ae94..f183201fd55a 100644 --- a/services/director-v2/requirements/dev.txt +++ b/services/director-v2/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library/ --editable ../../packages/dask-task-models-library/ --editable ../../packages/models-library --editable ../../packages/postgres-database/ diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 330717e60624..3f9044486963 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -115,7 +115,7 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: config_all_loggers( log_format_local_dev_enabled=settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED ) - _logger.debug(settings.json(indent=2)) + _logger.debug(settings.model_dump_json(indent=2)) # keep mostly quiet noisy loggers quiet_level: int 
= max( diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py index 831c7df2f185..74810cdd101b 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py @@ -1,6 +1,7 @@ from datetime import timedelta from typing import Final +from common_library.pydantic_validators import validate_numeric_string_as_timedelta from models_library.projects_networks import DockerNetworkName from pydantic import Field, NonNegativeInt, PositiveFloat from settings_library.base import BaseCustomSettings @@ -166,3 +167,12 @@ class DynamicServicesSchedulerSettings(BaseCustomSettings): DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL: timedelta = Field( timedelta(0), description="time to sleep before removing a container" ) + + _validate_director_v2_dynamic_scheduler_interval = ( + validate_numeric_string_as_timedelta("DIRECTOR_V2_DYNAMIC_SCHEDULER_INTERVAL") + ) + _validate_director_v2_dynamic_sidecar_sleep_after_container_removal = ( + validate_numeric_string_as_timedelta( + "DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL" + ) + ) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index d495dd4aeef2..3c63028747b9 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -5,6 +5,7 @@ import datetime from functools import cached_property +from common_library.pydantic_validators import validate_numeric_string_as_timedelta from models_library.basic_types import ( BootModeEnum, BuildTargetEnum, @@ -234,3 +235,7 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): def _validate_loglevel(cls, value: str) -> str: log_level: str = cls.validate_log_level(value) return log_level + + _validate_service_tracking_heartbeat = validate_numeric_string_as_timedelta( + "SERVICE_TRACKING_HEARTBEAT" + ) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index bd75682dec28..6403be5a78ed 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -282,9 +282,12 @@ async def test_put_volumes( "post_containers_tasks", "/containers", { - "metrics_params": parse_obj_as( - CreateServiceMetricsAdditionalParams, - CreateServiceMetricsAdditionalParams.Config.schema_extra["example"], + "metrics_params": TypeAdapter( + CreateServiceMetricsAdditionalParams + ).validate_python( + CreateServiceMetricsAdditionalParams.model_config[ + "json_schema_extra" + ]["example"], ) }, id="post_containers_tasks", diff --git a/services/dynamic-scheduler/requirements/_base.in b/services/dynamic-scheduler/requirements/_base.in index ab95aec0daa5..ceb76bbb30f2 100644 --- a/services/dynamic-scheduler/requirements/_base.in +++ b/services/dynamic-scheduler/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement 
../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index f088dd12890b..495550bffb1d 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -1,30 +1,41 @@ -aio-pika==9.4.1 +aio-pika==9.4.3 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiocache==0.12.2 +aiocache==0.12.3 # via -r requirements/../../../packages/service-library/requirements/_base.in aiodebug==2.3.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.21.0 +aiodocker==0.23.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiofiles==23.2.1 +aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiohttp==3.9.3 +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.10.10 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiodocker -aiormq==6.8.0 +aiormq==6.8.1 # via aio-pika aiosignal==1.3.1 # via aiohttp -alembic==1.13.1 +alembic==1.13.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -anyio==4.3.0 +annotated-types==0.7.0 + # via pydantic +anyio==4.6.2.post1 # via # fast-depends # faststream @@ -43,26 +54,33 @@ async-timeout==4.0.3 # via asyncpg asyncpg==0.29.0 # via sqlalchemy -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema # referencing bidict==0.23.1 # via python-socketio -certifi==2024.2.2 +certifi==2024.8.30 # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # httpcore # httpx # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via @@ -74,25 +92,17 @@ deprecated==1.2.14 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-semantic-conventions -dnspython==2.6.1 +dnspython==2.7.0 # via email-validator -email-validator==2.1.1 +email-validator==2.2.0 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.2 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # prometheus-fastapi-instrumentator -faststream==0.5.10 +faststream==0.5.27 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via @@ -102,66 +112,80 @@ googleapis-common-protos==1.65.0 # via # opentelemetry-exporter-otlp-proto-grpc # 
opentelemetry-exporter-otlp-proto-http -greenlet==3.0.3 +greenlet==3.1.1 # via sqlalchemy -grpcio==1.66.0 +grpcio==1.67.0 # via opentelemetry-exporter-otlp-proto-grpc h11==0.14.0 # via # httpcore # uvicorn # wsproto -httpcore==1.0.5 +httpcore==1.0.6 # via httpx -httptools==0.6.1 +httptools==0.6.2 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -idna==3.6 +idna==3.10 # via # anyio # email-validator # httpx # requests # yarl -importlib-metadata==8.0.0 +importlib-metadata==8.4.0 # via opentelemetry-api -jsonschema==4.21.1 +jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2023.7.1 # via jsonschema -mako==1.3.2 +mako==1.3.5 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # alembic markdown-it-py==3.0.0 # via rich -markupsafe==2.1.5 +markupsafe==3.0.1 # via mako mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -172,125 +196,177 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.27.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.27.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.27.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.47b0 +opentelemetry-instrumentation-asgi==0.48b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.47b0 +opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.27.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.27.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.48b0 # via # 
opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-requests -orjson==3.10.0 +orjson==3.10.7 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in -packaging==24.0 +packaging==24.1 # via -r requirements/_base.in pamqp==3.3.0 # via aiormq -prometheus-client==0.20.0 +prometheus-client==0.21.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # prometheus-fastapi-instrumentator -prometheus-fastapi-instrumentator==6.1.0 +prometheus-fastapi-instrumentator==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -protobuf==4.25.4 +propcache==0.2.0 + # via yarl +protobuf==4.25.5 # via # googleapis-common-protos # opentelemetry-proto psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -psycopg2-binary==2.9.9 +psycopg2-binary==2.9.10 # via sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi -pygments==2.17.2 + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + 
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +pygments==2.18.0 # via rich -pyinstrument==4.6.2 +pyinstrument==5.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via uvicorn -python-engineio==4.9.1 + # via + # pydantic-settings + # uvicorn +python-engineio==4.10.1 # via python-socketio -python-socketio==5.11.2 +python-socketio==5.11.4 # via -r requirements/_base.in -pyyaml==6.0.1 +pyyaml==6.0.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in # uvicorn -redis==5.0.4 +redis==5.1.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in @@ -303,20 +379,20 @@ repro-zipfile==0.3.1 # via -r requirements/../../../packages/service-library/requirements/_base.in requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==13.9.2 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.18.0 +rpds-py==0.20.0 # via # jsonschema # referencing -setuptools==74.0.0 +setuptools==75.2.0 # via opentelemetry-instrumentation shellingham==1.5.4 # via typer -simple-websocket==1.0.0 +simple-websocket==1.1.0 # via python-engineio six==1.16.0 # via python-dateutil @@ -324,70 +400,91 @@ sniffio==1.3.1 # via # anyio # httpx -sqlalchemy==1.4.52 +sqlalchemy==1.4.54 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # 
-c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.40.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi -tenacity==8.5.0 + # prometheus-fastapi-instrumentator +tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -toolz==0.12.1 +toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -tqdm==4.66.2 +tqdm==4.66.5 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.3 +typer==0.12.5 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in - # faststream -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20241003 # via arrow -typing-extensions==4.10.0 +typing-extensions==4.12.2 # via # aiodebug - # aiodocker # alembic # fastapi # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer -urllib3==2.2.2 +urllib3==2.2.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.29.0 +uvicorn==0.32.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in -uvloop==0.19.0 +uvloop==0.21.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn -websockets==12.0 +websockets==13.1 # via uvicorn wrapt==1.16.0 # via @@ -395,11 +492,11 @@ wrapt==1.16.0 # opentelemetry-instrumentation wsproto==1.2.0 # via simple-websocket -yarl==1.9.4 +yarl==1.15.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # aio-pika # aiohttp # aiormq -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index b48cff66d524..44dbc740669b 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -1,44 +1,44 @@ -anyio==4.3.0 +anyio==4.6.2.post1 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -certifi==2024.2.2 +certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via # -c requirements/_base.txt # requests -coverage==7.6.1 +coverage==7.6.3 # via # -r requirements/_test.in # pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==29.0.0 +faker==30.4.0 # via -r requirements/_test.in h11==0.14.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.5 +httpcore==1.0.6 # via # -c requirements/_base.txt # httpx -httpx==0.27.0 +httpx==0.27.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # respx icdiff==2.0.7 # via pytest-icdiff -idna==3.6 +idna==3.10 # via # -c requirements/_base.txt # anyio @@ -46,7 +46,7 @@ idna==3.6 # requests iniconfig==2.0.0 # via pytest -packaging==24.0 
+packaging==24.1 # via # -c requirements/_base.txt # pytest @@ -101,9 +101,13 @@ sniffio==1.3.1 # anyio # asgi-lifespan # httpx -termcolor==2.4.0 +termcolor==2.5.0 # via pytest-sugar -urllib3==2.2.2 +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # faker +urllib3==2.2.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index df53578298f9..d15ef99dd1fa 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -1,8 +1,8 @@ -astroid==3.3.4 +astroid==3.3.5 # via pylint -black==24.8.0 +black==24.10.0 # via -r requirements/../../../requirements/devenv.txt -build==1.2.2 +build==1.2.2.post1 # via pip-tools bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt @@ -13,9 +13,9 @@ click==8.1.7 # -c requirements/_base.txt # black # pip-tools -dill==0.3.8 +dill==0.3.9 # via pylint -distlib==0.3.8 +distlib==0.3.9 # via virtualenv filelock==3.16.1 # via virtualenv @@ -35,7 +35,7 @@ mypy-extensions==1.0.0 # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.0 +packaging==24.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -52,32 +52,33 @@ platformdirs==4.3.6 # black # pylint # virtualenv -pre-commit==3.8.0 +pre-commit==4.0.1 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.0 +pylint==3.3.1 # via -r requirements/../../../requirements/devenv.txt -pyproject-hooks==1.1.0 +pyproject-hooks==1.2.0 # via # build # pip-tools -pyyaml==6.0.1 +pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pre-commit -ruff==0.6.7 +ruff==0.6.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==75.2.0 # via # -c requirements/_base.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.10.0 +typing-extensions==4.12.2 # via # -c requirements/_base.txt + # -c requirements/_test.txt # mypy -virtualenv==20.26.5 +virtualenv==20.26.6 # via pre-commit wheel==0.44.0 # via pip-tools diff --git a/services/dynamic-scheduler/requirements/ci.txt b/services/dynamic-scheduler/requirements/ci.txt index 30fe94106245..6b762254f44f 100644 --- a/services/dynamic-scheduler/requirements/ci.txt +++ b/services/dynamic-scheduler/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database pytest-simcore @ ../../packages/pytest-simcore diff --git a/services/dynamic-scheduler/requirements/dev.txt b/services/dynamic-scheduler/requirements/dev.txt index 89e4eb7519c7..60cb7217e538 100644 --- a/services/dynamic-scheduler/requirements/dev.txt +++ b/services/dynamic-scheduler/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/errors.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/errors.py index 260202d00f45..2677b7bc370d 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/errors.py +++ 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/errors.py
@@ -1,5 +1,5 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 
 
-class BaseDynamicSchedulerError(PydanticErrorMixin, ValueError):
-    code = "simcore.service.dynamic.scheduler"
+class BaseDynamicSchedulerError(OsparcErrorMixin, ValueError):
+    code = "simcore.service.dynamic.scheduler"  # type:ignore[assignment]
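Note on the change above: pydantic v2 removed PydanticErrorMixin, so the error hierarchy is rebased onto the new common-library's OsparcErrorMixin. A minimal sketch of the intended usage, assuming OsparcErrorMixin keeps the v1 behaviour of formatting msg_template with the keyword arguments given at raise time (ServiceNotTrackedError and its template are hypothetical):

    from common_library.errors_classes import OsparcErrorMixin


    class BaseDynamicSchedulerError(OsparcErrorMixin, ValueError):
        code = "simcore.service.dynamic.scheduler"  # type:ignore[assignment]


    class ServiceNotTrackedError(BaseDynamicSchedulerError):  # hypothetical subclass
        msg_template = "service {node_id} is not tracked"


    try:
        raise ServiceNotTrackedError(node_id="1742ae1f")  # kwargs fill the template
    except BaseDynamicSchedulerError as err:
        print(err)  # expected: "service 1742ae1f is not tracked"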
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py
index a60ccc504cd7..0c85785a596f 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py
@@ -1,6 +1,7 @@
 import datetime
 
-from pydantic import Field, parse_obj_as, validator
+from pydantic import AliasChoices, Field, TypeAdapter, field_validator
+from pydantic_settings import SettingsConfigDict
 from settings_library.application import BaseApplicationSettings
 from settings_library.basic_types import LogLevel, VersionTag
 from settings_library.director_v2 import DirectorV2Settings
@@ -18,20 +19,22 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
     # CODE STATICS ---------------------------------------------------------
     API_VERSION: str = API_VERSION
     APP_NAME: str = PROJECT_NAME
-    API_VTAG: VersionTag = parse_obj_as(VersionTag, API_VTAG)
+    API_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python(API_VTAG)
 
     # RUNTIME -----------------------------------------------------------
     DYNAMIC_SCHEDULER_LOGLEVEL: LogLevel = Field(
         default=LogLevel.INFO,
-        env=["DYNAMIC_SCHEDULER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"],
+        validation_alias=AliasChoices(
+            "DYNAMIC_SCHEDULER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"
+        ),
     )
     DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=[
+        validation_alias=AliasChoices(
             "DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED",
             "LOG_FORMAT_LOCAL_DEV_ENABLED",
-        ],
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
@@ -43,11 +46,13 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
         ),
     )
 
-    @validator("DYNAMIC_SCHEDULER_LOGLEVEL", pre=True)
+    @field_validator("DYNAMIC_SCHEDULER_LOGLEVEL", mode="before")
     @classmethod
     def _validate_log_level(cls, value: str) -> str:
         return cls.validate_log_level(value)
 
+    model_config = SettingsConfigDict(extra="allow")
+
 
 class ApplicationSettings(_BaseApplicationSettings):
     """Web app's environment variables
@@ -56,11 +61,13 @@ class ApplicationSettings(_BaseApplicationSettings):
     """
 
     DYNAMIC_SCHEDULER_RABBITMQ: RabbitSettings = Field(
-        auto_default_from_env=True, description="settings for service/rabbitmq"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for service/rabbitmq",
     )
 
     DYNAMIC_SCHEDULER_REDIS: RedisSettings = Field(
-        auto_default_from_env=True, description="settings for service/redis"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for service/redis",
     )
 
     DYNAMIC_SCHEDULER_SWAGGER_API_DOC_ENABLED: bool = Field(
@@ -68,12 +75,14 @@ class ApplicationSettings(_BaseApplicationSettings):
     )
 
     DYNAMIC_SCHEDULER_DIRECTOR_V2_SETTINGS: DirectorV2Settings = Field(
-        auto_default_from_env=True, description="settings for director-v2 service"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for director-v2 service",
     )
 
     DYNAMIC_SCHEDULER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
     DYNAMIC_SCHEDULER_PROFILING: bool = False
 
     DYNAMIC_SCHEDULER_TRACING: TracingSettings | None = Field(
-        auto_default_from_env=True, description="settings for opentelemetry tracing"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for opentelemetry tracing",
     )
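The settings hunk above shows the three recurring pydantic-settings v2 moves: Field(env=[...]) becomes validation_alias=AliasChoices(...), @validator(..., pre=True) becomes @field_validator(..., mode="before"), and the class-level Config turns into model_config = SettingsConfigDict(...). A self-contained sketch with a hypothetical MY_APP_LOGLEVEL field:

    from pydantic import AliasChoices, Field, field_validator
    from pydantic_settings import BaseSettings, SettingsConfigDict


    class MySettings(BaseSettings):
        model_config = SettingsConfigDict(extra="allow")

        MY_APP_LOGLEVEL: str = Field(
            default="INFO",
            # the first matching environment variable wins
            validation_alias=AliasChoices("MY_APP_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"),
        )

        @field_validator("MY_APP_LOGLEVEL", mode="before")
        @classmethod
        def _normalize(cls, value: str) -> str:
            return value.upper()


    # with LOG_LEVEL=debug exported in the environment:
    # assert MySettings().MY_APP_LOGLEVEL == "DEBUG"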
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/models/schemas/meta.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/models/schemas/meta.py
index df9d3fa4315e..ad73c58ac708 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/models/schemas/meta.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/models/schemas/meta.py
@@ -1,17 +1,17 @@
-from typing import Any, ClassVar
-
 from models_library.api_schemas__common.meta import BaseMeta
-from pydantic import HttpUrl
+from pydantic import ConfigDict, HttpUrl
 
 
 class Meta(BaseMeta):
     docs_url: HttpUrl
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
-            "example": {
-                "name": "simcore_service_dynamic_scheduler",
-                "version": "2.4.45",
-                "docs_url": "https://foo.io/doc",
-            }
+    model_config = ConfigDict(
+        json_schema_extra={
+            "examples": [
+                {
+                    "name": "simcore_service_dynamic_scheduler",
+                    "version": "2.4.45",
+                    "docs_url": "https://foo.io/doc",
+                }
+            ]
         }
+    )
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py
index fd5ce9a2cb25..5ee4ae3bcac8 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py
@@ -8,6 +8,7 @@
 )
 from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
 from models_library.projects_nodes_io import NodeID
+from pydantic import TypeAdapter
 from servicelib.fastapi.app_state import SingletonInAppStateMixin
 from servicelib.fastapi.http_client import AttachLifespanMixin, HasClientSetupInterface
 from servicelib.fastapi.http_client_thin import UnexpectedStatusError
@@ -43,9 +44,9 @@ async def get_status(
             # in case of legacy version
             # we need to transfer the correct format!
             if "data" in dict_response:
-                return NodeGet.parse_obj(dict_response["data"])
+                return TypeAdapter(NodeGet).validate_python(dict_response["data"])
 
-            return DynamicServiceGet.parse_obj(dict_response)
+            return TypeAdapter(DynamicServiceGet).validate_python(dict_response)
         except UnexpectedStatusError as e:
             if (
                 e.response.status_code  # type: ignore[attr-defined] # pylint:disable=no-member
@@ -62,9 +63,9 @@ async def run_dynamic_service(
 
         # legacy services
         if "data" in dict_response:
-            return NodeGet.parse_obj(dict_response["data"])
+            return TypeAdapter(NodeGet).validate_python(dict_response["data"])
 
-        return DynamicServiceGet.parse_obj(dict_response)
+        return TypeAdapter(DynamicServiceGet).validate_python(dict_response)
 
     async def stop_dynamic_service(
         self,
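Both hunks above replace v1's Model.parse_obj / parse_obj_as with pydantic v2 validation. A short sketch (NodeStatus is a hypothetical stand-in for NodeGet); Model.model_validate(payload) would work equally well for model types:

    from pydantic import BaseModel, TypeAdapter


    class NodeStatus(BaseModel):  # hypothetical stand-in
        service_uuid: str
        service_state: str


    payload = {"service_uuid": "abc", "service_state": "running"}

    # pydantic v1: NodeStatus.parse_obj(payload)
    node = TypeAdapter(NodeStatus).validate_python(payload)

    # TypeAdapter also validates plain types, replacing v1's parse_obj_as:
    ports = TypeAdapter(list[int]).validate_python(["8080", 8081])  # -> [8080, 8081]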
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
index 1b1b4a0d9f8f..99215c69123e 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
@@ -150,7 +150,7 @@ async def set_if_status_changed_for_service(
         model.scheduled_to_run = False
 
     # check if model changed
-    json_status = status.json()
+    json_status = status.model_dump_json()
     if model.service_status != json_status:
         model.service_status = json_status
         model.current_state = _get_current_scheduler_service_state(
diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
index 8d986dfe60ed..ccf9aeab9117 100644
--- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
+++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
@@ -9,4 +9,4 @@ async def test_health(client: AsyncClient):
 
     response = await client.get(f"/{API_VTAG}/meta")
     assert response.status_code == status.HTTP_200_OK
-    assert Meta.parse_raw(response.text)
+    assert Meta.model_validate_json(response.text)
diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
index c484f722ff95..7ee876e9e4bf 100644
--- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
+++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
@@ -18,6 +18,7 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from models_library.users import UserID
+from pydantic import TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError
@@ -52,14 +53,16 @@ def node_not_found(faker: Faker) -> NodeID:
 
 @pytest.fixture
 def service_status_new_style() -> DynamicServiceGet:
-    return DynamicServiceGet.parse_obj(
-        DynamicServiceGet.Config.schema_extra["examples"][1]
+    return TypeAdapter(DynamicServiceGet).validate_python(
+        DynamicServiceGet.model_config["json_schema_extra"]["examples"][1]
     )
 
 
 @pytest.fixture
 def service_status_legacy() -> NodeGet:
-    return NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1])
+    return TypeAdapter(NodeGet).validate_python(
+        NodeGet.model_config["json_schema_extra"]["examples"][1]
+    )
 
 
 @pytest.fixture
@@ -81,7 +84,9 @@ def mock_director_v0_service_state(
     ) as mock:
         mock.get(f"/fake-status/{node_id_legacy}").respond(
             status.HTTP_200_OK,
-            text=json.dumps(jsonable_encoder({"data": service_status_legacy.dict()})),
+            text=json.dumps(
+                jsonable_encoder({"data": service_status_legacy.model_dump()})
+            ),
         )
 
         # service was not found response
@@ -104,7 +109,7 @@ def mock_director_v2_service_state(
         assert_all_mocked=True,  # IMPORTANT: KEEP always True!
     ) as mock:
         mock.get(f"/dynamic_services/{node_id_new_style}").respond(
-            status.HTTP_200_OK, text=service_status_new_style.json()
+            status.HTTP_200_OK, text=service_status_new_style.model_dump_json()
        )
 
         # emulate redirect response to director-v0
@@ -173,8 +178,8 @@ async def test_get_state(
 @pytest.fixture
 def dynamic_service_start() -> DynamicServiceStart:
     # one for legacy and one for new style?
-    return DynamicServiceStart.parse_obj(
-        DynamicServiceStart.Config.schema_extra["example"]
+    return TypeAdapter(DynamicServiceStart).validate_python(
+        DynamicServiceStart.model_config["json_schema_extra"]["example"]
     )
 
 
@@ -189,7 +194,9 @@ def mock_director_v0_service_run(
     ) as mock:
         mock.post("/fake-service-run").respond(
             status.HTTP_201_CREATED,
-            text=json.dumps(jsonable_encoder({"data": service_status_legacy.dict()})),
+            text=json.dumps(
+                jsonable_encoder({"data": service_status_legacy.model_dump()})
+            ),
         )
 
         yield None
@@ -216,7 +223,7 @@ def mock_director_v2_service_run(
         else:
             request.respond(
                 status.HTTP_201_CREATED,
-                text=service_status_new_style.json(),
+                text=service_status_new_style.model_dump_json(),
             )
 
     yield None
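The test fixtures above serialize models into mocked HTTP responses, which is where the v1 -> v2 renames .json() -> .model_dump_json() and .dict() -> .model_dump() show up. A condensed sketch (ServiceStatus is hypothetical):

    import json

    from fastapi.encoders import jsonable_encoder
    from pydantic import BaseModel


    class ServiceStatus(BaseModel):  # hypothetical stand-in
        service_uuid: str
        state: str


    status_model = ServiceStatus(service_uuid="abc", state="running")

    # new-style response body; the v1 spelling was status_model.json()
    body_new = status_model.model_dump_json()

    # legacy envelope; the v1 spelling was status_model.dict()
    body_legacy = json.dumps(jsonable_encoder({"data": status_model.model_dump()}))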
diff --git a/services/dynamic-scheduler/tests/unit/conftest.py b/services/dynamic-scheduler/tests/unit/conftest.py
index 642ed2170ce1..a25596bd4f2f 100644
--- a/services/dynamic-scheduler/tests/unit/conftest.py
+++ b/services/dynamic-scheduler/tests/unit/conftest.py
@@ -7,14 +7,17 @@
     DynamicServiceStop,
 )
 from models_library.projects_nodes_io import NodeID
+from pydantic import TypeAdapter
 
 
 @pytest.fixture
 def get_dynamic_service_start() -> Callable[[NodeID], DynamicServiceStart]:
     def _(node_id: NodeID) -> DynamicServiceStart:
-        dict_data = deepcopy(DynamicServiceStart.Config.schema_extra["example"])
+        dict_data = deepcopy(
+            DynamicServiceStart.model_config["json_schema_extra"]["example"]
+        )
         dict_data["service_uuid"] = f"{node_id}"
-        return DynamicServiceStart.parse_obj(dict_data)
+        return TypeAdapter(DynamicServiceStart).validate_python(dict_data)
 
     return _
 
@@ -22,8 +25,10 @@ def _(node_id: NodeID) -> DynamicServiceStart:
 @pytest.fixture
 def get_dynamic_service_stop() -> Callable[[NodeID], DynamicServiceStop]:
     def _(node_id: NodeID) -> DynamicServiceStop:
-        dict_data = deepcopy(DynamicServiceStop.Config.schema_extra["example"])
+        dict_data = deepcopy(
+            DynamicServiceStop.model_config["json_schema_extra"]["example"]
+        )
         dict_data["node_id"] = f"{node_id}"
-        return DynamicServiceStop.parse_obj(dict_data)
+        return TypeAdapter(DynamicServiceStop).validate_python(dict_data)
 
     return _
diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py
index 0755f7e5d786..489dd14f2b8e 100644
--- a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py
+++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py
@@ -17,7 +17,7 @@
 from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
 from models_library.projects_nodes_io import NodeID
 from models_library.services_enums import ServiceState
-from pydantic import NonNegativeInt
+from pydantic import NonNegativeInt, TypeAdapter
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.deferred_tasks import TaskUID
 from servicelib.utils import limited_gather
@@ -115,12 +115,17 @@ async def test_services_tracer_workflow(
 @pytest.mark.parametrize(
     "status",
     [
-        *[NodeGet.parse_obj(o) for o in NodeGet.Config.schema_extra["examples"]],
         *[
-            DynamicServiceGet.parse_obj(o)
-            for o in DynamicServiceGet.Config.schema_extra["examples"]
+            NodeGet.model_validate(o)
+            for o in NodeGet.model_config["json_schema_extra"]["examples"]
         ],
-        NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]),
+        *[
+            DynamicServiceGet.model_validate(o)
+            for o in DynamicServiceGet.model_config["json_schema_extra"]["examples"]
+        ],
+        NodeGetIdle.model_validate(
+            NodeGetIdle.model_config["json_schema_extra"]["example"]
+        ),
     ],
 )
 async def test_set_if_status_changed(
@@ -138,7 +143,7 @@ async def test_set_if_status_changed(
 
     model = await get_tracked_service(app, node_id)
     assert model
-    assert model.service_status == status.json()
+    assert model.service_status == status.model_dump_json()
 
 
 async def test_set_service_status_task_uid(
@@ -162,15 +167,22 @@ async def test_set_service_status_task_uid(
     "status, expected_poll_interval",
     [
         (
-            NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1]),
+            TypeAdapter(NodeGet).validate_python(
+                NodeGet.model_config["json_schema_extra"]["examples"][1]
+            ),
             _LOW_RATE_POLL_INTERVAL,
         ),
         *[
-            (DynamicServiceGet.parse_obj(o), NORMAL_RATE_POLL_INTERVAL)
-            for o in DynamicServiceGet.Config.schema_extra["examples"]
+            (
+                TypeAdapter(DynamicServiceGet).validate_python(o),
+                NORMAL_RATE_POLL_INTERVAL,
+            )
+            for o in DynamicServiceGet.model_config["json_schema_extra"]["examples"]
         ],
         (
-            NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]),
+            TypeAdapter(NodeGetIdle).validate_python(
+                NodeGetIdle.model_config["json_schema_extra"]["example"]
+            ),
             _LOW_RATE_POLL_INTERVAL,
         ),
     ],
@@ -182,23 +194,25 @@ def test__get_poll_interval(
 
 
 def _get_node_get_from(service_state: ServiceState) -> NodeGet:
-    dict_data = NodeGet.Config.schema_extra["examples"][1]
+    dict_data = NodeGet.model_config["json_schema_extra"]["examples"][1]
     assert "service_state" in dict_data
     dict_data["service_state"] = service_state
-    return NodeGet.parse_obj(dict_data)
+    return TypeAdapter(NodeGet).validate_python(dict_data)
 
 
 def _get_dynamic_service_get_from(
     service_state: ServiceState,
 ) -> DynamicServiceGet:
-    dict_data = DynamicServiceGet.Config.schema_extra["examples"][1]
+    dict_data = DynamicServiceGet.model_config["json_schema_extra"]["examples"][1]
     assert "state" in dict_data
     dict_data["state"] = service_state
-    return DynamicServiceGet.parse_obj(dict_data)
+    return TypeAdapter(DynamicServiceGet).validate_python(dict_data)
 
 
 def _get_node_get_idle() -> NodeGetIdle:
-    return NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"])
+    return TypeAdapter(NodeGetIdle).validate_python(
+        NodeGetIdle.model_config["json_schema_extra"]["example"]
+    )
 
 
 def __get_flat_list(nested_list: list[list[Any]]) -> list[Any]:
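These tests read the examples that used to live in class Config.schema_extra and now live in model_config["json_schema_extra"]. A minimal sketch of declaring and reusing such examples (NodeExample is hypothetical):

    from pydantic import BaseModel, ConfigDict


    class NodeExample(BaseModel):  # hypothetical model
        service_uuid: str
        service_state: str

        model_config = ConfigDict(
            json_schema_extra={
                "examples": [{"service_uuid": "abc", "service_state": "running"}]
            }
        )


    # v1: NodeExample.Config.schema_extra["examples"]
    examples = NodeExample.model_config["json_schema_extra"]["examples"]
    instances = [NodeExample.model_validate(e) for e in examples]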
diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py
index 59739ddf8f60..20293f343b50 100644
--- a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py
+++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py
@@ -7,6 +7,7 @@
 from fastapi import FastAPI
 from models_library.projects_nodes_io import NodeID
 from pydantic import NonNegativeInt
+from pytest_mock import MockerFixture
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.utils import logged_gather
 from settings_library.redis import RedisSettings
@@ -24,8 +25,17 @@
 ]
 
 
+@pytest.fixture
+def disable_monitor_task(mocker: MockerFixture) -> None:
+    mocker.patch(
+        "simcore_service_dynamic_scheduler.services.status_monitor._monitor.Monitor._worker_start_get_status_requests",
+        autospec=True,
+    )
+
+
 @pytest.fixture
 def app_environment(
+    disable_monitor_task: None,
     disable_rabbitmq_setup: None,
     disable_deferred_manager_setup: None,
     disable_notifier_setup: None,
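The new disable_monitor_task fixture above keeps the status monitor's background worker from starting during unit tests. The same pattern in isolation, with a hypothetical target path; autospec=True makes the mock reject calls whose signature does not match the real method:

    import pytest
    from pytest_mock import MockerFixture


    @pytest.fixture
    def disable_background_worker(mocker: MockerFixture) -> None:
        mocker.patch(
            "myservice.monitor.Monitor._start_worker",  # hypothetical dotted path
            autospec=True,
        )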
diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py
index 2dd5270b627a..b1dfd7c0d1f6 100644
--- a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py
+++ b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py
@@ -22,7 +22,7 @@
 )
 from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
 from models_library.projects_nodes_io import NodeID
-from pydantic import NonNegativeInt
+from pydantic import NonNegativeInt, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from settings_library.rabbit import RabbitSettings
@@ -69,7 +69,7 @@ def _add_to_dict(dict_data: dict, entries: list[tuple[str, Any]]) -> None:
 
 
 def _get_node_get_with(state: str, node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGet:
-    dict_data = deepcopy(NodeGet.Config.schema_extra["examples"][1])
+    dict_data = deepcopy(NodeGet.model_config["json_schema_extra"]["examples"][1])
     _add_to_dict(
         dict_data,
         [
@@ -77,13 +77,15 @@ def _get_node_get_with(state: str, node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGe
             ("service_uuid", f"{node_id}"),
         ],
     )
-    return NodeGet.parse_obj(dict_data)
+    return TypeAdapter(NodeGet).validate_python(dict_data)
 
 
 def _get_dynamic_service_get_legacy_with(
     state: str, node_id: NodeID = _DEFAULT_NODE_ID
 ) -> DynamicServiceGet:
-    dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][0])
+    dict_data = deepcopy(
+        DynamicServiceGet.model_config["json_schema_extra"]["examples"][0]
+    )
     _add_to_dict(
         dict_data,
         [
@@ -92,13 +94,15 @@ def _get_dynamic_service_get_legacy_with(
             ("node_uuid", f"{node_id}"),
         ],
     )
-    return DynamicServiceGet.parse_obj(dict_data)
+    return TypeAdapter(DynamicServiceGet).validate_python(dict_data)
 
 
 def _get_dynamic_service_get_new_style_with(
     state: str, node_id: NodeID = _DEFAULT_NODE_ID
 ) -> DynamicServiceGet:
-    dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][1])
+    dict_data = deepcopy(
+        DynamicServiceGet.model_config["json_schema_extra"]["examples"][1]
+    )
     _add_to_dict(
         dict_data,
         [
@@ -107,18 +111,18 @@ def _get_dynamic_service_get_new_style_with(
             ("node_uuid", f"{node_id}"),
         ],
     )
-    return DynamicServiceGet.parse_obj(dict_data)
+    return TypeAdapter(DynamicServiceGet).validate_python(dict_data)
 
 
 def _get_node_get_idle(node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGetIdle:
-    dict_data = NodeGetIdle.Config.schema_extra["example"]
+    dict_data = NodeGetIdle.model_config["json_schema_extra"]["example"]
     _add_to_dict(
         dict_data,
         [
             ("service_uuid", f"{node_id}"),
         ],
     )
-    return NodeGetIdle.parse_obj(dict_data)
+    return TypeAdapter(NodeGetIdle).validate_python(dict_data)
 
 
 class _ResponseTimeline:
@@ -209,10 +213,12 @@ def _side_effect_node_status_response(request: Request) -> Response:
         if isinstance(service_status, NodeGet):
             return Response(
                 status.HTTP_200_OK,
-                text=json.dumps(jsonable_encoder({"data": service_status.dict()})),
+                text=json.dumps(
+                    jsonable_encoder({"data": service_status.model_dump()})
+                ),
             )
         if isinstance(service_status, DynamicServiceGet):
-            return Response(status.HTTP_200_OK, text=service_status.json())
+            return Response(status.HTTP_200_OK, text=service_status.model_dump_json())
         if isinstance(service_status, NodeGetIdle):
             return Response(status.HTTP_404_NOT_FOUND)
diff --git a/services/dynamic-scheduler/tests/unit/test__model_examples.py b/services/dynamic-scheduler/tests/unit/test__model_examples.py
index 858bcc66a4d9..e768927cfe4d 100644
--- a/services/dynamic-scheduler/tests/unit/test__model_examples.py
+++ b/services/dynamic-scheduler/tests/unit/test__model_examples.py
@@ -3,7 +3,7 @@
 
 import pytest
 import simcore_service_dynamic_scheduler.models
-from pydantic import BaseModel, ValidationError
+from pydantic import BaseModel, TypeAdapter, ValidationError
 from pytest_simcore.pydantic_models import walk_model_examples_in_package
 
 
@@ -15,7 +15,7 @@ def test_api_server_model_examples(
     model_cls: type[BaseModel], example_name: int, example_data: Any
 ):
     try:
-        assert model_cls.parse_obj(example_data) is not None
+        assert TypeAdapter(model_cls).validate_python(example_data) is not None
     except ValidationError as err:
         pytest.fail(
             f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}"
         )
diff --git a/services/dynamic-scheduler/tests/unit/test_cli.py b/services/dynamic-scheduler/tests/unit/test_cli.py
index 2e812f7e118a..85b2a5e2dcdc 100644
--- a/services/dynamic-scheduler/tests/unit/test_cli.py
+++ b/services/dynamic-scheduler/tests/unit/test_cli.py
@@ -39,8 +39,8 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict):
     assert result.exit_code == os.EX_OK, result.output
     print(result.output)
 
-    settings = ApplicationSettings.parse_raw(result.output)
-    assert settings == ApplicationSettings.create_from_envs()
+    settings = ApplicationSettings.model_validate_json(result.output)
+    assert settings.model_dump() == ApplicationSettings.create_from_envs().model_dump()
 
 
 def test_main(app_environment: EnvVarsDict):
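The CLI test above also switches parse_raw -> model_validate_json and compares model_dump() outputs rather than the models themselves, presumably to sidestep v2's stricter model equality. A minimal round-trip sketch (AppSettings is a hypothetical stand-in for ApplicationSettings):

    from pydantic import BaseModel


    class AppSettings(BaseModel):  # hypothetical stand-in
        LOG_LEVEL: str = "INFO"
        POLL_INTERVAL: int = 10


    raw = '{"LOG_LEVEL": "DEBUG", "POLL_INTERVAL": 30}'

    # pydantic v1: AppSettings.parse_raw(raw)
    settings = AppSettings.model_validate_json(raw)

    assert settings.model_dump() == {"LOG_LEVEL": "DEBUG", "POLL_INTERVAL": 30}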
"required": false, "schema": { "type": "boolean", - "title": "Only Status", "description": "if True only show the status of the container", - "default": false + "default": false, + "title": "Only Status" }, - "name": "only_status", - "in": "query" + "description": "if True only show the status of the container" } ], "responses": { @@ -166,14 +166,14 @@ "summary": "Starts the containers as defined in ContainerCreate by:\n- cleaning up resources from previous runs if any\n- starting the containers\n\nProgress may be obtained through URL\nProcess may be cancelled through URL", "operationId": "create_service_containers_task_v1_containers_post", "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ContainersCreate" } } - }, - "required": true + } }, "responses": { "202": { @@ -213,7 +213,15 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ActivityInfo" + "anyOf": [ + { + "$ref": "#/components/schemas/ActivityInfo" + }, + { + "type": "null" + } + ], + "title": "Response Get Containers Activity V1 Containers Activity Get" } } } @@ -231,16 +239,17 @@ "operationId": "get_container_logs_v1_containers__id__logs_get", "parameters": [ { + "name": "id", + "in": "path", "required": true, "schema": { "type": "string", "title": "Id" - }, - "name": "id", - "in": "path" + } }, { - "description": "Only return logs since this time, as a UNIX timestamp", + "name": "since", + "in": "query", "required": false, "schema": { "type": "integer", @@ -248,11 +257,11 @@ "description": "Only return logs since this time, as a UNIX timestamp", "default": 0 }, - "name": "since", - "in": "query" + "description": "Only return logs since this time, as a UNIX timestamp" }, { - "description": "Only return logs before this time, as a UNIX timestamp", + "name": "until", + "in": "query", "required": false, "schema": { "type": "integer", @@ -260,11 +269,11 @@ "description": "Only return logs before this time, as a UNIX timestamp", "default": 0 }, - "name": "until", - "in": "query" + "description": "Only return logs before this time, as a UNIX timestamp" }, { - "description": "Enabling this parameter will include timestamps in logs", + "name": "timestamps", + "in": "query", "required": false, "schema": { "type": "boolean", @@ -272,8 +281,7 @@ "description": "Enabling this parameter will include timestamps in logs", "default": false }, - "name": "timestamps", - "in": "query" + "description": "Enabling this parameter will include timestamps in logs" } ], "responses": { @@ -282,10 +290,10 @@ "content": { "application/json": { "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", "title": "Response Get Container Logs V1 Containers Id Logs Get" } } @@ -320,15 +328,15 @@ "operationId": "get_containers_name_v1_containers_name_get", "parameters": [ { - "description": "JSON encoded dictionary. FastAPI does not allow for dict as type in query parameters", + "name": "filters", + "in": "query", "required": true, "schema": { "type": "string", - "title": "Filters", - "description": "JSON encoded dictionary. FastAPI does not allow for dict as type in query parameters" + "description": "JSON encoded dictionary. FastAPI does not allow for dict as type in query parameters", + "title": "Filters" }, - "name": "filters", - "in": "query" + "description": "JSON encoded dictionary. 
FastAPI does not allow for dict as type in query parameters" } ], "responses": { @@ -369,13 +377,13 @@ "operationId": "inspect_container_v1_containers__id__get", "parameters": [ { + "name": "id", + "in": "path", "required": true, "schema": { "type": "string", "title": "Id" - }, - "name": "id", - "in": "path" + } } ], "responses": { @@ -486,24 +494,24 @@ "operationId": "attach_container_to_network_v1_containers__id__networks_attach_post", "parameters": [ { + "name": "id", + "in": "path", "required": true, "schema": { "type": "string", "title": "Id" - }, - "name": "id", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AttachContainerToNetworkItem" } } - }, - "required": true + } }, "responses": { "204": { @@ -531,24 +539,24 @@ "operationId": "detach_container_from_network_v1_containers__id__networks_detach_post", "parameters": [ { + "name": "id", + "in": "path", "required": true, "schema": { "type": "string", "title": "Id" - }, - "name": "id", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/DetachContainerFromNetworkItem" } } - }, - "required": true + } }, "responses": { "204": { @@ -666,10 +674,17 @@ "content": { "application/json": { "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Port Keys" } } @@ -711,10 +726,17 @@ "content": { "application/json": { "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Port Keys" } } @@ -798,23 +820,23 @@ "operationId": "put_volume_state_v1_volumes__id__put", "parameters": [ { + "name": "id", + "in": "path", "required": true, "schema": { "$ref": "#/components/schemas/VolumeCategory" - }, - "name": "id", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutVolumeItem" } } - }, - "required": true + } }, "responses": { "204": { @@ -873,7 +895,14 @@ "default": true }, "error_message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Error Message", "description": "in case of error this gets set" } @@ -909,8 +938,7 @@ "GPU", "MPI" ], - "title": "BootMode", - "description": "An enumeration." 
+ "title": "BootMode" }, "ContainersComposeSpec": { "properties": { @@ -956,25 +984,60 @@ "CreateServiceMetricsAdditionalParams": { "properties": { "wallet_id": { - "type": "integer", - "exclusiveMinimum": true, - "title": "Wallet Id", - "minimum": 0 + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Wallet Id" }, "wallet_name": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Wallet Name" }, "pricing_plan_id": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Pricing Plan Id" }, "pricing_unit_id": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Pricing Unit Id" }, "pricing_unit_cost_id": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Pricing Unit Cost Id" }, "product_name": { @@ -1008,9 +1071,6 @@ "title": "Service Version" }, "service_resources": { - "additionalProperties": { - "$ref": "#/components/schemas/ImageResources" - }, "type": "object", "title": "Service Resources" }, @@ -1021,6 +1081,11 @@ }, "type": "object", "required": [ + "wallet_id", + "wallet_name", + "pricing_plan_id", + "pricing_unit_id", + "pricing_unit_cost_id", "product_name", "simcore_user_agent", "user_email", @@ -1033,20 +1098,20 @@ ], "title": "CreateServiceMetricsAdditionalParams", "example": { - "wallet_id": 1, - "wallet_name": "a private wallet for me", + "node_name": "the service of a lifetime _ *!", "pricing_plan_id": 1, - "pricing_unit_id": 1, "pricing_unit_detail_id": 1, + "pricing_unit_id": 1, "product_name": "osparc", - "simcore_user_agent": "undefined", - "user_email": "test@test.com", "project_name": "_!New Study", - "node_name": "the service of a lifetime _ *!", + "service_additional_metadata": {}, "service_key": "simcore/services/dynamic/test", - "service_version": "0.0.1", "service_resources": {}, - "service_additional_metadata": {} + "service_version": "0.0.1", + "simcore_user_agent": "undefined", + "user_email": "test@test.com", + "wallet_id": 1, + "wallet_name": "a private wallet for me" } }, "DetachContainerFromNetworkItem": { @@ -1095,6 +1160,7 @@ "$ref": "#/components/schemas/BootMode" }, "type": "array", + "title": "Boot Modes", "description": "describe how a service shall be booted, using CPU, MPI, openMP or GPU", "default": [ "CPU" @@ -1110,6 +1176,14 @@ "example": { "image": "simcore/service/dynamic/pretty-intense:1.0.0", "resources": { + "AIRAM": { + "limit": 1, + "reservation": 1 + }, + "ANY_resource": { + "limit": "some_value", + "reservation": "some_value" + }, "CPU": { "limit": 4, "reservation": 0.1 @@ -1121,14 +1195,6 @@ "VRAM": { "limit": 1, "reservation": 1 - }, - "AIRAM": { - "limit": 1, - "reservation": 1 - }, - "ANY_resource": { - "limit": "some_value", - "reservation": "some_value" } } } @@ -1222,7 +1288,14 @@ "ServiceOutput": { "properties": { "displayOrder": { - "type": "number", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Displayorder", "description": "DEPRECATED: new display order is taken from the item position. 
This will be removed.", "deprecated": true @@ -1230,14 +1303,12 @@ "label": { "type": "string", "title": "Label", - "description": "short name for the property", - "example": "Age" + "description": "short name for the property" }, "description": { "type": "string", "title": "Description", - "description": "description of the property", - "example": "Age in seconds since 1970" + "description": "description of the property" }, "type": { "type": "string", @@ -1246,32 +1317,51 @@ "description": "data type expected on this input glob matching for data type is allowed" }, "contentSchema": { - "type": "object", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], "title": "Contentschema", "description": "jsonschema of this input/output. Required when type='ref_contentSchema'" }, "fileToKeyMap": { - "additionalProperties": { - "type": "string", - "pattern": "^[-_a-zA-Z0-9]+$" - }, - "type": "object", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], "title": "Filetokeymap", "description": "Place the data associated with the named keys in files" }, "unit": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Unit", "description": "Units, when it refers to a physical quantity", "deprecated": true }, "widget": { - "allOf": [ + "anyOf": [ { "$ref": "#/components/schemas/Widget" + }, + { + "type": "null" } ], - "title": "Widget", "description": "custom widget to use instead of the default one determined from the data-type", "deprecated": true } @@ -1283,8 +1373,7 @@ "description", "type" ], - "title": "ServiceOutput", - "description": "Base class for service input/outputs" + "title": "ServiceOutput" }, "Structure": { "properties": { @@ -1389,11 +1478,7 @@ "Widget": { "properties": { "type": { - "allOf": [ - { - "$ref": "#/components/schemas/WidgetType" - } - ], + "$ref": "#/components/schemas/WidgetType", "description": "type of the property" }, "details": { @@ -1422,8 +1507,7 @@ "TextArea", "SelectBox" ], - "title": "WidgetType", - "description": "An enumeration." 
+ "title": "WidgetType" } } } diff --git a/services/dynamic-sidecar/requirements/_base.in b/services/dynamic-sidecar/requirements/_base.in index 251ca3cedfce..66b47a481b55 100644 --- a/services/dynamic-sidecar/requirements/_base.in +++ b/services/dynamic-sidecar/requirements/_base.in @@ -7,6 +7,7 @@ # NOTE: These input-requirements under packages are tested using latest updates # NOTE: Make sure these packages are added in setup.install_requires +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in # service-library[fastapi] diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index 303234d7f0c1..6439dd71e971 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -25,17 +25,31 @@ aiofiles==23.2.1 # -r requirements/_base.in aiohttp==3.9.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -53,6 +67,8 @@ alembic==1.13.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -87,17 +103,31 @@ bidict==0.23.1 # via python-socketio certifi==2024.2.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -122,22 +152,8 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.2 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -170,17 +186,31 @@ httpcore==1.0.5 # via httpx httpx==0.27.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -205,17 +235,31 @@ jsonschema-specifications==2023.7.1 # via jsonschema mako==1.3.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -291,17 +335,31 @@ opentelemetry-util-http==0.47b0 # opentelemetry-instrumentation-requests orjson==3.10.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -334,39 +392,86 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -375,6 +480,8 @@ pyinstrument==4.6.2 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings python-engineio==4.9.0 # via python-socketio python-magic==0.4.27 @@ -383,17 +490,31 @@ python-socketio==5.11.2 # via -r requirements/_base.in pyyaml==6.0.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # 
-c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -402,17 +523,31 @@ pyyaml==6.0.1 # -r requirements/_base.in redis==5.0.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -455,17 +590,31 @@ sniffio==1.3.1 # httpx sqlalchemy==1.4.52 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -473,19 +622,33 @@ sqlalchemy==1.4.52 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.27.0 +starlette==0.39.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -525,22 +688,37 @@ typing-extensions==4.11.0 # opentelemetry-sdk # pint # pydantic + # pydantic-core # typer u-msgpack-python==2.8.0 # via -r requirements/_base.in urllib3==2.2.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index 7bcc6a8243f3..3a87310dcdb3 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -105,7 +105,9 @@ python-dateutil==2.9.0.post0 # botocore # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in requests==2.32.3 # via # -c requirements/_base.txt diff --git a/services/dynamic-sidecar/requirements/ci.txt b/services/dynamic-sidecar/requirements/ci.txt index 9c8e7a5ca7a7..827161faf6cf 100644 --- a/services/dynamic-sidecar/requirements/ci.txt +++ b/services/dynamic-sidecar/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/dynamic-sidecar/requirements/dev.txt b/services/dynamic-sidecar/requirements/dev.txt index 2d1c00661ed6..ce064f44c52e 100644 --- a/services/dynamic-sidecar/requirements/dev.txt +++ b/services/dynamic-sidecar/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's 
packages
+--editable ../../packages/common-library
 --editable ../../packages/models-library
 --editable ../../packages/postgres-database/
 --editable ../../packages/pytest-simcore/
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py
index 4269646e9bbd..a07d5db2fef1 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py
@@ -12,7 +12,7 @@
     ActivityInfo,
     ActivityInfoOrNone,
 )
-from pydantic import parse_raw_as
+from pydantic import TypeAdapter, ValidationError
 from servicelib.fastapi.requests_decorators import cancel_on_disconnect

 from ...core.docker_utils import docker_client
@@ -174,8 +174,8 @@ async def get_containers_activity(
         return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME)

     try:
-        return parse_raw_as(ActivityInfo, inactivity_response)
-    except json.JSONDecodeError:
+        return TypeAdapter(ActivityInfo).validate_json(inactivity_response)
+    except ValidationError:
         _logger.warning(
             "Could not parse command result '%s' as '%s'",
             inactivity_response,
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
index 8592afd24409..b669198a7157 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
@@ -131,16 +131,18 @@ def create_base_app() -> FastAPI:
     # settings
     settings = ApplicationSettings.create_from_envs()
     setup_logger(settings)
-    logger.debug(settings.json(indent=2))
+    logger.debug(settings.model_dump_json(indent=2))

     # minimal
     app = FastAPI(
-        debug=settings.SC_BOOT_MODE.is_devel_mode(),
+        debug=settings.SC_BOOT_MODE.is_devel_mode(),  # pylint: disable=no-member
         title=PROJECT_NAME,
         description=SUMMARY,
         version=API_VERSION,
         openapi_url=f"/api/{API_VTAG}/openapi.json",
-        **get_common_oas_options(settings.SC_BOOT_MODE.is_devel_mode()),
+        **get_common_oas_options(
+            settings.SC_BOOT_MODE.is_devel_mode()  # pylint: disable=no-member
+        ),
     )
     override_fastapi_openapi_method(app)
     app.state.settings = settings
@@ -190,8 +192,10 @@ def create_app():
         setup_prometheus_metrics(app)

     # ERROR HANDLERS  ------------
-    app.add_exception_handler(NodeNotFound, node_not_found_error_handler)
-    app.add_exception_handler(BaseDynamicSidecarError, http_error_handler)
+    app.add_exception_handler(
+        NodeNotFound, node_not_found_error_handler  # type: ignore[arg-type]
+    )
+    app.add_exception_handler(BaseDynamicSidecarError, http_error_handler)  # type: ignore[arg-type]

     # EVENTS ---------------------
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
index 7804d3de35c9..43959d5fba51 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
@@ -110,7 +110,7 @@ def are_all_containers_in_expected_states(
     states: Iterable[ContainerState | None],
 ) -> bool:
     return all(
-        s is not None and s.Status in _ACCEPTED_CONTAINER_STATUSES for s in states
+        s is not None and s.status in _ACCEPTED_CONTAINER_STATUSES for s in states
     )
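Note on the `containers.py` hunk above: in Pydantic v2, `TypeAdapter(...).validate_json(...)` replaces the removed `parse_raw_as`, and malformed JSON now surfaces as a `pydantic.ValidationError` instead of a bare `json.JSONDecodeError`, which is why the `except` clause changes together with the call. A minimal, self-contained sketch of the pattern (the `ActivityInfo` model below is a stand-in written for illustration, not the repo's actual class):

```python
# Sketch of the parse_raw_as -> TypeAdapter migration (Pydantic v2).
from pydantic import BaseModel, TypeAdapter, ValidationError


class ActivityInfo(BaseModel):  # stand-in model for illustration
    seconds_inactive: float


adapter = TypeAdapter(ActivityInfo)

# Equivalent of the old parse_raw_as(ActivityInfo, raw):
print(adapter.validate_json('{"seconds_inactive": 3.5}'))

# Invalid JSON *and* schema mismatches both raise ValidationError in v2,
# so a json.JSONDecodeError handler would no longer fire.
for raw in ("not-json", '{"seconds_inactive": "abc"}'):
    try:
        adapter.validate_json(raw)
    except ValidationError as err:
        print(f"rejected {raw!r}: {err.error_count()} error(s)")
```

Constructing a `TypeAdapter` has some cost, so hoisting it to module level is the usual optimization; the hunk above builds it inline, which is fine for a rarely-called endpoint.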
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
index 8592afd24409..b669198a7157 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
@@ -131,16 +131,18 @@ def create_base_app() -> FastAPI:
     # settings
     settings = ApplicationSettings.create_from_envs()
     setup_logger(settings)
-    logger.debug(settings.json(indent=2))
+    logger.debug(settings.model_dump_json(indent=2))

     # minimal
     app = FastAPI(
-        debug=settings.SC_BOOT_MODE.is_devel_mode(),
+        debug=settings.SC_BOOT_MODE.is_devel_mode(),  # pylint: disable=no-member
         title=PROJECT_NAME,
         description=SUMMARY,
         version=API_VERSION,
         openapi_url=f"/api/{API_VTAG}/openapi.json",
-        **get_common_oas_options(settings.SC_BOOT_MODE.is_devel_mode()),
+        **get_common_oas_options(
+            settings.SC_BOOT_MODE.is_devel_mode()  # pylint: disable=no-member
+        ),
     )
     override_fastapi_openapi_method(app)
     app.state.settings = settings
@@ -190,8 +192,10 @@ def create_app():
         setup_prometheus_metrics(app)

     # ERROR HANDLERS  ------------
-    app.add_exception_handler(NodeNotFound, node_not_found_error_handler)
-    app.add_exception_handler(BaseDynamicSidecarError, http_error_handler)
+    app.add_exception_handler(
+        NodeNotFound, node_not_found_error_handler  # type: ignore[arg-type]
+    )
+    app.add_exception_handler(BaseDynamicSidecarError, http_error_handler)  # type: ignore[arg-type]

     # EVENTS ---------------------
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
index 7804d3de35c9..43959d5fba51 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py
@@ -110,7 +110,7 @@ def are_all_containers_in_expected_states(
     states: Iterable[ContainerState | None],
 ) -> bool:
     return all(
-        s is not None and s.Status in _ACCEPTED_CONTAINER_STATUSES for s in states
+        s is not None and s.status in _ACCEPTED_CONTAINER_STATUSES for s in states
     )

diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py
index b0fd128a9426..636260814a19 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py
@@ -1,8 +1,8 @@
 from typing import Any

+from common_library.errors_classes import OsparcErrorMixin
 from fastapi import status
 from models_library.services import RunID
-from pydantic.errors import PydanticErrorMixin


 class BaseDynamicSidecarError(Exception):
@@ -35,8 +35,8 @@ def __init__(self, message: str, status_code: int) -> None:
         )


-class BaseError(PydanticErrorMixin, BaseDynamicSidecarError):
-    code = "dy_sidecar.error"
+class BaseError(OsparcErrorMixin, BaseDynamicSidecarError):
+    code = "dy_sidecar.error"  # type: ignore[assignment]


 class ContainerExecContainerNotFoundError(BaseError):
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/external_dependencies.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/external_dependencies.py
index 12696fe13f08..278f29e7ad19 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/external_dependencies.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/external_dependencies.py
@@ -1,5 +1,5 @@
+from common_library.errors_classes import OsparcErrorMixin
 from fastapi import FastAPI
-from pydantic.errors import PydanticErrorMixin
 from servicelib.utils import logged_gather

 from .postgres import wait_for_postgres_liveness
@@ -8,7 +8,7 @@
 from .storage import wait_for_storage_liveness


-class CouldNotReachExternalDependenciesError(PydanticErrorMixin, Exception):
+class CouldNotReachExternalDependenciesError(OsparcErrorMixin, Exception):
     msg_template: str = (
         "Could not start because the following external dependencies failed: {failed}"
     )
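`OsparcErrorMixin` replaces `pydantic.errors.PydanticErrorMixin` throughout the diff. A small sketch of the intended usage, assuming the mixin keeps the old contract of interpolating constructor kwargs into `msg_template` (the class name below is hypothetical):

```python
from common_library.errors_classes import OsparcErrorMixin


class VolumeNotFoundError(OsparcErrorMixin, RuntimeError):  # hypothetical example
    msg_template: str = "volume '{volume_name}' was not found"


try:
    raise VolumeNotFoundError(volume_name="outputs")
except VolumeNotFoundError as err:
    assert "outputs" in f"{err}"  # message rendered from msg_template
```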
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py
index 945aaccb8e5b..e43946f5375f 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py
@@ -3,14 +3,14 @@
 from typing import Final

 from fastapi import FastAPI
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter

 from .settings import ApplicationSettings

 _RESERVED_DISK_SPACE_NAME: Final[Path] = Path(
     "/tmp/reserved_disk_space"  # nosec # noqa: S108
 )
-_DEFAULT_CHUNK_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "8k")
+_DEFAULT_CHUNK_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("8k")


 def _write_random_binary_file(
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
index 4e151e29c002..93a1f5e9cfb4 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py
@@ -4,6 +4,7 @@
 from pathlib import Path
 from typing import cast

+from common_library.pydantic_validators import validate_numeric_string_as_timedelta
 from models_library.basic_types import BootModeEnum, PortInt
 from models_library.callbacks_mapping import CallbacksMapping
 from models_library.products import ProductName
@@ -11,7 +12,14 @@
 from models_library.projects_nodes_io import NodeID
 from models_library.services import DynamicServiceKey, RunID, ServiceVersion
 from models_library.users import UserID
-from pydantic import ByteSize, Field, PositiveInt, parse_obj_as, validator
+from pydantic import (
+    AliasChoices,
+    ByteSize,
+    Field,
+    PositiveInt,
+    TypeAdapter,
+    field_validator,
+)
 from settings_library.aws_s3_cli import AwsS3CliSettings
 from settings_library.base import BaseCustomSettings
 from settings_library.docker_registry import RegistrySettings
@@ -31,6 +39,10 @@ class ResourceTrackingSettings(BaseCustomSettings):
         description="each time the status of the service is propagated",
     )

+    _validate_resource_tracking_heartbeat_interval = (
+        validate_numeric_string_as_timedelta("RESOURCE_TRACKING_HEARTBEAT_INTERVAL")
+    )
+

 class SystemMonitorSettings(BaseCustomSettings):
     DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE: bool = Field(
@@ -61,7 +73,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):

     # LOGGING
     LOG_LEVEL: str = Field(
-        default="WARNING", env=["DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL"]
+        default="WARNING",
+        validation_alias=AliasChoices(
+            "DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL"
+        ),
     )

     # SERVICE SERVER (see : https://www.uvicorn.org/settings/)
@@ -100,7 +115,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
     )

     DYNAMIC_SIDECAR_RESERVED_SPACE_SIZE: ByteSize = Field(
-        parse_obj_as(ByteSize, "10Mib"),
+        TypeAdapter(ByteSize).validate_python("10Mib"),
         description=(
             "Disk space reserve when the dy-sidecar is started. Can be freed at "
             "any time via an API call. Main reason to free this disk space is "
@@ -130,7 +145,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings):
     )
     DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=["DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"],
+        validation_alias=AliasChoices(
+            "DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED",
+            "LOG_FORMAT_LOCAL_DEV_ENABLED",
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
     DY_SIDECAR_USER_ID: UserID
@@ -144,32 +162,50 @@
     DY_SIDECAR_PRODUCT_NAME: ProductName | None = None

     NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field(
-        auto_default_from_env=True
+        json_schema_extra={"auto_default_from_env": True}
+    )
+    DY_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
     )
-    DY_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True)
     DY_SIDECAR_AWS_S3_CLI_SETTINGS: AwsS3CliSettings | None = Field(
         None,
         description="AWS S3 settings are used for the AWS S3 CLI. If these settings are filled, the AWS S3 CLI is used instead of RClone.",
     )

-    POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True)
-    RABBIT_SETTINGS: RabbitSettings = Field(auto_default_from_env=True)
+    POSTGRES_SETTINGS: PostgresSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
+    RABBIT_SETTINGS: RabbitSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )
     DY_DEPLOYMENT_REGISTRY_SETTINGS: RegistrySettings = Field()
-    DY_DOCKER_HUB_REGISTRY_SETTINGS: RegistrySettings | None = Field()
+    DY_DOCKER_HUB_REGISTRY_SETTINGS: RegistrySettings | None = Field(default=None)

-    RESOURCE_TRACKING: ResourceTrackingSettings = Field(auto_default_from_env=True)
+    RESOURCE_TRACKING: ResourceTrackingSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )

-    SYSTEM_MONITOR_SETTINGS: SystemMonitorSettings = Field(auto_default_from_env=True)
+    SYSTEM_MONITOR_SETTINGS: SystemMonitorSettings = Field(
+        json_schema_extra={"auto_default_from_env": True}
+    )

     @property
     def are_prometheus_metrics_enabled(self) -> bool:
-        return self.DY_SIDECAR_CALLBACKS_MAPPING.metrics is not None
+        return (  # pylint: disable=no-member
+            self.DY_SIDECAR_CALLBACKS_MAPPING.metrics is not None
+        )

-    @validator("LOG_LEVEL", pre=True)
+    @field_validator("LOG_LEVEL", mode="before")
     @classmethod
     def _check_log_level(cls, value: str) -> str:
         return cls.validate_log_level(value)

+    _validate_dynamic_sidecar_telemetry_disk_usage_monitor_interval = (
+        validate_numeric_string_as_timedelta(
+            "DYNAMIC_SIDECAR_TELEMETRY_DISK_USAGE_MONITOR_INTERVAL"
+        )
+    )
+

 @lru_cache
 def get_settings() -> ApplicationSettings:
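The settings changes above follow the standard pydantic-settings v2 migration: `env=[...]` becomes `validation_alias=AliasChoices(...)`, `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")`, and `auto_default_from_env` moves into `json_schema_extra`. A reduced sketch using plain `BaseSettings` (the repo's `BaseCustomSettings` and `validate_numeric_string_as_timedelta` add behavior on top of this):

```python
from pydantic import AliasChoices, Field, field_validator
from pydantic_settings import BaseSettings


class SketchSettings(BaseSettings):  # hypothetical example class
    # v1: LOG_LEVEL: str = Field(default="WARNING", env=["APP_LOG_LEVEL", "LOG_LEVEL"])
    LOG_LEVEL: str = Field(
        default="WARNING",
        validation_alias=AliasChoices("APP_LOG_LEVEL", "LOG_LEVEL"),
    )

    # v1: @validator("LOG_LEVEL", pre=True)
    @field_validator("LOG_LEVEL", mode="before")
    @classmethod
    def _to_upper(cls, value: str) -> str:
        return value.upper()
```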
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/storage.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/storage.py
index 9118711a5732..639ee0dd8102 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/storage.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/storage.py
@@ -4,7 +4,7 @@

 from fastapi import FastAPI, status
 from httpx import AsyncClient
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter
 from servicelib.logging_utils import log_context
 from settings_library.node_ports import StorageAuthSettings

@@ -33,8 +33,10 @@ def _get_auth_or_none(storage_auth_settings: StorageAuthSettings) -> _AuthTuple


 def _get_url(storage_auth_settings: StorageAuthSettings) -> str:
-    url: str = parse_obj_as(AnyUrl, f"{storage_auth_settings.api_base_url}/")
-    return url
+    url: AnyUrl = TypeAdapter(AnyUrl).validate_python(
+        f"{storage_auth_settings.api_base_url}/"
+    )
+    return f"{url}"


 async def _is_storage_responsive(storage_auth_settings: StorageAuthSettings) -> bool:
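The `return f"{url}"` above is needed because v2 URL types are no longer `str` subclasses. Illustrative sketch:

```python
from pydantic import AnyUrl, TypeAdapter

url: AnyUrl = TypeAdapter(AnyUrl).validate_python("http://example.org/")
assert not isinstance(url, str)  # was a str subclass under pydantic v1
url_as_str: str = f"{url}"  # explicit conversion before passing where a str is expected
```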
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/shared_store.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/shared_store.py
index 0745f595fa83..0ca422a3390c 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/shared_store.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/shared_store.py
@@ -30,7 +30,7 @@ async def _persist_to_disk(self) -> None:
         async with aiofiles.open(
             self._shared_store_dir / STORE_FILE_NAME, "w"
         ) as data_file:
-            await data_file.write(self.json())
+            await data_file.write(self.model_dump_json())

     def post_init(self, shared_store_dir: Path):
         self._shared_store_dir = shared_store_dir
@@ -66,6 +66,17 @@ class SharedStore(_StoreMixin):
         default_factory=dict, description="persist the state of each volume"
     )

+    def __eq__(self, other: object) -> bool:
+        return all(
+            getattr(self, n, None) == getattr(other, n, None)
+            for n in (
+                "compose_spec",
+                "container_names",
+                "original_to_container_names",
+                "volume_states",
+            )
+        )
+
     async def _setup_initial_volume_states(self) -> None:
         async with self:
             for category, status in [
@@ -74,6 +85,7 @@ async def _setup_initial_volume_states(self) -> None:
                 (VolumeCategory.OUTPUTS, VolumeStatus.CONTENT_NEEDS_TO_BE_SAVED),
                 (VolumeCategory.STATES, VolumeStatus.CONTENT_NEEDS_TO_BE_SAVED),
             ]:
+                # pylint: disable=unsupported-assignment-operation
                 self.volume_states[category] = VolumeState(status=status)

     @classmethod
@@ -93,7 +105,7 @@ async def init_from_disk(
         async with aiofiles.open(shared_store_dir / store_file_name) as data_file:
             file_content = await data_file.read()

-        obj = cls.parse_raw(file_content)
+        obj = cls.model_validate_json(file_content)
         obj.post_init(shared_store_dir)
         return obj
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
index 0134d481f78e..6e7a7a190096 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
@@ -263,7 +263,7 @@ async def _send_resource_tracking_stop(platform_status: SimcorePlatformStatus):
             simcore_platform_status = platform_status
             if not containers_were_ok:
                 any_container_oom_killed = any(
-                    c.OOMKilled is True
+                    c.oom_killed is True
                     for c in container_states.values()
                     if c is not None
                 )
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_directory_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_directory_utils.py
index 7cc13922244e..21f07bf15231 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_directory_utils.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_directory_utils.py
@@ -1,7 +1,7 @@
 import os
 from pathlib import Path

-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter


 def get_directory_total_size(path: Path) -> ByteSize:
@@ -10,7 +10,7 @@ def get_directory_total_size(path: Path) -> ByteSize:
     # until we do not hit 1 million it can be ignored
     # NOTE: file size has no impact on performance
     if not path.exists():
-        return parse_obj_as(ByteSize, 0)
+        return TypeAdapter(ByteSize).validate_python(0)

     total = 0
     for entry in os.scandir(path):
@@ -18,4 +18,4 @@ def get_directory_total_size(path: Path) -> ByteSize:
             total += entry.stat().st_size
         elif entry.is_dir():
             total += get_directory_total_size(Path(entry.path))
-    return parse_obj_as(ByteSize, total)
+    return TypeAdapter(ByteSize).validate_python(total)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_filter.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_filter.py
index 8490f9cd72eb..227358f4960e 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_filter.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_event_filter.py
@@ -14,7 +14,7 @@
     NonNegativeInt,
     PositiveFloat,
     PositiveInt,
-    parse_obj_as,
+    TypeAdapter,
 )
 from servicelib.logging_utils import log_context
 from watchdog.observers.api import DEFAULT_OBSERVER_TIMEOUT
@@ -27,8 +27,8 @@

 logger = logging.getLogger(__name__)

-_1_MB: Final[PositiveInt] = parse_obj_as(ByteSize, "1mib")
-_500_MB: Final[PositiveInt] = parse_obj_as(ByteSize, "500mib")
+_1_MB: Final[PositiveInt] = TypeAdapter(ByteSize).validate_python("1mib")
+_500_MB: Final[PositiveInt] = TypeAdapter(ByteSize).validate_python("500mib")


 class BaseDelayPolicy(ABC):
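`TypeAdapter(ByteSize)` also replaces `parse_obj_as` for the size constants above; an adapter can be built once and reused. Sketch:

```python
from pydantic import ByteSize, TypeAdapter

_BYTE_SIZE_ADAPTER = TypeAdapter(ByteSize)  # reusable, avoids rebuilding the validator

one_mib = _BYTE_SIZE_ADAPTER.validate_python("1mib")
assert int(one_mib) == 1024 * 1024
```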
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
index d4a8ac8d07ad..fc33fe660acb 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
@@ -10,7 +10,7 @@
 from models_library.basic_types import IDStr
 from models_library.rabbitmq_messages import ProgressType
 from pydantic import PositiveFloat
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from servicelib import progress_bar
 from servicelib.background_task import start_periodic_task, stop_periodic_task
 from servicelib.logging_utils import log_catch, log_context
@@ -22,18 +22,17 @@
 from ..nodeports import upload_outputs
 from ._context import OutputsContext

-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)


 async def _cancel_task(task: Task, task_cancellation_timeout_s: PositiveFloat) -> None:
     task.cancel()
-    with suppress(CancelledError):
-        with log_catch(logger, reraise=False):
-            await wait((task,), timeout=task_cancellation_timeout_s)
+    with suppress(CancelledError), log_catch(_logger, reraise=False):
+        await wait((task,), timeout=task_cancellation_timeout_s)


-class UploadPortsFailed(PydanticErrorMixin, RuntimeError):
-    code: str = "dynamic_sidecar.outputs_manager.failed_while_uploading"
+class UploadPortsFailedError(OsparcErrorMixin, RuntimeError):
+    code: str = "dynamic_sidecar.outputs_manager.failed_while_uploading"  # type: ignore[assignment]

     msg_template: str = "Failed while uploading: failures={failures}"

@@ -130,7 +128,9 @@ async def _uploading_task_start(self) -> None:
         assert len(port_keys) > 0  # nosec

         async def _upload_ports() -> None:
-            with log_context(logger, logging.INFO, f"Uploading port keys: {port_keys}"):
+            with log_context(
+                _logger, logging.INFO, f"Uploading port keys: {port_keys}"
+            ):
                 async with progress_bar.ProgressBarData(
                     num_steps=1,
                     progress_report_cb=self.task_progress_cb,
@@ -155,7 +155,7 @@ def _remove_downloads(future: Future) -> None:
                 if future._exception.__traceback__
                 else ""
             )
-            logger.warning(
+            _logger.warning(
                 "%s ended with exception: %s%s",
                 task_name,
                 future._exception,
@@ -200,7 +200,7 @@ async def start(self) -> None:
         )

     async def shutdown(self) -> None:
-        with log_context(logger, logging.INFO, f"{OutputsManager.__name__} shutdown"):
+        with log_context(_logger, logging.INFO, f"{OutputsManager.__name__} shutdown"):
             await self._uploading_task_cancel()
             if self._task_scheduler_worker is not None:
                 await stop_periodic_task(
@@ -222,7 +222,7 @@ async def wait_for_all_uploads_to_finish(self) -> None:
         # always scheduling non file based ports for upload
         # there is no auto detection when these change
         for non_file_port_key in self.outputs_context.non_file_type_port_keys:
-            logger.info("Adding non file port key %s", non_file_port_key)
+            _logger.info("Adding non file port key %s", non_file_port_key)
             await self.port_key_content_changed(non_file_port_key)

         # NOTE: the file system watchdog was found unhealthy and to make
@@ -232,7 +232,7 @@ async def wait_for_all_uploads_to_finish(self) -> None:
         # is missed.
         if self._schedule_all_ports_for_upload:
             self._schedule_all_ports_for_upload = False
-            logger.warning(
+            _logger.warning(
                 "Scheduled %s for upload. The watchdog was rebooted. "
                 "This is a safety measure to make sure no data is lost. ",
                 self.outputs_context.outputs_path,
@@ -240,10 +240,10 @@ async def wait_for_all_uploads_to_finish(self) -> None:
             for file_port_key in self.outputs_context.file_type_port_keys:
                 await self.port_key_content_changed(file_port_key)

-        logger.info("Port status before waiting %s", f"{self._port_key_tracker}")
+        _logger.info("Port status before waiting %s", f"{self._port_key_tracker}")
         while not await self._port_key_tracker.no_tracked_ports():
             await asyncio.sleep(self.task_monitor_interval_s)
-        logger.info("Port status after waiting %s", f"{self._port_key_tracker}")
+        _logger.info("Port status after waiting %s", f"{self._port_key_tracker}")

         # NOTE: checking if there were any errors during the last port upload,
         # for each port. If any error is detected this will raise.
@@ -251,7 +251,7 @@ async def wait_for_all_uploads_to_finish(self) -> None:
             True for v in self._last_upload_error_tracker.values() if v is not None
         )
         if any_failed_upload:
-            raise UploadPortsFailed(failures=self._last_upload_error_tracker)
+            raise UploadPortsFailedError(failures=self._last_upload_error_tracker)


 def setup_outputs_manager(app: FastAPI) -> None:
@@ -264,7 +264,7 @@ async def on_startup() -> None:
         io_log_redirect_cb: LogRedirectCB | None = None
         if settings.RABBIT_SETTINGS:
             io_log_redirect_cb = partial(post_log_message, app, log_level=logging.INFO)
-        logger.debug(
+        _logger.debug(
             "setting up outputs manager %s",
             "with redirection of logs..." if io_log_redirect_cb else "...",
         )
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_models.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_models.py
index 4708e9c291f3..f87fa415a743 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_models.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_models.py
@@ -1,10 +1,8 @@
 from asyncio import Task

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict


 class ResourceTrackingState(BaseModel):
     heart_beat_task: Task | None = None
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/service_liveness.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/service_liveness.py
index 78976b53a868..33c0083861f5 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/service_liveness.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/service_liveness.py
@@ -4,7 +4,7 @@
 from datetime import timedelta
 from typing import Final

-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from tenacity import AsyncRetrying, RetryCallState, TryAgain
 from tenacity.stop import stop_after_delay
 from tenacity.wait import wait_fixed
@@ -16,7 +16,7 @@
 _DEFAULT_TIMEOUT_INTERVAL: Final[timedelta] = timedelta(seconds=30)


-class CouldNotReachServiceError(PydanticErrorMixin, Exception):
+class CouldNotReachServiceError(OsparcErrorMixin, Exception):
     msg_template: str = "Could not contact service '{service_name}' at '{endpoint}'. Look above for details."
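The `_models.py` change above is the v2 config migration: the inner `class Config` becomes a `model_config = ConfigDict(...)` attribute. Reduced sketch (hypothetical class name):

```python
from asyncio import Task

from pydantic import BaseModel, ConfigDict


class StateSketch(BaseModel):
    task: Task | None = None

    # pydantic v1:
    #     class Config:
    #         arbitrary_types_allowed = True
    model_config = ConfigDict(arbitrary_types_allowed=True)
```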
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py
index f2cc53a0d9df..0d010794e235 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py
@@ -6,7 +6,7 @@
 from models_library.user_preferences import PreferenceName
 from models_library.users import UserID
 from packaging.version import Version
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_postgres_database.utils_user_preferences import (
     UserServicesUserPreferencesRepo,
 )
@@ -73,5 +73,7 @@ async def load_preferences(
     if payload is None:
         return

-    preference = parse_obj_as(preference_class, umsgpack.unpackb(payload))
+    preference = TypeAdapter(preference_class).validate_python(
+        umsgpack.unpackb(payload)
+    )
     await dir_from_bytes(preference.value, user_preferences_path)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py
index d1b373ac1f5d..b2c4a327c837 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py
@@ -1,8 +1,8 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin


-class BaseServicesPreferencesError(PydanticErrorMixin, Exception):
-    code = "dynamic_sidecar.user_service_preferences"
+class BaseServicesPreferencesError(OsparcErrorMixin, Exception):
+    code = "dynamic_sidecar.user_service_preferences"  # type: ignore[assignment]


 class DestinationIsNotADirectoryError(BaseServicesPreferencesError):
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py
index 38f2eab71111..bdffd81a4a95 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py
@@ -2,13 +2,15 @@
 from typing import Final

 import aiofiles
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from servicelib.archiving_utils import archive_dir, unarchive_dir
 from servicelib.file_utils import remove_directory

 from ._errors import DestinationIsNotADirectoryError, PreferencesAreTooBigError

-_MAX_PREFERENCES_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "128kib")
+_MAX_PREFERENCES_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
+    "128kib"
+)


 async def dir_to_bytes(source: Path) -> bytes:
diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py
index 8b4760b26dd0..0dc55477b47b 100644
--- a/services/dynamic-sidecar/tests/conftest.py
+++ b/services/dynamic-sidecar/tests/conftest.py
@@ -21,7 +21,7 @@
 from models_library.services_creation import CreateServiceMetricsAdditionalParams
 from models_library.users import UserID
 from models_library.utils.json_serialization import json_dumps
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import (
     EnvVarsDict,
@@ -347,9 +347,10 @@ def mock_stop_heart_beat_task(mocker: MockerFixture) -> AsyncMock:

 @pytest.fixture
 def mock_metrics_params(faker: Faker) -> CreateServiceMetricsAdditionalParams:
-    return parse_obj_as(
-        CreateServiceMetricsAdditionalParams,
-        CreateServiceMetricsAdditionalParams.Config.schema_extra["example"],
+    return TypeAdapter(CreateServiceMetricsAdditionalParams).validate_python(
+        CreateServiceMetricsAdditionalParams.model_config["json_schema_extra"][
+            "example"
+        ],
     )
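Likewise, schema examples declared under v1's `Config.schema_extra` are read from `model_config["json_schema_extra"]` in v2, which is what the rewritten fixture does. Sketch with a hypothetical model:

```python
from pydantic import BaseModel, ConfigDict, TypeAdapter


class ParamsSketch(BaseModel):
    name: str

    model_config = ConfigDict(json_schema_extra={"example": {"name": "demo"}})


example = ParamsSketch.model_config["json_schema_extra"]["example"]
params = TypeAdapter(ParamsSketch).validate_python(example)
```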
diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py
index 5e70b0a6f79f..a1b64635e58b 100644
--- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py
+++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py
@@ -23,7 +23,7 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.faker_factories import random_project
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
@@ -199,7 +199,7 @@ async def restore_legacy_state_archives(
             user_id=user_id,
             store_id=SIMCORE_LOCATION,
             store_name=None,
-            s3_object=parse_obj_as(SimcoreS3FileID, s3_path),
+            s3_object=TypeAdapter(SimcoreS3FileID).validate_python(s3_path),
             path_to_upload=legacy_archive_zip,
             io_log_redirect_cb=None,
         )
@@ -303,8 +303,7 @@ def s3_settings(app_state: AppState) -> S3Settings:

 @pytest.fixture
 def bucket_name(app_state: AppState) -> S3BucketName:
-    return parse_obj_as(
-        S3BucketName,
+    return TypeAdapter(S3BucketName).validate_python(
         app_state.settings.DY_SIDECAR_R_CLONE_SETTINGS.R_CLONE_S3.S3_BUCKET_NAME,
     )
diff --git a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py
index 094b30144047..9be0bbdebbf2 100644
--- a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py
+++ b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py
@@ -14,7 +14,7 @@
 from models_library.projects import ProjectID
 from models_library.services import ServiceKey, ServiceVersion
 from models_library.users import UserID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from pytest_simcore.helpers.postgres_tools import PostgresTestConfig
 from simcore_service_dynamic_sidecar.core.application import create_app
@@ -46,17 +46,19 @@ def dy_sidecar_user_preferences_path(tmp_path: Path) -> Path:

 @pytest.fixture
 def service_key() -> ServiceKey:
-    return parse_obj_as(ServiceKey, "simcore/services/dynamic/test-service-34")
+    return TypeAdapter(ServiceKey).validate_python(
+        "simcore/services/dynamic/test-service-34"
+    )


 @pytest.fixture
 def service_version() -> ServiceVersion:
-    return parse_obj_as(ServiceVersion, "1.0.0")
+    return TypeAdapter(ServiceVersion).validate_python("1.0.0")


 @pytest.fixture
 def product_name() -> ProductName:
-    return parse_obj_as(ProductName, "osparc")
+    return TypeAdapter(ProductName).validate_python("osparc")


 @pytest.fixture
diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
index f16b883de15f..0877e7ab0fe5 100644
--- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
+++ b/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py
@@ -23,8 +23,9 @@
 from faker import Faker
 from fastapi import FastAPI, status
 from models_library.api_schemas_dynamic_sidecar.containers import ActivityInfo
-from models_library.services import ServiceOutput
 from models_library.services_creation import CreateServiceMetricsAdditionalParams
+from models_library.services_io import ServiceOutput
+from pydantic import TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME
@@ -266,11 +267,11 @@ def not_started_containers() -> list[str]:
 @pytest.fixture
 def mock_outputs_labels() -> dict[str, ServiceOutput]:
     return {
-        "output_port_1": ServiceOutput.parse_obj(
-            ServiceOutput.Config.schema_extra["examples"][3]
+        "output_port_1": TypeAdapter(ServiceOutput).validate_python(
+            ServiceOutput.model_config["json_schema_extra"]["examples"][3]
         ),
-        "output_port_2": ServiceOutput.parse_obj(
-            ServiceOutput.Config.schema_extra["examples"][3]
+        "output_port_2": TypeAdapter(ServiceOutput).validate_python(
+            ServiceOutput.model_config["json_schema_extra"]["examples"][3]
         ),
     }
@@ -366,12 +367,12 @@ def test_ensure_api_vtag_is_v1():
 async def test_start_same_space_twice(compose_spec: str, test_client: TestClient):
     settings = test_client.application.state.settings

-    settings_1 = settings.copy(
+    settings_1 = settings.model_copy(
         update={"DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": "test_name_space_1"}, deep=True
     )
     await _assert_compose_spec_pulled(compose_spec, settings_1)

-    settings_2 = settings.copy(
+    settings_2 = settings.model_copy(
         update={"DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": "test_name_space_2"}, deep=True
     )
     await _assert_compose_spec_pulled(compose_spec, settings_2)
@@ -750,7 +751,10 @@ async def test_containers_activity_command_failed(
 ):
     response = await test_client.get(f"/{API_VTAG}/containers/activity")
     assert response.status_code == 200, response.text
-    assert response.json() == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME)
+    assert (
+        response.json()
+        == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME).model_dump()
+    )


 async def test_containers_activity_no_inactivity_defined(
@@ -772,8 +776,8 @@ def mock_inactive_since_command_response(
     activity_response: ActivityInfo,
 ) -> None:
     mocker.patch(
-        "simcore_service_dynamic_sidecar.api.rest.containers.run_command_in_container",
-        return_value=activity_response.json(),
+        "simcore_service_dynamic_sidecar.api.rest.containers.run_command_in_container",
+        return_value=activity_response.model_dump_json(),
     )


@@ -786,7 +790,7 @@ async def test_containers_activity_inactive_since(
 ):
     response = await test_client.get(f"/{API_VTAG}/containers/activity")
     assert response.status_code == 200, response.text
-    assert response.json() == activity_response
+    assert response.json() == activity_response.model_dump()


 @pytest.fixture
@@ -805,4 +809,7 @@ async def test_containers_activity_unexpected_response(
 ):
     response = await test_client.get(f"/{API_VTAG}/containers/activity")
     assert response.status_code == 200, response.text
-    assert response.json() == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME)
+    assert (
+        response.json()
+        == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME).model_dump()
+    )
diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py b/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py
index 9ea4de06dbb5..66a31f1c13ce 100644
--- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py
+++ b/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py
@@ -16,6 +16,7 @@
 from aiodocker.containers import DockerContainer
 from aiodocker.volumes import DockerVolume
 from asgi_lifespan import LifespanManager
+from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
 from fastapi import FastAPI
 from fastapi.routing import APIRoute
 from httpx import AsyncClient
@@ -24,7 +25,7 @@
     ProgressPercent,
 )
 from models_library.services_creation import CreateServiceMetricsAdditionalParams
-from pydantic import AnyHttpUrl, parse_obj_as
+from pydantic import AnyHttpUrl, TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
 from servicelib.fastapi.long_running_tasks.client import (
@@ -157,7 +158,9 @@ def compose_spec(request: pytest.FixtureRequest) -> str:

 @pytest.fixture
 def backend_url() -> AnyHttpUrl:
-    return parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io")
+    return TypeAdapter(AnyHttpUrlLegacy).validate_python(
+        "http://backgroud.testserver.io"
+    )


 @pytest.fixture
@@ -187,7 +190,7 @@ async def httpx_async_client(
     # crete dir here
     async with AsyncClient(
         app=app,
-        base_url=backend_url,
+        base_url=f"{backend_url}",
         headers={"Content-Type": "application/json"},
     ) as client:
         yield client
@@ -197,7 +200,7 @@ def client(
     app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl
 ) -> Client:
-    return Client(app=app, async_client=httpx_async_client, base_url=backend_url)
+    return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}")


 @pytest.fixture
"http://backgroud.testserver.io") + return TypeAdapter(AnyHttpUrlLegacy).validate_python( + "http://backgroud.testserver.io" + ) @pytest.fixture @@ -71,7 +74,7 @@ async def httpx_async_client( ) -> AsyncIterable[AsyncClient]: async with AsyncClient( app=app, - base_url=backend_url, + base_url=f"{backend_url}", headers={"Content-Type": "application/json"}, ) as client: yield client @@ -81,7 +84,7 @@ async def httpx_async_client( def client( app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl ) -> Client: - return Client(app=app, async_client=httpx_async_client, base_url=backend_url) + return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}") @pytest.fixture diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_volumes.py b/services/dynamic-sidecar/tests/unit/test_api_rest_volumes.py index fe396d002ad4..40eab12336a3 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_volumes.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_volumes.py @@ -56,6 +56,4 @@ async def test_volumes_state_saved_error( ) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text json_response = response.json() - assert ( - invalid_volume_category not in json_response["detail"][0]["ctx"]["enum_values"] - ) + assert invalid_volume_category not in json_response["detail"][0]["ctx"]["expected"] diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py index 22400cb0e809..ddb5b18df1ae 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py @@ -16,6 +16,7 @@ from aiodocker.utils import clean_filters from aiodocker.volumes import DockerVolume from asgi_lifespan import LifespanManager +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy from fastapi import FastAPI from httpx import AsyncClient from models_library.generated_models.docker_rest_api import ContainerState @@ -28,7 +29,7 @@ SimcorePlatformStatus, ) from models_library.services_creation import CreateServiceMetricsAdditionalParams -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( @@ -79,7 +80,9 @@ def compose_spec(raw_compose_spec: dict[str, Any]) -> str: @pytest.fixture def backend_url() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + return TypeAdapter(AnyHttpUrlLegacy).validate_python( + "http://backgroud.testserver.io" + ) @pytest.fixture @@ -113,7 +116,7 @@ async def httpx_async_client( ) -> AsyncIterable[AsyncClient]: # crete dir here async with AsyncClient( - app=app, base_url=backend_url, headers={"Content-Type": "application/json"} + app=app, base_url=f"{backend_url}", headers={"Content-Type": "application/json"} ) as client: yield client @@ -122,7 +125,7 @@ async def httpx_async_client( def client( app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl ) -> Client: - return Client(app=app, async_client=httpx_async_client, base_url=backend_url) + return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}") @pytest.fixture @@ -189,7 +192,7 @@ async def _wait_for_containers_to_be_running(app: FastAPI) -> None: 
         running_container_statuses = [
             x
             for x in containers_statuses.values()
-            if x is not None and x.Status == ContainerStatus.running
+            if x is not None and x.status == ContainerStatus.running
         ]

         if len(running_container_statuses) != len(shared_store.container_names):
@@ -361,8 +364,8 @@ async def _mocked_get_container_states(
         results = await get_container_states(container_names)
         for result in results.values():
             if result:
-                result.OOMKilled = True
-                result.Status = ContainerStatus.exited
+                result.oom_killed = True
+                result.status = ContainerStatus.exited
                 break
         return results
diff --git a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py
index 123f21864a05..c78b800ce5a3 100644
--- a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py
+++ b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py
@@ -2,7 +2,7 @@
 # pylint:disable=unused-argument


-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict
 from simcore_service_dynamic_sidecar.core.application import create_base_app
 from simcore_service_dynamic_sidecar.core.reserved_space import (
@@ -18,7 +18,9 @@ def test_reserved_disk_space_workflow(
     create_base_app()

     assert _RESERVED_DISK_SPACE_NAME.exists()
-    assert _RESERVED_DISK_SPACE_NAME.stat().st_size == parse_obj_as(ByteSize, "10MiB")
+    assert _RESERVED_DISK_SPACE_NAME.stat().st_size == TypeAdapter(
+        ByteSize
+    ).validate_python("10MiB")

     remove_reserved_disk_space()
     assert not _RESERVED_DISK_SPACE_NAME.exists()
diff --git a/services/dynamic-sidecar/tests/unit/test_core_settings.py b/services/dynamic-sidecar/tests/unit/test_core_settings.py
index 4512abf71d6f..9e581d90999d 100644
--- a/services/dynamic-sidecar/tests/unit/test_core_settings.py
+++ b/services/dynamic-sidecar/tests/unit/test_core_settings.py
@@ -61,6 +61,7 @@ def test_settings_with_node_ports_storage_auth(
     settings = ApplicationSettings.create_from_envs()

     assert settings.NODE_PORTS_STORAGE_AUTH
+    # pylint:disable=no-member
     assert settings.NODE_PORTS_STORAGE_AUTH.STORAGE_SECURE is True
     assert settings.NODE_PORTS_STORAGE_AUTH.STORAGE_HOST == "host"
     assert settings.NODE_PORTS_STORAGE_AUTH.STORAGE_PORT == 42
@@ -72,15 +73,10 @@ def test_settings_with_node_ports_storage_auth(
     assert (
         settings.NODE_PORTS_STORAGE_AUTH.STORAGE_PASSWORD.get_secret_value() == "passwd"
     )
-    assert "passwd" not in settings.NODE_PORTS_STORAGE_AUTH.json()
+    assert "passwd" not in settings.NODE_PORTS_STORAGE_AUTH.model_dump_json()


-@pytest.mark.parametrize(
-    "envs",
-    [
-        {},
-    ],
-)
+@pytest.mark.parametrize("envs", [{}])
 def test_settings_with_node_ports_storage_auth_as_missing(
     mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, envs: dict[str, str]
 ):
@@ -88,6 +84,7 @@ def test_settings_with_node_ports_storage_auth_as_missing(
     settings = ApplicationSettings.create_from_envs()

     assert settings.NODE_PORTS_STORAGE_AUTH is not None
+    # pylint:disable=no-member
     assert settings.NODE_PORTS_STORAGE_AUTH.auth_required is False
     assert settings.NODE_PORTS_STORAGE_AUTH.STORAGE_USERNAME is None
     assert settings.NODE_PORTS_STORAGE_AUTH.STORAGE_PASSWORD is None
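The `"passwd" not in ...model_dump_json()` assertion above relies on `SecretStr` staying masked during serialization in v2, just as `.json()` masked it in v1. Sketch:

```python
from pydantic import BaseModel, SecretStr


class AuthSketch(BaseModel):  # hypothetical example class
    STORAGE_PASSWORD: SecretStr


auth = AuthSketch(STORAGE_PASSWORD="passwd")
assert "passwd" not in auth.model_dump_json()  # serialized as "**********"
assert auth.STORAGE_PASSWORD.get_secret_value() == "passwd"
```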
diff --git a/services/dynamic-sidecar/tests/unit/test_core_stroage.py b/services/dynamic-sidecar/tests/unit/test_core_stroage.py
index c8f1d19405d0..0fdf000f7c01 100644
--- a/services/dynamic-sidecar/tests/unit/test_core_stroage.py
+++ b/services/dynamic-sidecar/tests/unit/test_core_stroage.py
@@ -12,6 +12,7 @@
 import uvicorn
 from fastapi import Depends, FastAPI, HTTPException, status
 from fastapi.security import HTTPBasic, HTTPBasicCredentials
+from pydantic import TypeAdapter
 from pytest_mock import MockerFixture
 from settings_library.node_ports import StorageAuthSettings
 from simcore_service_dynamic_sidecar.core.storage import (
@@ -60,7 +61,7 @@ async def unprotected_route():
 def storage_auth_settings(
     username: str | None, password: str | None
 ) -> StorageAuthSettings:
-    return StorageAuthSettings.parse_obj(
+    return TypeAdapter(StorageAuthSettings).validate_python(
         {
             "STORAGE_HOST": "localhost",
             "STORAGE_PORT": 44332,
diff --git a/services/dynamic-sidecar/tests/unit/test_models_shared_store.py b/services/dynamic-sidecar/tests/unit/test_models_shared_store.py
index c72a8bdb85a8..2c2b474a0290 100644
--- a/services/dynamic-sidecar/tests/unit/test_models_shared_store.py
+++ b/services/dynamic-sidecar/tests/unit/test_models_shared_store.py
@@ -6,11 +6,12 @@
 from pathlib import Path
 from typing import Any

+import arrow
 import pytest
 from async_asgi_testclient import TestClient
 from fastapi import FastAPI
 from models_library.sidecar_volumes import VolumeCategory, VolumeState, VolumeStatus
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from servicelib.utils import logged_gather
 from simcore_service_dynamic_sidecar.core import application
@@ -53,17 +54,17 @@ def mock_docker_compose(mocker: MockerFixture) -> None:
         {"volume_states": {}},
         {
             "volume_states": {
-                VolumeCategory.OUTPUTS: parse_obj_as(
-                    VolumeState, {"status": VolumeStatus.CONTENT_NO_SAVE_REQUIRED}
+                VolumeCategory.OUTPUTS: TypeAdapter(VolumeState).validate_python(
+                    {"status": VolumeStatus.CONTENT_NO_SAVE_REQUIRED}
                 ),
-                VolumeCategory.INPUTS: parse_obj_as(
-                    VolumeState, {"status": VolumeStatus.CONTENT_NEEDS_TO_BE_SAVED}
+                VolumeCategory.INPUTS: TypeAdapter(VolumeState).validate_python(
+                    {"status": VolumeStatus.CONTENT_NEEDS_TO_BE_SAVED}
                 ),
-                VolumeCategory.STATES: parse_obj_as(
-                    VolumeState, {"status": VolumeStatus.CONTENT_WAS_SAVED}
+                VolumeCategory.STATES: TypeAdapter(VolumeState).validate_python(
+                    {"status": VolumeStatus.CONTENT_WAS_SAVED}
                 ),
-                VolumeCategory.SHARED_STORE: parse_obj_as(
-                    VolumeState, {"status": VolumeStatus.CONTENT_NO_SAVE_REQUIRED}
+                VolumeCategory.SHARED_STORE: TypeAdapter(VolumeState).validate_python(
+                    {"status": VolumeStatus.CONTENT_NO_SAVE_REQUIRED}
                 ),
             }
         },
@@ -88,7 +89,18 @@ async def test_shared_store_updates(
     # check the contes of the file should be the same as the shared_store's
     assert store_file_path.exists() is True
-    assert shared_store == SharedStore.parse_raw(store_file_path.read_text())
+
+    def _normalize_datetimes(shared_store: SharedStore) -> None:
+        for state in shared_store.volume_states.values():
+            state.last_changed = arrow.get(state.last_changed.isoformat()).datetime
+
+    shared_store_from_file = SharedStore.model_validate_json(
+        store_file_path.read_text()
+    )
+    _normalize_datetimes(shared_store)
+    _normalize_datetimes(shared_store_from_file)
+
+    assert shared_store == shared_store_from_file


 async def test_no_concurrency_with_parallel_writes(
@@ -119,12 +131,20 @@ async def test_init_from_disk_with_legacy_data_format(project_tests_dir: Path):
     # if file is missing it correctly loaded the storage_file
     assert (MOCKS_DIR / STORE_FILE_NAME).exists() is False

-    # ensure object objects are compatible
-    parsed_legacy_format = json.loads(disk_shared_store.json())
+    def _normalize_datetimes(data: dict[str, Any]) -> None:
+        for state in data["volume_states"].values():
+            state["last_changed"] = arrow.get(
+                state["last_changed"]
+            ).datetime.isoformat()

-    assert parsed_legacy_format == json.loads(
-        (MOCKS_DIR / LEGACY_SHARED_STORE).read_text()
-    )
+    # ensure objects are compatible
+    parsed_legacy_format = json.loads(disk_shared_store.model_dump_json())
+    load_raw_from_disk = json.loads((MOCKS_DIR / LEGACY_SHARED_STORE).read_text())
+
+    _normalize_datetimes(parsed_legacy_format)
+    _normalize_datetimes(load_raw_from_disk)
+
+    assert parsed_legacy_format == load_raw_from_disk


 async def test_init_from_disk_no_file_present(tmp_path: Path):
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
index eabd1114083f..51855ffd20fe 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
@@ -33,7 +33,7 @@
 from models_library.projects_nodes_io import NodeID
 from models_library.services_types import ServicePortKey
 from models_library.users import UserID
-from pydantic import ByteSize, NonNegativeInt, parse_obj_as
+from pydantic import ByteSize, NonNegativeInt, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from servicelib.utils import logged_gather
@@ -133,7 +133,7 @@ def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage:
     return DiskUsage(
         total=ByteSize.validate(byte_size_str),
         used=ByteSize(0),
-        free=ByteSize.validate(byte_size_str),
+        free=TypeAdapter(ByteSize).validate_python(byte_size_str),
         used_percent=0,
     )
@@ -144,7 +144,7 @@ def _get_on_service_disk_usage_spy(
     # emulates front-end receiving message
     async def on_service_status(data):
-        assert parse_obj_as(ServiceDiskUsage, data) is not None
+        assert TypeAdapter(ServiceDiskUsage).validate_python(data) is not None

     on_event_spy = AsyncMock(wraps=on_service_status)
     socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy)
@@ -231,7 +231,7 @@ def _get_on_input_port_spy(
     # emulates front-end receiving message
     async def on_service_status(data):
-        assert parse_obj_as(ServiceDiskUsage, data) is not None
+        assert TypeAdapter(ServiceDiskUsage).validate_python(data) is not None

     on_event_spy = AsyncMock(wraps=on_service_status)
     socketio_client.on(SOCKET_IO_STATE_INPUT_PORTS_EVENT, on_event_spy)
@@ -320,7 +320,7 @@ def _get_on_output_port_spy(
     # emulates front-end receiving message
     async def on_service_status(data):
-        assert parse_obj_as(ServiceDiskUsage, data) is not None
+        assert TypeAdapter(ServiceDiskUsage).validate_python(data) is not None

     on_event_spy = AsyncMock(wraps=on_service_status)
     socketio_client.on(SOCKET_IO_STATE_OUTPUT_PORTS_EVENT, on_event_spy)
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
index 38b217bab8f5..0a275d1d70bb 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
@@ -7,7 +7,7 @@
 from unittest.mock import AsyncMock

 import pytest
-from pydantic import ByteSize, NonNegativeFloat, NonNegativeInt, parse_obj_as
+from pydantic import ByteSize, NonNegativeFloat, NonNegativeInt, TypeAdapter
 from pytest_mock.plugin import MockerFixture
 from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
     PortNotifier,
 )
@@ -230,8 +230,8 @@ def test_default_delay_policy():
     wait_policy = DefaultDelayPolicy()

     # below items are defined by the default policy
-    LOWER_BOUND = parse_obj_as(ByteSize, "1mib")
-    UPPER_BOUND = parse_obj_as(ByteSize, "500mib")
+    LOWER_BOUND = TypeAdapter(ByteSize).validate_python("1mib")
+    UPPER_BOUND = TypeAdapter(ByteSize).validate_python("500mib")

     assert wait_policy.get_min_interval() == 1.0
@@ -243,4 +243,7 @@ def test_default_delay_policy():
     assert wait_policy.get_wait_interval(UPPER_BOUND - 1) < 10.0
     assert wait_policy.get_wait_interval(UPPER_BOUND) == 10.0
     assert wait_policy.get_wait_interval(UPPER_BOUND + 1) == 10.0
-    assert wait_policy.get_wait_interval(parse_obj_as(ByteSize, "1Tib")) == 10.0
+    assert (
+        wait_policy.get_wait_interval(TypeAdapter(ByteSize).validate_python("1Tib"))
+        == 10.0
+    )
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
index 3bf17d09f925..a38658f222bd 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
@@ -31,7 +31,7 @@
 )
 from simcore_service_dynamic_sidecar.modules.outputs._manager import (
     OutputsManager,
-    UploadPortsFailed,
+    UploadPortsFailedError,
     _PortKeyTracker,
     setup_outputs_manager,
 )
@@ -234,7 +234,7 @@ async def test_recovers_after_raising_error(
     assert await outputs_manager._port_key_tracker.no_tracked_ports() is False
     await asyncio.sleep(outputs_manager.task_monitor_interval_s * 10)

-    with pytest.raises(UploadPortsFailed) as exec_info:
+    with pytest.raises(UploadPortsFailedError) as exec_info:
         await outputs_manager.wait_for_all_uploads_to_finish()

     assert set(exec_info.value.failures.keys()) == set(port_keys) | set(
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
index 7f9b81587c25..ffa4dfbef45e 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
@@ -22,7 +22,7 @@
     NonNegativeFloat,
     NonNegativeInt,
     PositiveFloat,
-    parse_obj_as,
+    TypeAdapter,
 )
 from pytest_mock import MockerFixture
 from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes
@@ -172,20 +172,20 @@ class FileGenerationInfo:
 @pytest.fixture(
     params=[
         FileGenerationInfo(
-            size=parse_obj_as(ByteSize, "100b"),
-            chunk_size=parse_obj_as(ByteSize, "1b"),
+            size=TypeAdapter(ByteSize).validate_python("100b"),
+            chunk_size=TypeAdapter(ByteSize).validate_python("1b"),
         ),
         FileGenerationInfo(
-            size=parse_obj_as(ByteSize, "100kib"),
-            chunk_size=parse_obj_as(ByteSize, "1kib"),
+            size=TypeAdapter(ByteSize).validate_python("100kib"),
+            chunk_size=TypeAdapter(ByteSize).validate_python("1kib"),
         ),
         FileGenerationInfo(
-            size=parse_obj_as(ByteSize, "100mib"),
-            chunk_size=parse_obj_as(ByteSize, "1mib"),
+            size=TypeAdapter(ByteSize).validate_python("100mib"),
+            chunk_size=TypeAdapter(ByteSize).validate_python("1mib"),
         ),
         FileGenerationInfo(
-            size=parse_obj_as(ByteSize, "100mib"),
-            chunk_size=parse_obj_as(ByteSize, "10mib"),
+            size=TypeAdapter(ByteSize).validate_python("100mib"),
+            chunk_size=TypeAdapter(ByteSize).validate_python("10mib"),
         ),
     ]
 )
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py
index e423d588480a..97a65d4553b5 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__disk_usage.py
@@ -19,7 +19,7 @@
 from models_library.users import UserID
 from models_library.utils.json_serialization import json_dumps
 from psutil._common import sdiskusage
-from pydantic import ByteSize
+from pydantic import ByteSize, TypeAdapter
 from pytest_mock import MockerFixture
 from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes
 from simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage import (
@@ -98,7 +98,7 @@ def _get_entry(mock: Mock, *, index: int) -> dict[Path, DiskUsage]:

 def _get_byte_size(byte_size_as_str: str) -> ByteSize:
-    return ByteSize.validate(byte_size_as_str)
+    return TypeAdapter(ByteSize).validate_python(byte_size_as_str)


 def _get_mocked_disk_usage(byte_size_as_str: str) -> DiskUsage:
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_user_services_preferences_user_preference.py b/services/dynamic-sidecar/tests/unit/test_modules_user_services_preferences_user_preference.py
index 08b6f3da05ed..9496bfceb784 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_user_services_preferences_user_preference.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_user_services_preferences_user_preference.py
@@ -3,7 +3,7 @@

 import pytest
 from models_library.services import ServiceKey
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_service_dynamic_sidecar.modules.user_services_preferences._user_preference import (
     get_model_class,
 )
@@ -11,7 +11,9 @@

 @pytest.fixture
 def service_key() -> ServiceKey:
-    return parse_obj_as(ServiceKey, "simcore/services/dynamic/test-service-34")
+    return TypeAdapter(ServiceKey).validate_python(
+        "simcore/services/dynamic/test-service-34"
+    )


 def test_get_model_class_only_defined_once(service_key: ServiceKey):
diff --git a/services/efs-guardian/requirements/_base.in b/services/efs-guardian/requirements/_base.in
index 90fc6e24ac64..247616533ede 100644
--- a/services/efs-guardian/requirements/_base.in
+++ b/services/efs-guardian/requirements/_base.in
@@ -6,6 +6,7 @@
 --constraint ./constraints.txt

 # intra-repo required dependencies
+--requirement ../../../packages/common-library/requirements/_base.in
 --requirement ../../../packages/models-library/requirements/_base.in
 --requirement ../../../packages/settings-library/requirements/_base.in
 --requirement ../../../packages/aws-library/requirements/_base.in
diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt
index 0e7970349859..35f30a99e5a5 100644
--- a/services/efs-guardian/requirements/_base.txt
+++ b/services/efs-guardian/requirements/_base.txt
@@ -51,6 +51,8 @@ aiosignal==1.3.1
     # via aiohttp
 alembic==1.13.3
     # via -r requirements/../../../packages/postgres-database/requirements/_base.in
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.6.2.post1
     # via
     #   fast-depends
@@ -122,21 +124,8 @@ email-validator==2.2.0
     # via pydantic
 fast-depends==2.4.12
     # via faststream
-fastapi==0.99.1
+fastapi==0.115.0
     # via
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/../../../packages/service-library/requirements/_fastapi.in
     #   -r requirements/_base.in
     #   prometheus-fastapi-instrumentator
@@ -325,7 +314,7 @@ psutil==6.1.0
     #   -r requirements/../../../packages/service-library/requirements/_base.in
 psycopg2-binary==2.9.10
     # via sqlalchemy
-pydantic==1.10.18
+pydantic==2.9.2
     # via
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -337,7 +326,6 @@ pydantic==1.10.18
     #   -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -355,6 +343,26 @@ pydantic==1.10.18
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
     #   fast-depends
     #   fastapi
+    #   pydantic-extra-types
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==5.0.0 @@ -365,6 +373,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -453,7 +463,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -502,11 +512,13 @@ types-python-dateutil==2.9.0.20241003 typing-extensions==4.12.2 # via # aiodebug + # aiodocker # alembic # fastapi # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index f188e8071de3..3a082363fd5a 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -15,6 +15,10 @@ aiosignal==1.3.1 # via # -c requirements/_base.txt # aiohttp +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.6.2.post1 @@ -208,11 +212,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.2.0 # via moto pytest==8.3.3 @@ -238,7 +246,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/efs-guardian/requirements/ci.txt b/services/efs-guardian/requirements/ci.txt index 193365cdddc3..163b032f3469 100644 --- a/services/efs-guardian/requirements/ci.txt +++ 
b/services/efs-guardian/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/efs-guardian/requirements/dev.txt b/services/efs-guardian/requirements/dev.txt index 0c832fb84112..35e2f508112d 100644 --- a/services/efs-guardian/requirements/dev.txt +++ b/services/efs-guardian/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index f52120966f21..c0c99bd5a940 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -27,7 +27,7 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() - logger.info("app settings: %s", app_settings.json(indent=1)) + logger.info("app settings: %s", app_settings.model_dump_json(indent=1)) app = FastAPI( debug=app_settings.EFS_GUARDIAN_DEBUG, diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index 7856d991a436..c383a3f8d14a 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -9,7 +9,7 @@ LogLevel, VersionTag, ) -from pydantic import ByteSize, Field, PositiveInt, parse_obj_as, validator +from pydantic import AliasChoices, ByteSize, Field, PositiveInt, field_validator from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings from settings_library.postgres import PostgresSettings @@ -70,33 +70,45 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- EFS_GUARDIAN_DEBUG: bool = Field( - default=False, description="Debug mode", env=["EFS_GUARDIAN_DEBUG", "DEBUG"] + default=False, + description="Debug mode", + validation_alias=AliasChoices("EFS_GUARDIAN_DEBUG", "DEBUG"), ) EFS_GUARDIAN_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.INFO, + validation_alias=AliasChoices("EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field(auto_default_from_env=True) - EFS_GUARDIAN_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) - EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) - EFS_GUARDIAN_REDIS: RedisSettings = Field(auto_default_from_env=True) + EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) + EFS_GUARDIAN_POSTGRES: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) + EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) + EFS_GUARDIAN_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) EFS_GUARDIAN_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, ) @cached_property def LOG_LEVEL(self) -> LogLevel: # noqa: N802 return self.EFS_GUARDIAN_LOGLEVEL - @validator("EFS_GUARDIAN_LOGLEVEL", pre=True) + @field_validator("EFS_GUARDIAN_LOGLEVEL", mode="before") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py index 61a92118c927..9c8c45d09332 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py @@ -1,8 +1,5 @@ -from typing import Any - -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class EfsGuardianBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + """EFS guardian base error class.""" diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py index 11c7781bbaee..d1e3b67353aa 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py @@ -2,7 +2,6 @@ from fastapi import FastAPI from models_library.rabbitmq_messages import DynamicServiceRunningMessage -from pydantic import parse_raw_as from servicelib.logging_utils import log_context from simcore_service_efs_guardian.services.modules.redis import get_redis_lock_client @@ -14,8 +13,8 @@ async def process_dynamic_service_running_message(app: FastAPI, data: bytes) -> bool: assert app # nosec - rabbit_message: DynamicServiceRunningMessage = parse_raw_as( - DynamicServiceRunningMessage, data + rabbit_message: DynamicServiceRunningMessage = ( + DynamicServiceRunningMessage.model_validate_json(data) ) _logger.debug( "Process dynamic service running msg, project ID: %s node ID: %s, current user: %s", diff --git a/services/invitations/openapi.json b/services/invitations/openapi.json index 96630799e46e..508191b0419f 100644 --- a/services/invitations/openapi.json +++ b/services/invitations/openapi.json @@ -175,18 +175,32 @@ "description": "Invitee's email. 
Note that the registration can ONLY be used with this email" }, "trial_account_days": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Trial Account Days", - "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires", - "minimum": 0 + "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires" }, "extra_credits_in_usd": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Extra Credits In Usd", - "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD", - "minimum": 0 + "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD" }, "product": { "type": "string", @@ -208,13 +222,12 @@ "created" ], "title": "ApiInvitationContent", - "description": "Data in an invitation", "example": { - "issuer": "issuerid", + "created": "2023-01-11 13:11:47.293595", "guest": "invitedguest@company.com", - "trial_account_days": 2, + "issuer": "issuerid", "product": "osparc", - "created": "2023-01-11 13:11:47.293595" + "trial_account_days": 2 } }, "ApiInvitationContentAndLink": { @@ -233,18 +246,32 @@ "description": "Invitee's email. Note that the registration can ONLY be used with this email" }, "trial_account_days": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Trial Account Days", - "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires", - "minimum": 0 + "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires" }, "extra_credits_in_usd": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Extra Credits In Usd", - "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD", - "minimum": 0 + "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD" }, "product": { "type": "string", @@ -275,14 +302,13 @@ "invitation_url" ], "title": "ApiInvitationContentAndLink", - "description": "Data in an invitation", "example": { - "issuer": "issuerid", + "created": "2023-01-11 13:11:47.293595", "guest": "invitedguest@company.com", - "trial_account_days": 2, + "invitation_url": "https://foo.com/#/registration?invitation=1234", + "issuer": "issuerid", "product": "osparc", - "created": "2023-01-11 13:11:47.293595", - "invitation_url": "https://foo.com/#/registration?invitation=1234" + "trial_account_days": 2 } }, "ApiInvitationInputs": { @@ -301,21 +327,42 @@ "description": "Invitee's email. 
Note that the registration can ONLY be used with this email" }, "trial_account_days": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Trial Account Days", - "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires", - "minimum": 0 + "description": "If set, this invitation will activate a trial account.Sets the number of days from creation until the account expires" }, "extra_credits_in_usd": { - "type": "integer", - "exclusiveMinimum": true, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + { + "type": "null" + } + ], "title": "Extra Credits In Usd", - "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD", - "minimum": 0 + "description": "If set, the account's primary wallet will add extra credits corresponding to this ammount in USD" }, "product": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Product", "description": "If None, it will use INVITATIONS_DEFAULT_PRODUCT" } @@ -326,10 +373,9 @@ "guest" ], "title": "ApiInvitationInputs", - "description": "Input data necessary to create an invitation", "example": { - "issuer": "issuerid", "guest": "invitedguest@company.com", + "issuer": "issuerid", "trial_account_days": 2 } }, diff --git a/services/invitations/requirements/_base.in b/services/invitations/requirements/_base.in index 5ee2336503c6..2a775800f932 100644 --- a/services/invitations/requirements/_base.in +++ b/services/invitations/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in # service-library[fastapi] diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index e0f4fcf73b61..636facc1b1f8 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -21,6 +21,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -80,14 +82,8 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -224,12 +220,11 @@ psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycparser==2.21 # via cffi 
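Most of the openapi.json churn above is mechanical: pydantic v2 renders optional fields (`X | None`) in the JSON schema as an explicit `anyOf` with a `"type": "null"` branch, where v1 emitted a bare typed property, and regenerating the spec also reorders the example keys alphabetically. A minimal sketch of where the new shape comes from; the `Example` model is hypothetical, mirroring the `trial_account_days` field:

```python
from pydantic import BaseModel, PositiveInt


class Example(BaseModel):
    # an optional positive integer, as in ApiInvitationInputs
    trial_account_days: PositiveInt | None = None


prop = Example.model_json_schema()["properties"]["trial_account_days"]

# v2 lists both branches of the union explicitly
assert {"type": "null"} in prop["anyOf"]
assert any(branch.get("type") == "integer" for branch in prop["anyOf"])
```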
-pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -240,6 +235,20 @@ pydantic==1.10.14 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -247,7 +256,9 @@ pyinstrument==4.6.2 python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn pyyaml==6.0.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -295,7 +306,7 @@ sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -326,6 +337,7 @@ typing-extensions==4.10.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/services/invitations/requirements/ci.txt b/services/invitations/requirements/ci.txt index 739339bee8cd..bae114603766 100644 --- a/services/invitations/requirements/ci.txt +++ b/services/invitations/requirements/ci.txt @@ -12,10 +12,11 @@ --requirement _tools.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library +simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library simcore-settings-library @ ../../packages/settings-library -simcore-models-library @ ../../packages/models-library # installs current package simcore-service-invitations @ . 
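The invitations source hunks below repeat the three v2 idioms that recur throughout this diff: `parse_obj_as(T, v)` becomes `TypeAdapter(T).validate_python(v)`, `Field(env=[...])` becomes `Field(validation_alias=AliasChoices(...))`, and `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")`. A minimal self-contained sketch; the settings class and env-var names are illustrative, not from this repo:

```python
import os

from pydantic import AliasChoices, Field, TypeAdapter, field_validator
from pydantic_settings import BaseSettings  # BaseSettings moved out of pydantic core


class ExampleSettings(BaseSettings):
    # v1: MY_LOGLEVEL: str = Field("INFO", env=["MY_LOGLEVEL", "LOG_LEVEL"])
    MY_LOGLEVEL: str = Field(
        default="INFO",
        validation_alias=AliasChoices("MY_LOGLEVEL", "LOG_LEVEL"),
    )

    # v1: @validator("MY_LOGLEVEL", pre=True)
    @field_validator("MY_LOGLEVEL", mode="before")
    @classmethod
    def _upper(cls, value: str) -> str:
        return value.upper()


# the second alias choice picks up the generic env var
os.environ["LOG_LEVEL"] = "debug"
assert ExampleSettings().MY_LOGLEVEL == "DEBUG"

# v1: parse_obj_as(int, "8080")
assert TypeAdapter(int).validate_python("8080") == 8080
```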
diff --git a/services/invitations/requirements/dev.txt b/services/invitations/requirements/dev.txt index a45c4db89180..1de98a1f08ab 100644 --- a/services/invitations/requirements/dev.txt +++ b/services/invitations/requirements/dev.txt @@ -12,10 +12,12 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library +--editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] --editable ../../packages/settings-library ---editable ../../packages/models-library + # installs current package --editable . diff --git a/services/invitations/src/simcore_service_invitations/api/_invitations.py b/services/invitations/src/simcore_service_invitations/api/_invitations.py index 4422bda31358..0a680189c61c 100644 --- a/services/invitations/src/simcore_service_invitations/api/_invitations.py +++ b/services/invitations/src/simcore_service_invitations/api/_invitations.py @@ -48,11 +48,11 @@ async def create_invitation( default_product=settings.INVITATIONS_DEFAULT_PRODUCT, ) invitation = ApiInvitationContentAndLink( - **invitation_content.dict(), + **invitation_content.model_dump(), invitation_url=invitation_link, ) - _logger.info("New invitation: %s", f"{invitation.json(indent=1)}") + _logger.info("New invitation: %s", f"{invitation.model_dump_json(indent=1)}") return invitation diff --git a/services/invitations/src/simcore_service_invitations/api/_meta.py b/services/invitations/src/simcore_service_invitations/api/_meta.py index 621f3b4445e3..49d3f4b48cf9 100644 --- a/services/invitations/src/simcore_service_invitations/api/_meta.py +++ b/services/invitations/src/simcore_service_invitations/api/_meta.py @@ -2,7 +2,8 @@ from collections.abc import Callable from fastapi import APIRouter, Depends -from pydantic import BaseModel, HttpUrl +from pydantic import BaseModel +from common_library.pydantic_networks_extension import HttpUrlLegacy from .._meta import API_VERSION, PROJECT_NAME from ._dependencies import get_reverse_url_mapper @@ -20,7 +21,7 @@ class _Meta(BaseModel): name: str version: str - docs_url: HttpUrl + docs_url: HttpUrlLegacy # diff --git a/services/invitations/src/simcore_service_invitations/cli.py b/services/invitations/src/simcore_service_invitations/cli.py index dffb1dca32f9..67838b046155 100644 --- a/services/invitations/src/simcore_service_invitations/cli.py +++ b/services/invitations/src/simcore_service_invitations/cli.py @@ -5,7 +5,7 @@ from cryptography.fernet import Fernet from models_library.emails import LowerCaseEmailStr from models_library.invitations import InvitationContent, InvitationInputs -from pydantic import EmailStr, HttpUrl, ValidationError, parse_obj_as +from pydantic import EmailStr, HttpUrl, TypeAdapter, ValidationError from rich.console import Console from servicelib.utils_secrets import generate_password from settings_library.utils_cli import ( @@ -96,19 +96,19 @@ def invite( ctx: typer.Context, email: str = typer.Argument( ..., - callback=lambda v: parse_obj_as(LowerCaseEmailStr, v), + callback=lambda v: TypeAdapter(LowerCaseEmailStr).validate_python(v), help="Custom invitation for a given guest", ), issuer: str = typer.Option( - ..., help=InvitationInputs.__fields__["issuer"].field_info.description + ..., help=InvitationInputs.model_fields["issuer"].description ), trial_account_days: int = typer.Option( None, - help=InvitationInputs.__fields__["trial_account_days"].field_info.description, + help=InvitationInputs.model_fields["trial_account_days"].description, 
), product: str = typer.Option( None, - help=InvitationInputs.__fields__["product"].field_info.description, + help=InvitationInputs.model_fields["product"].description, ), ): """Creates an invitation link for user with 'email' and issued by 'issuer'""" @@ -117,7 +117,7 @@ def invite( invitation_data = InvitationInputs( issuer=issuer, - guest=parse_obj_as(EmailStr, email), + guest=TypeAdapter(EmailStr).validate_python(email), trial_account_days=trial_account_days, extra_credits_in_usd=None, product=product, @@ -125,7 +125,7 @@ def invite( invitation_link, _ = create_invitation_link_and_content( invitation_data=invitation_data, - secret_key=settings.INVITATIONS_SECRET_KEY.get_secret_value().encode(), + secret_key=settings.INVITATIONS_SECRET_KEY.get_secret_value().encode(), # pylint:disable=no-member base_url=settings.INVITATIONS_OSPARC_URL, default_product=settings.INVITATIONS_DEFAULT_PRODUCT, ) @@ -142,14 +142,14 @@ def extract(ctx: typer.Context, invitation_url: str): try: invitation: InvitationContent = extract_invitation_content( invitation_code=extract_invitation_code_from_query( - parse_obj_as(HttpUrl, invitation_url) + TypeAdapter(HttpUrl).validate_python(invitation_url) ), - secret_key=settings.INVITATIONS_SECRET_KEY.get_secret_value().encode(), + secret_key=settings.INVITATIONS_SECRET_KEY.get_secret_value().encode(), # pylint:disable=no-member default_product=settings.INVITATIONS_DEFAULT_PRODUCT, ) assert invitation.product is not None # nosec - print(invitation.json(indent=1)) # noqa: T201 + print(invitation.model_dump_json(indent=1)) # noqa: T201 except (InvalidInvitationCodeError, ValidationError): _err_console.print("[bold red]Invalid code[/bold red]") diff --git a/services/invitations/src/simcore_service_invitations/core/settings.py b/services/invitations/src/simcore_service_invitations/core/settings.py index 35bdd32ab317..ea3b04c33ac6 100644 --- a/services/invitations/src/simcore_service_invitations/core/settings.py +++ b/services/invitations/src/simcore_service_invitations/core/settings.py @@ -1,7 +1,14 @@ from functools import cached_property from models_library.products import ProductName -from pydantic import Field, HttpUrl, PositiveInt, SecretStr, validator +from pydantic import ( + AliasChoices, + Field, + HttpUrl, + PositiveInt, + SecretStr, + field_validator, +) from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag from settings_library.tracing import TracingSettings @@ -39,14 +46,15 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- INVITATIONS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, env=["INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + default=LogLevel.INFO, + validation_alias=AliasChoices("INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -54,7 +62,7 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self): return self.INVITATIONS_LOGLEVEL - @validator("INVITATIONS_LOGLEVEL", pre=True) + @field_validator("INVITATIONS_LOGLEVEL", mode="before") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -104,5 +112,6 @@ class ApplicationSettings(MinimalApplicationSettings): ) INVITATIONS_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True INVITATIONS_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", ) diff --git a/services/invitations/src/simcore_service_invitations/services/invitations.py b/services/invitations/src/simcore_service_invitations/services/invitations.py index d5bed8662cfb..1cdc33f40855 100644 --- a/services/invitations/src/simcore_service_invitations/services/invitations.py +++ b/services/invitations/src/simcore_service_invitations/services/invitations.py @@ -1,18 +1,22 @@ import base64 import binascii import logging -from typing import Any, ClassVar, cast from urllib import parse +from common_library.pydantic_networks_extension import HttpUrlLegacy from cryptography.fernet import Fernet, InvalidToken from models_library.invitations import InvitationContent, InvitationInputs from models_library.products import ProductName -from pydantic import HttpUrl, ValidationError, parse_obj_as +from pydantic import ConfigDict, HttpUrl, TypeAdapter, ValidationError from starlette.datastructures import URL _logger = logging.getLogger(__name__) +def _to_initial(v: str): + return v[0] + + class InvalidInvitationCodeError(Exception): ... @@ -23,9 +27,9 @@ class _ContentWithShortNames(InvitationContent): @classmethod def serialize(cls, model_obj: InvitationContent) -> str: """Exports to json using *short* aliases and values in order to produce shorter codes""" - model_w_short_aliases_json: str = cls.construct( - **model_obj.dict(exclude_unset=True) - ).json(exclude_unset=True, by_alias=True) + model_w_short_aliases_json: str = cls.model_construct( + **model_obj.model_dump(exclude_unset=True) + ).model_dump_json(exclude_unset=True, by_alias=True) # NOTE: json arguments try to minimize the amount of data # serialized. The CONS is that it relies on models in the code # that might change over time. 
This might lead to some datasets in codes
@@ -35,36 +39,18 @@ def serialize(cls, model_obj: InvitationContent) -> str:
     @classmethod
     def deserialize(cls, raw_json: str) -> InvitationContent:
         """Parses a json string and returns InvitationContent model"""
-        model_w_short_aliases = cls.parse_raw(raw_json)
-        return InvitationContent.construct(
-            **model_w_short_aliases.dict(exclude_unset=True)
+        model_w_short_aliases = cls.model_validate_json(raw_json)
+        return InvitationContent.model_construct(
+            **model_w_short_aliases.model_dump(exclude_unset=True)
         )
 
-    class Config:
-        allow_population_by_field_name = True  # NOTE: can parse using field names
-        allow_mutation = False
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(
         # NOTE: Can export with alias: short aliases to minimize the size of serialization artifact
-        fields: ClassVar[dict[str, Any]] = {
-            "issuer": {
-                "alias": "i",
-            },
-            "guest": {
-                "alias": "g",
-            },
-            "trial_account_days": {
-                "alias": "t",
-            },
-            "extra_credits_in_usd": {
-                "alias": "e",
-            },
-            "product": {
-                "alias": "p",
-            },
-            "created": {
-                "alias": "c",
-            },
-        }
+        alias_generator=_to_initial,
+        populate_by_name=True,  # NOTE: can parse using field names
+        frozen=True,
+        str_strip_whitespace=True,
+    )
 
 
 #
@@ -79,9 +65,9 @@ def _build_link(
     r = URL("/registration").include_query_params(invitation=code_url_safe)
 
     # Adds query to fragment
-    base_url = f"{base_url.rstrip('/')}/"
+    base_url = f"{base_url}/"
     url = URL(base_url).replace(fragment=f"{r}")
-    return cast(HttpUrl, parse_obj_as(HttpUrl, f"{url}"))
+    return TypeAdapter(HttpUrlLegacy).validate_python(f"{url}")
 
 
 def _fernet_encrypt_as_urlsafe_code(
@@ -124,7 +110,7 @@ def create_invitation_link_and_content(
     code = _create_invitation_code(content, secret_key)
     # Adds message as the invitation in query
     link = _build_link(
-        base_url=base_url,
+        base_url=f"{base_url}",
         code_url_safe=code.decode(),
     )
     return link, content
diff --git a/services/invitations/tests/unit/api/conftest.py b/services/invitations/tests/unit/api/conftest.py
index f4151fcc5193..c558ac496ad6 100644
--- a/services/invitations/tests/unit/api/conftest.py
+++ b/services/invitations/tests/unit/api/conftest.py
@@ -18,7 +18,7 @@ def client(app_environment: EnvVarsDict) -> Iterator[TestClient]:
     print(f"app_environment={json.dumps(app_environment)}")
 
     app = create_app()
-    print("settings:\n", app.state.settings.json(indent=1))
+    print("settings:\n", app.state.settings.model_dump_json(indent=1))
 
     with TestClient(app, base_url="http://testserver.test") as client:
         yield client
diff --git a/services/invitations/tests/unit/api/test_api_invitations.py b/services/invitations/tests/unit/api/test_api_invitations.py
index 572f30f8173b..95d5eeeece0f 100644
--- a/services/invitations/tests/unit/api/test_api_invitations.py
+++ b/services/invitations/tests/unit/api/test_api_invitations.py
@@ -23,7 +23,7 @@
 
 
 @settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
-@given(invitation_input=st.builds(InvitationInputs))
+@given(invitation_input=st.builds(InvitationInputs, guest=st.emails()))
 def test_create_invitation(
     invitation_input: InvitationInputs,
     client: TestClient,
 ):
     response = client.post(
         f"/{API_VTAG}/invitations",
-        json=invitation_input.dict(exclude_none=True),
+        json=invitation_input.model_dump(exclude_none=True),
         auth=basic_auth,
     )
     assert response.status_code == status.HTTP_200_OK, f"{response.json()=}"
@@ -64,20 +64,20 @@ def test_check_invitation(
     # up to here, identical to above.
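The `_ContentWithShortNames` rewrite above trades v1's per-field alias table for `ConfigDict(alias_generator=_to_initial, ...)`, which derives each alias from the first letter of the field name; `populate_by_name=True` keeps field names accepted on input, while `frozen=True` and `str_strip_whitespace=True` replace `allow_mutation = False` and `anystr_strip_whitespace`. A self-contained sketch of the round trip with a simplified two-field model, not the real `InvitationContent`:

```python
from pydantic import BaseModel, ConfigDict


def _to_initial(field_name: str) -> str:
    return field_name[0]


class _Short(BaseModel):
    # every field is exported under its first letter: issuer -> "i", guest -> "g"
    model_config = ConfigDict(alias_generator=_to_initial, populate_by_name=True)

    issuer: str
    guest: str


original = _Short(issuer="issuerid", guest="guest@company.com")

# export with aliases yields the shorter payload, as in serialize()
raw = original.model_dump_json(by_alias=True)
assert raw == '{"i":"issuerid","g":"guest@company.com"}'

# aliases are also accepted on input, so deserialize() restores the model
assert _Short.model_validate_json(raw) == original
```

The generated aliases must stay unique, which is exactly what the new `test_aliases_uniqueness` below guards against.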
# Let's use invitation link - invitation_url = ApiInvitationContentAndLink.parse_obj( + invitation_url = ApiInvitationContentAndLink.model_validate( response.json() ).invitation_url # check invitation_url response = client.post( f"/{API_VTAG}/invitations:extract", - json={"invitation_url": invitation_url}, + json={"invitation_url": f"{invitation_url}"}, auth=basic_auth, ) assert response.status_code == 200, f"{response.json()=}" # decrypted invitation should be identical to request above - invitation = InvitationContent.parse_obj(response.json()) + invitation = InvitationContent.model_validate(response.json()) assert invitation.issuer == invitation_data.issuer assert invitation.guest == invitation_data.guest assert invitation.trial_account_days == invitation_data.trial_account_days @@ -106,7 +106,7 @@ def test_check_valid_invitation( assert response.status_code == 200, f"{response.json()=}" # decrypted invitation should be identical to request above - invitation = InvitationContent.parse_obj(response.json()) + invitation = InvitationContent.model_validate(response.json()) assert invitation.issuer == invitation_data.issuer assert invitation.guest == invitation_data.guest diff --git a/services/invitations/tests/unit/api/test_api_meta.py b/services/invitations/tests/unit/api/test_api_meta.py index cee4afd13c95..4fe4f39b22ca 100644 --- a/services/invitations/tests/unit/api/test_api_meta.py +++ b/services/invitations/tests/unit/api/test_api_meta.py @@ -19,7 +19,7 @@ def test_healthcheck(client: TestClient): def test_meta(client: TestClient): response = client.get(f"/{API_VTAG}/meta") assert response.status_code == status.HTTP_200_OK - meta = _Meta.parse_obj(response.json()) + meta = _Meta.model_validate(response.json()) - response = client.get(meta.docs_url) + response = client.get(f"{meta.docs_url}") assert response.status_code == status.HTTP_200_OK diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index 1b6ea4ee6e9e..fde239d53326 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -109,4 +109,4 @@ def invitation_data( if product: kwargs["product"] = product - return InvitationInputs.parse_obj(kwargs) + return InvitationInputs.model_validate(kwargs) diff --git a/services/invitations/tests/unit/test__model_examples.py b/services/invitations/tests/unit/test__model_examples.py index 31ed0dfc603e..78dfdd96669c 100644 --- a/services/invitations/tests/unit/test__model_examples.py +++ b/services/invitations/tests/unit/test__model_examples.py @@ -26,4 +26,4 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + assert model_cls.model_validate(example_data) diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py index e77247115cf1..0ca095f19154 100644 --- a/services/invitations/tests/unit/test_cli.py +++ b/services/invitations/tests/unit/test_cli.py @@ -8,6 +8,7 @@ import pytest from faker import Faker from models_library.products import ProductName +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_invitations._meta import API_VERSION @@ -71,7 +72,10 @@ def test_invite_user_and_check_invitation( env=environs, ) assert result.exit_code == os.EX_OK, result.output - assert 
expected == InvitationInputs.parse_raw(result.stdout).dict() + assert ( + expected + == TypeAdapter(InvitationInputs).validate_json(result.stdout).model_dump() + ) def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): @@ -82,7 +86,7 @@ def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): environs = load_dotenv(result.stdout) envs = setenvs_from_dict(monkeypatch, environs) - settings_from_obj = ApplicationSettings.parse_obj(envs) + settings_from_obj = ApplicationSettings.model_validate(envs) settings_from_envs = ApplicationSettings.create_from_envs() assert settings_from_envs == settings_from_obj @@ -93,5 +97,5 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): assert result.exit_code == os.EX_OK, result.output print(result.output) - settings = ApplicationSettings.parse_raw(result.output) + settings = ApplicationSettings.model_validate_json(result.output) assert settings == ApplicationSettings.create_from_envs() diff --git a/services/invitations/tests/unit/test_invitations.py b/services/invitations/tests/unit/test_invitations.py index edd9c01556b0..0e6ea34b18cd 100644 --- a/services/invitations/tests/unit/test_invitations.py +++ b/services/invitations/tests/unit/test_invitations.py @@ -5,6 +5,7 @@ import binascii from datetime import datetime, timezone +from typing import Counter from urllib import parse import cryptography.fernet @@ -28,7 +29,7 @@ def test_all_invitation_fields_have_short_and_unique_aliases(): # all have short alias all_alias = [] - for field in _ContentWithShortNames.__fields__.values(): + for field in _ContentWithShortNames.model_fields.values(): assert field.alias assert field.alias not in all_alias all_alias.append(field.alias) @@ -38,7 +39,7 @@ def test_import_and_export_invitation_alias_by_alias( invitation_data: InvitationInputs, ): expected_content = InvitationContent( - **invitation_data.dict(), + **invitation_data.model_dump(), created=datetime.now(tz=timezone.utc), ) raw_data = _ContentWithShortNames.serialize(expected_content) @@ -51,13 +52,13 @@ def test_export_by_alias_produces_smaller_strings( invitation_data: InvitationInputs, ): content = InvitationContent( - **invitation_data.dict(), + **invitation_data.model_dump(), created=datetime.now(tz=timezone.utc), ) raw_data = _ContentWithShortNames.serialize(content) # export by alias produces smaller strings - assert len(raw_data) < len(content.json()) + assert len(raw_data) < len(content.model_dump_json()) def test_create_and_decrypt_invitation( @@ -72,8 +73,8 @@ def test_create_and_decrypt_invitation( base_url=faker.url(), default_product=default_product, ) - assert invitation_link.fragment - query_params = dict(parse.parse_qsl(URL(invitation_link.fragment).query)) + assert URL(invitation_link).fragment + query_params = dict(parse.parse_qsl((URL(URL(invitation_link).fragment).query))) # will raise TokenError or ValidationError invitation = decrypt_invitation( @@ -85,9 +86,9 @@ def test_create_and_decrypt_invitation( assert isinstance(invitation, InvitationContent) assert invitation.product is not None - expected = invitation_data.dict(exclude_none=True) + expected = invitation_data.model_dump(exclude_none=True) expected.setdefault("product", default_product) - assert invitation.dict(exclude={"created"}, exclude_none=True) == expected + assert invitation.model_dump(exclude={"created"}, exclude_none=True) == expected # @@ -116,9 +117,9 @@ def test_valid_invitation_code( default_product=default_product, ) - expected = 
invitation_data.dict(exclude_none=True) + expected = invitation_data.model_dump(exclude_none=True) expected.setdefault("product", default_product) - assert invitation.dict(exclude={"created"}, exclude_none=True) == expected + assert invitation.model_dump(exclude={"created"}, exclude_none=True) == expected def test_invalid_invitation_encoding( @@ -176,7 +177,7 @@ class OtherModel(BaseModel): secret = secret_key.encode() other_code = _fernet_encrypt_as_urlsafe_code( - data=OtherModel().json().encode(), secret_key=secret + data=OtherModel().model_dump_json().encode(), secret_key=secret ) with pytest.raises(ValidationError): @@ -192,3 +193,7 @@ class OtherModel(BaseModel): secret_key=secret, default_product=default_product, ) + + +def test_aliases_uniqueness(): + assert not [item for item, count in Counter([field.alias for field in _ContentWithShortNames.model_fields.values()]).items() if count > 1] #nosec diff --git a/services/payments/openapi.json b/services/payments/openapi.json index b4b49c630e3f..47e509314add 100644 --- a/services/payments/openapi.json +++ b/services/payments/openapi.json @@ -99,28 +99,33 @@ "summary": "Acknowledge Payment", "description": "completes (ie. ack) request initated by `/init` on the payments-gateway API", "operationId": "acknowledge_payment_v1_payments__payment_id__ack_post", + "security": [ + { + "OAuth2PasswordBearer": [] + } + ], "parameters": [ { + "name": "payment_id", + "in": "path", "required": true, "schema": { "type": "string", - "maxLength": 100, "minLength": 1, + "maxLength": 100, "title": "Payment Id" - }, - "name": "payment_id", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AckPayment" } } - }, - "required": true + } }, "responses": { "200": { @@ -141,12 +146,7 @@ } } } - }, - "security": [ - { - "OAuth2PasswordBearer": [] - } - ] + } } }, "/v1/payments-methods/{payment_method_id}:ack": { @@ -157,28 +157,33 @@ "summary": "Acknowledge Payment Method", "description": "completes (ie. ack) request initated by `/payments-methods:init` on the payments-gateway API", "operationId": "acknowledge_payment_method_v1_payments_methods__payment_method_id__ack_post", + "security": [ + { + "OAuth2PasswordBearer": [] + } + ], "parameters": [ { + "name": "payment_method_id", + "in": "path", "required": true, "schema": { "type": "string", - "maxLength": 100, "minLength": 1, + "maxLength": 100, "title": "Payment Method Id" - }, - "name": "payment_method_id", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AckPaymentMethod" } } - }, - "required": true + } }, "responses": { "200": { @@ -199,12 +204,7 @@ } } } - }, - "security": [ - { - "OAuth2PasswordBearer": [] - } - ] + } } } }, @@ -217,53 +217,97 @@ "title": "Success" }, "message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Message" }, "provider_payment_id": { - "type": "string", - "maxLength": 100, - "minLength": 1, + "anyOf": [ + { + "type": "string", + "maxLength": 100, + "minLength": 1 + }, + { + "type": "null" + } + ], "title": "Provider Payment Id", "description": "Payment ID from the provider (e.g. 
stripe payment ID)" }, "invoice_url": { - "type": "string", - "maxLength": 2083, - "minLength": 1, - "format": "uri", + "anyOf": [ + { + "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], "title": "Invoice Url", "description": "Link to invoice is required when success=true" }, "invoice_pdf": { - "type": "string", - "maxLength": 2083, - "minLength": 1, - "format": "uri", + "anyOf": [ + { + "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri" + }, + { + "type": "null" + } + ], "title": "Invoice Pdf", "description": "Link to invoice PDF" }, "stripe_invoice_id": { - "type": "string", - "maxLength": 100, - "minLength": 1, + "anyOf": [ + { + "type": "string", + "maxLength": 100, + "minLength": 1 + }, + { + "type": "null" + } + ], "title": "Stripe Invoice Id", "description": "Stripe invoice ID" }, "stripe_customer_id": { - "type": "string", - "maxLength": 100, - "minLength": 1, + "anyOf": [ + { + "type": "string", + "maxLength": 100, + "minLength": 1 + }, + { + "type": "null" + } + ], "title": "Stripe Customer Id", "description": "Stripe customer ID" }, "saved": { - "allOf": [ + "anyOf": [ { "$ref": "#/components/schemas/SavedPaymentMethod" + }, + { + "type": "null" } ], - "title": "Saved", "description": "Gets the payment-method if user opted to save it during payment.If used did not opt to save of payment-method was already saved, then it defaults to None" } }, @@ -273,13 +317,13 @@ ], "title": "AckPayment", "example": { - "success": true, - "provider_payment_id": "pi_123ABC", "invoice_url": "https://invoices.com/id=12345", + "provider_payment_id": "pi_123ABC", "saved": { - "success": true, - "payment_method_id": "3FA85F64-5717-4562-B3FC-2C963F66AFA6" - } + "payment_method_id": "3FA85F64-5717-4562-B3FC-2C963F66AFA6", + "success": true + }, + "success": true } }, "AckPaymentMethod": { @@ -289,7 +333,14 @@ "title": "Success" }, "message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Message" } }, @@ -302,8 +353,15 @@ "Body_login_to_create_access_token": { "properties": { "grant_type": { - "type": "string", - "pattern": "password", + "anyOf": [ + { + "type": "string", + "pattern": "password" + }, + { + "type": "null" + } + ], "title": "Grant Type" }, "username": { @@ -320,11 +378,25 @@ "default": "" }, "client_id": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Client Id" }, "client_secret": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Client Secret" } }, @@ -360,11 +432,18 @@ "title": "Version" }, "released": { - "additionalProperties": { - "type": "string", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$" - }, - "type": "object", + "anyOf": [ + { + "additionalProperties": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$" + }, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Released", "description": "Maps every route's path tag with a released version" }, @@ -384,9 +463,9 @@ ], "title": "Meta", "example": { + "docs_url": "https://foo.io/doc", "name": "simcore_service_payments", - "version": "2.4.45", - "docs_url": "https://foo.io/doc" + "version": 
"2.4.45" } }, "SavedPaymentMethod": { @@ -396,7 +475,14 @@ "title": "Success" }, "message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Message" }, "payment_method_id": { @@ -424,6 +510,7 @@ "enum": [ "bearer" ], + "const": "bearer", "title": "Token Type" } }, diff --git a/services/payments/requirements/_base.in b/services/payments/requirements/_base.in index da3813cc2bb1..6c79c0abca7c 100644 --- a/services/payments/requirements/_base.in +++ b/services/payments/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index 28bfb2a340b2..812797c67e2a 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -12,11 +12,18 @@ aiohappyeyeballs==2.3.4 # via aiohttp aiohttp==3.10.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # aiodocker @@ -28,6 +35,8 @@ aiosmtplib==3.0.2 # via -r requirements/_base.in alembic==1.13.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -55,11 +64,18 @@ bidict==0.23.1 # via python-socketio certifi==2024.7.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # httpcore
@@ -75,11 +91,18 @@ click==8.1.7
     # uvicorn
 cryptography==43.0.0
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/_base.in
@@ -97,15 +120,8 @@ email-validator==2.2.0
     # via pydantic
 fast-depends==2.4.12
     # via faststream
-fastapi==0.99.1
+fastapi==0.115.2
     # via
-    # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/service-library/requirements/_fastapi.in
     # -r requirements/_base.in
     # prometheus-fastapi-instrumentator
@@ -134,11 +150,18 @@ httptools==0.6.1
     # via uvicorn
 httpx==0.27.0
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/service-library/requirements/_fastapi.in
@@ -154,11 +177,18 @@ importlib-metadata==8.0.0
     # via opentelemetry-api
 jinja2==3.1.4
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/_base.in
@@ -170,11 +200,18 @@ jsonschema-specifications==2023.7.1
     # via jsonschema
 mako==1.3.5
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # alembic
@@ -245,11 +282,18 @@ opentelemetry-util-http==0.47b0
     # opentelemetry-instrumentation-requests
 orjson==3.10.6
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/models-library/requirements/_base.in
@@ -278,24 +322,51 @@ pyasn1==0.6.0
     # rsa
 pycparser==2.22
     # via cffi
-pydantic==1.10.17
+pydantic==2.9.2
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
+    # -r requirements/../../../packages/common-library/requirements/_base.in
+    # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
     # -r requirements/../../../packages/models-library/requirements/_base.in
+    # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in
     # -r requirements/../../../packages/postgres-database/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
     # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     # -r requirements/../../../packages/service-library/requirements/_base.in
+    # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     # -r requirements/../../../packages/settings-library/requirements/_base.in
     # fast-depends
     # fastapi
+    # pydantic-extra-types
+    # pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    # -r requirements/../../../packages/models-library/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+pydantic-settings==2.5.2
+    # via
+    # -r requirements/../../../packages/models-library/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    # -r requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 pyinstrument==4.6.2
@@ -303,7 +374,9 @@ python-dateutil==2.9.0.post0
     # via arrow
 python-dotenv==1.0.1
-    # via uvicorn
+    # via
+    # pydantic-settings
+    # uvicorn
 python-engineio==4.9.1
     # via python-socketio
 python-jose==3.3.0
@@ -314,22 +387,36 @@ python-socketio==5.11.3
     # via -r requirements/_base.in
 pyyaml==6.0.1
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/service-library/requirements/_base.in
     # uvicorn
 redis==5.0.4
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/service-library/requirements/_base.in
@@ -353,11 +440,18 @@ rpds-py==0.19.1
     # referencing
 rsa==4.9
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # python-jose
@@ -377,22 +471,36 @@ sniffio==1.3.1
     # httpx
 sqlalchemy==1.4.53
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # -r requirements/../../../packages/postgres-database/requirements/_base.in
     # alembic
-starlette==0.27.0
+starlette==0.40.0
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # fastapi
@@ -418,14 +526,22 @@ typing-extensions==4.12.2
     # faststream
     # opentelemetry-sdk
     # pydantic
+    # pydantic-core
     # typer
 urllib3==2.2.2
     # via
+    # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     # -c requirements/../../../requirements/constraints.txt
     # requests
diff --git a/services/payments/requirements/ci.txt b/services/payments/requirements/ci.txt
index e20936f6623c..562c7eb6d849 100644
--- a/services/payments/requirements/ci.txt
+++ b/services/payments/requirements/ci.txt
@@ -12,10 +12,12 @@
 --requirement _tools.txt
 
 # installs this repo's packages
+simcore-common-library @ ../../packages/common-library
 simcore-models-library @ ../../packages/models-library
 simcore-postgres-database @ ../../packages/postgres-database
 pytest-simcore @ ../../packages/pytest-simcore
 simcore-service-library[fastapi] @ ../../packages/service-library
 simcore-settings-library @ ../../packages/settings-library
+
 # installs current package
 simcore-service-payments @ .
diff --git a/services/payments/requirements/dev.txt b/services/payments/requirements/dev.txt
index b20ade64f27f..80aeaf26dbed 100644
--- a/services/payments/requirements/dev.txt
+++ b/services/payments/requirements/dev.txt
@@ -12,6 +12,7 @@
 --requirement _tools.txt
 
 # installs this repo's packages
+--editable ../../packages/common-library
 --editable ../../packages/models-library
 --editable ../../packages/postgres-database
 --editable ../../packages/pytest-simcore
diff --git a/services/payments/scripts/example_payment_gateway.py b/services/payments/scripts/example_payment_gateway.py
index aca2643a5553..bc1e93bc59fe 100755
--- a/services/payments/scripts/example_payment_gateway.py
+++ b/services/payments/scripts/example_payment_gateway.py
@@ -171,7 +171,7 @@ async def ack_payment(id_: PaymentID, acked: AckPayment, settings: Settings):
     async with httpx.AsyncClient() as client:
         await client.post(
             f"{settings.PAYMENTS_SERVICE_API_BASE_URL}/v1/payments/{id_}:ack",
-            json=acked.dict(),
+            json=acked.model_dump(),
             auth=PaymentsAuth(
                 username=settings.PAYMENTS_USERNAME,
                 password=settings.PAYMENTS_PASSWORD.get_secret_value(),
@@ -185,7 +185,7 @@ async def ack_payment_method(
     async with httpx.AsyncClient() as client:
         await client.post(
             f"{settings.PAYMENTS_SERVICE_API_BASE_URL}/v1/payments-methods/{id_}:ack",
-            json=acked.dict(),
+            json=acked.model_dump(),
             auth=PaymentsAuth(
                 username=settings.PAYMENTS_USERNAME,
                 password=settings.PAYMENTS_PASSWORD.get_secret_value(),
diff --git a/services/payments/src/simcore_service_payments/cli.py b/services/payments/src/simcore_service_payments/cli.py
index 64c67d00e8ff..db884e5f99f3 100644
--- a/services/payments/src/simcore_service_payments/cli.py
+++ b/services/payments/src/simcore_service_payments/cli.py
@@ -3,6 +3,7 @@
 import os
 
 import typer
+from models_library.utils.json_serialization import json_dumps
 from servicelib.utils_secrets import generate_password, generate_token_secret_key
 from settings_library.postgres import PostgresSettings
 from settings_library.rabbit import RabbitSettings
@@ -20,7 +21,9 @@
 main = typer.Typer(name=PROJECT_NAME)
 
 main.command()(
-    create_settings_command(settings_cls=ApplicationSettings, logger=_logger)
+    create_settings_command(
+        settings_cls=ApplicationSettings, logger=_logger, json_serializer=json_dumps
+    )
 )
 main.callback()(create_version_callback(__version__))
 
diff --git a/services/payments/src/simcore_service_payments/core/errors.py b/services/payments/src/simcore_service_payments/core/errors.py
index 5fce135e8005..8b5113891fc5 100644
--- a/services/payments/src/simcore_service_payments/core/errors.py
+++ b/services/payments/src/simcore_service_payments/core/errors.py
@@ -1,7 +1,7 @@
-from pydantic.errors import PydanticErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 
 
-class _BaseAppError(PydanticErrorMixin, ValueError):
+class _BaseAppError(OsparcErrorMixin, ValueError):
     @classmethod
     def get_full_class_name(cls) -> str:
         # Can be used as unique code identifier
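Pydantic v2 removed `pydantic.errors.PydanticErrorMixin`, so the error hierarchy above is rebased onto the repo's new `common_library.errors_classes.OsparcErrorMixin`. The sketch below shows the usage pattern this implies; since `OsparcErrorMixin` is internal to this repo, its exact behavior (a `msg_template` rendered with the keyword arguments passed at raise time) is assumed from this diff rather than documented API:

```python
# Minimal sketch of the mixin-based error pattern, assuming OsparcErrorMixin
# formats msg_template with the kwargs given at raise time (as the diff implies).
from common_library.errors_classes import OsparcErrorMixin


class _BaseAppError(OsparcErrorMixin, ValueError):
    ...


class PaymentNotFoundError(_BaseAppError):
    msg_template = "Payment {payment_id} was not found"


try:
    raise PaymentNotFoundError(payment_id="12345")
except PaymentNotFoundError as err:
    assert "12345" in f"{err}"  # message rendered from msg_template
```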
diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py
index b56c79322cbc..4b7965f45581 100644
--- a/services/payments/src/simcore_service_payments/core/settings.py
+++ b/services/payments/src/simcore_service_payments/core/settings.py
@@ -1,14 +1,16 @@
 from functools import cached_property
 
+from common_library.pydantic_networks_extension import HttpUrlLegacy
 from models_library.basic_types import NonNegativeDecimal
 from pydantic import (
+    AliasChoices,
+    ConfigDict,
     EmailStr,
     Field,
-    HttpUrl,
     PositiveFloat,
     SecretStr,
-    parse_obj_as,
-    validator,
+    TypeAdapter,
+    field_validator,
 )
 from settings_library.application import BaseApplicationSettings
 from settings_library.basic_types import LogLevel, VersionTag
@@ -28,19 +30,19 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
     # CODE STATICS ---------------------------------------------------------
     API_VERSION: str = API_VERSION
     APP_NAME: str = PROJECT_NAME
-    API_VTAG: VersionTag = parse_obj_as(VersionTag, API_VTAG)
+    API_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python(API_VTAG)
 
     # RUNTIME -----------------------------------------------------------
     PAYMENTS_LOGLEVEL: LogLevel = Field(
-        default=LogLevel.INFO, env=["PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"]
+        default=LogLevel.INFO,
+        validation_alias=AliasChoices("PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"),
     )
     PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
         default=False,
-        env=[
-            "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED",
-            "LOG_FORMAT_LOCAL_DEV_ENABLED",
-        ],
+        validation_alias=AliasChoices(
+            "LOG_FORMAT_LOCAL_DEV_ENABLED", "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED"
+        ),
         description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
     )
 
@@ -48,11 +50,13 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings):
     def LOG_LEVEL(self):  # noqa: N802
         return self.PAYMENTS_LOGLEVEL
 
-    @validator("PAYMENTS_LOGLEVEL", pre=True)
+    @field_validator("PAYMENTS_LOGLEVEL", mode="before")
     @classmethod
     def valid_log_level(cls, value: str) -> str:
         return cls.validate_log_level(value)
 
+    model_config = ConfigDict(extra="allow")  # type:ignore[assignment]
+
 
 class ApplicationSettings(_BaseApplicationSettings):
     """Web app's environment variables
@@ -60,7 +64,7 @@ class ApplicationSettings(_BaseApplicationSettings):
     These settings includes extra configuration for the http-API
     """
 
-    PAYMENTS_GATEWAY_URL: HttpUrl = Field(
+    PAYMENTS_GATEWAY_URL: HttpUrlLegacy = Field(
         ..., description="Base url to the payment gateway"
     )
 
@@ -111,18 +115,21 @@ class ApplicationSettings(_BaseApplicationSettings):
     )
 
     PAYMENTS_RABBITMQ: RabbitSettings = Field(
-        auto_default_from_env=True, description="settings for service/rabbitmq"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for service/rabbitmq",
     )
 
     PAYMENTS_TRACING: TracingSettings | None = Field(
-        auto_default_from_env=True, description="settings for opentelemetry tracing"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for opentelemetry tracing",
    )
 
     PAYMENTS_POSTGRES: PostgresSettings = Field(
-        auto_default_from_env=True, description="settings for postgres service"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for postgres service",
     )
 
-    PAYMENTS_STRIPE_URL: HttpUrl = Field(
+    PAYMENTS_STRIPE_URL: HttpUrlLegacy = Field(
         ..., description="Base url to the payment Stripe"
     )
     PAYMENTS_STRIPE_API_SECRET: SecretStr = Field(
@@ -134,12 +141,13 @@ class ApplicationSettings(_BaseApplicationSettings):
     )
 
     PAYMENTS_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field(
-        auto_default_from_env=True, description="settings for RUT service"
+        json_schema_extra={"auto_default_from_env": True},
+        description="settings for RUT service",
     )
 
     PAYMENTS_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
 
     PAYMENTS_EMAIL: SMTPSettings | None = Field(
-        auto_default_from_env=True,
+        json_schema_extra={"auto_default_from_env": True},
         description="optional email (see notifier_email service)",
     )
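Two v2 settings changes recur in this file: the v1 `env=[...]` list becomes `validation_alias=AliasChoices(...)`, and `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")`. A minimal standalone sketch of both, assuming a plain `pydantic_settings.BaseSettings` instead of the repo's settings-library base class:

```python
import os

from pydantic import AliasChoices, Field, field_validator
from pydantic_settings import BaseSettings


class _Settings(BaseSettings):
    # v1: Field(default="INFO", env=["MY_LOGLEVEL", "LOG_LEVEL"])
    MY_LOGLEVEL: str = Field(
        default="INFO",
        validation_alias=AliasChoices("MY_LOGLEVEL", "LOG_LEVEL"),
    )

    # v1: @validator("MY_LOGLEVEL", pre=True)
    @field_validator("MY_LOGLEVEL", mode="before")
    @classmethod
    def _upper(cls, value: str) -> str:
        return value.upper()


os.environ["LOG_LEVEL"] = "debug"   # picked up via the second alias choice
assert _Settings().MY_LOGLEVEL == "DEBUG"
```

Note that `auto_default_from_env` is a repo-specific flag, not a pydantic option; since v2 rejects unknown `Field` keyword arguments, the diff parks it under `json_schema_extra` where the settings-library can still read it.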
description="optional email (see notifier_email service)", ) diff --git a/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py b/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py index 4e7b25d228ef..aa98896cf133 100644 --- a/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py +++ b/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py @@ -5,7 +5,7 @@ from models_library.basic_types import NonNegativeDecimal from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, PositiveInt +from pydantic import BaseModel, ConfigDict, PositiveInt from simcore_postgres_database.utils_payments_autorecharge import AutoRechargeStmts from .base import BaseRepository @@ -19,9 +19,7 @@ class PaymentsAutorechargeDB(BaseModel): primary_payment_method_id: PaymentMethodID top_up_amount_in_usd: NonNegativeDecimal monthly_limit_in_usd: NonNegativeDecimal | None - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class AutoRechargeRepo(BaseRepository): @@ -38,7 +36,7 @@ async def get_wallet_autorecharge( stmt = AutoRechargeStmts.get_wallet_autorecharge(wallet_id) result = await conn.execute(stmt) row = result.first() - return PaymentsAutorechargeDB.from_orm(row) if row else None + return PaymentsAutorechargeDB.model_validate(row) if row else None async def replace_wallet_autorecharge( self, @@ -73,4 +71,4 @@ async def replace_wallet_autorecharge( result = await conn.execute(stmt) row = result.first() assert row # nosec - return PaymentsAutorechargeDB.from_orm(row) + return PaymentsAutorechargeDB.model_validate(row) diff --git a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py index 79e4b6d7ae4b..4eb43b667b13 100644 --- a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py +++ b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py @@ -11,7 +11,7 @@ from models_library.api_schemas_webserver.wallets import PaymentMethodID from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_postgres_database.models.payments_methods import ( InitPromptAckFlowState, payments_methods, @@ -91,7 +91,7 @@ async def update_ack_payment_method( row = result.first() assert row, "execute above should have caught this" # nosec - return PaymentsMethodsDB.from_orm(row) + return PaymentsMethodsDB.model_validate(row) async def insert_payment_method( self, @@ -132,7 +132,7 @@ async def list_user_payment_methods( .order_by(payments_methods.c.created.desc()) ) # newest first rows = result.fetchall() or [] - return parse_obj_as(list[PaymentsMethodsDB], rows) + return TypeAdapter(list[PaymentsMethodsDB]).validate_python(rows) async def get_payment_method_by_id( self, @@ -149,7 +149,7 @@ async def get_payment_method_by_id( if row is None: raise PaymentMethodNotFoundError(payment_method_id=payment_method_id) - return PaymentsMethodsDB.from_orm(row) + return PaymentsMethodsDB.model_validate(row) async def get_payment_method( self, @@ -171,7 +171,7 @@ async def get_payment_method( if row is None: raise PaymentMethodNotFoundError(payment_method_id=payment_method_id) - return PaymentsMethodsDB.from_orm(row) + return PaymentsMethodsDB.model_validate(row) async def delete_payment_method( self, @@ -191,4 +191,4 @@ async def 
diff --git a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py
index 79e4b6d7ae4b..4eb43b667b13 100644
--- a/services/payments/src/simcore_service_payments/db/payments_methods_repo.py
+++ b/services/payments/src/simcore_service_payments/db/payments_methods_repo.py
@@ -11,7 +11,7 @@
 from models_library.api_schemas_webserver.wallets import PaymentMethodID
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_postgres_database.models.payments_methods import (
     InitPromptAckFlowState,
     payments_methods,
@@ -91,7 +91,7 @@ async def update_ack_payment_method(
             row = result.first()
             assert row, "execute above should have caught this"  # nosec
 
-            return PaymentsMethodsDB.from_orm(row)
+            return PaymentsMethodsDB.model_validate(row)
 
     async def insert_payment_method(
         self,
@@ -132,7 +132,7 @@ async def list_user_payment_methods(
                 .order_by(payments_methods.c.created.desc())
             )  # newest first
             rows = result.fetchall() or []
-            return parse_obj_as(list[PaymentsMethodsDB], rows)
+            return TypeAdapter(list[PaymentsMethodsDB]).validate_python(rows)
 
     async def get_payment_method_by_id(
         self,
@@ -149,7 +149,7 @@ async def get_payment_method_by_id(
             if row is None:
                 raise PaymentMethodNotFoundError(payment_method_id=payment_method_id)
 
-            return PaymentsMethodsDB.from_orm(row)
+            return PaymentsMethodsDB.model_validate(row)
 
     async def get_payment_method(
         self,
@@ -171,7 +171,7 @@ async def get_payment_method(
             if row is None:
                 raise PaymentMethodNotFoundError(payment_method_id=payment_method_id)
 
-            return PaymentsMethodsDB.from_orm(row)
+            return PaymentsMethodsDB.model_validate(row)
 
     async def delete_payment_method(
         self,
@@ -191,4 +191,4 @@ async def delete_payment_method(
             .returning(sa.literal_column("*"))
         )
         row = result.first()
-        return row if row is None else PaymentsMethodsDB.from_orm(row)
+        return row if row is None else PaymentsMethodsDB.model_validate(row)
diff --git a/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py b/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py
index fc3ba5b3b105..8b2eef6f2286 100644
--- a/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py
+++ b/services/payments/src/simcore_service_payments/db/payments_transactions_repo.py
@@ -12,7 +12,7 @@
 from models_library.products import ProductName
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import HttpUrl, PositiveInt, parse_obj_as
+from pydantic import HttpUrl, PositiveInt, TypeAdapter
 from simcore_postgres_database import errors as pg_errors
 from simcore_postgres_database.models.payments_transactions import (
     PaymentTransactionState,
@@ -114,9 +114,9 @@ async def update_ack_payment_transaction(
                 .values(
                     completed_at=sa.func.now(),
                     state=completion_state,
-                    invoice_url=invoice_url,
+                    invoice_url=f"{invoice_url}" if invoice_url else None,
                     stripe_invoice_id=stripe_invoice_id,
-                    invoice_pdf_url=invoice_pdf_url,
+                    invoice_pdf_url=f"{invoice_pdf_url}" if invoice_pdf_url else None,
                     **optional,
                 )
                 .where(payments_transactions.c.payment_id == f"{payment_id}")
@@ -125,7 +125,7 @@
             row = result.first()
             assert row, "execute above should have caught this"  # nosec
 
-            return PaymentsTransactionsDB.from_orm(row)
+            return PaymentsTransactionsDB.model_validate(row)
 
     async def list_user_payment_transactions(
         self,
@@ -171,8 +171,9 @@ async def list_user_payment_transactions(
             result = await connection.execute(stmt)
             rows = result.fetchall()
-            return total_number_of_items, parse_obj_as(
-                list[PaymentsTransactionsDB], rows
+            return (
+                total_number_of_items,
+                TypeAdapter(list[PaymentsTransactionsDB]).validate_python(rows),
             )
 
     async def get_payment_transaction(
@@ -189,7 +190,7 @@ async def get_payment_transaction(
             )
         )
         row = result.fetchone()
-        return PaymentsTransactionsDB.from_orm(row) if row else None
+        return PaymentsTransactionsDB.model_validate(row) if row else None
 
     async def sum_current_month_dollars(self, *, wallet_id: WalletID) -> Decimal:
         _current_timestamp = datetime.now(tz=timezone.utc)
@@ -229,4 +230,4 @@ async def get_last_payment_transaction_for_wallet(
             .limit(1)
         )
         row = result.fetchone()
-        return PaymentsTransactionsDB.from_orm(row) if row else None
+        return PaymentsTransactionsDB.model_validate(row) if row else None
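Two more recurring patterns appear in these repositories: `parse_obj_as(list[Model], rows)` becomes `TypeAdapter(list[Model]).validate_python(rows)`, and because v2's `HttpUrl` is a `Url` object rather than a `str` subclass, URL values headed for string columns are coerced with `f"{...}"` before being written. A sketch of both under simplified types:

```python
from pydantic import BaseModel, HttpUrl, TypeAdapter


class TransactionDB(BaseModel):  # simplified stand-in for PaymentsTransactionsDB
    payment_id: str
    invoice_url: HttpUrl | None = None


rows = [{"payment_id": "abc", "invoice_url": "https://example.com/inv.pdf"}]

# v1: parse_obj_as(list[TransactionDB], rows)
transactions = TypeAdapter(list[TransactionDB]).validate_python(rows)

# v2 HttpUrl is no longer a str, so coerce explicitly before storing:
url = transactions[0].invoice_url
stored = f"{url}" if url else None
assert isinstance(stored, str)
```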
diff --git a/services/payments/src/simcore_service_payments/models/db.py b/services/payments/src/simcore_service_payments/models/db.py
index 8d69b8de70da..8811dbcc1392 100644
--- a/services/payments/src/simcore_service_payments/models/db.py
+++ b/services/payments/src/simcore_service_payments/models/db.py
@@ -1,6 +1,5 @@
 import datetime
 from decimal import Decimal
-from typing import Any, ClassVar
 
 from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID
 from models_library.emails import LowerCaseEmailStr
@@ -8,7 +7,7 @@
 from models_library.products import ProductName
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import BaseModel, HttpUrl
+from pydantic import BaseModel, ConfigDict, HttpUrl
 from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState
 from simcore_postgres_database.models.payments_transactions import (
     PaymentTransactionState,
@@ -47,15 +46,14 @@ class PaymentsTransactionsDB(BaseModel):
     completed_at: datetime.datetime | None
     state: PaymentTransactionState
     state_message: str | None
-
-    class Config:
-        orm_mode = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        from_attributes=True,
+        json_schema_extra={
             "examples": [
-                _EXAMPLE_AFTER_INIT,
+                _EXAMPLE_AFTER_INIT,  # type:ignore[list-item]
                 # successful completion
                 {
-                    **_EXAMPLE_AFTER_INIT,
+                    **_EXAMPLE_AFTER_INIT,  # type:ignore[dict-item]
                     "invoice_url": "https://my-fake-pdf-link.com",
                     "stripe_invoice_id": "12345",
                     "invoice_pdf_url": "https://my-fake-pdf-link.com",
@@ -64,7 +62,8 @@ class Config:
                     "state_message": "Payment completed successfully",
                 },
             ]
-        }
+        },
+    )
 
 
 _EXAMPLE_AFTER_INIT_PAYMENT_METHOD = {
@@ -86,18 +85,18 @@ class PaymentsMethodsDB(BaseModel):
     completed_at: datetime.datetime | None
     state: InitPromptAckFlowState
     state_message: str | None
-
-    class Config:
-        orm_mode = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        from_attributes=True,
+        json_schema_extra={
             "examples": [
-                _EXAMPLE_AFTER_INIT_PAYMENT_METHOD,
+                _EXAMPLE_AFTER_INIT_PAYMENT_METHOD,  # type:ignore[list-item]
                 # successful completion
                 {
-                    **_EXAMPLE_AFTER_INIT_PAYMENT_METHOD,
+                    **_EXAMPLE_AFTER_INIT_PAYMENT_METHOD,  # type:ignore[dict-item]
                     "completed_at": "2023-09-27T10:00:15",
                     "state": "SUCCESS",
                     "state_message": "Payment method completed successfully",
                 },
             ]
-        }
+        },
+    )
diff --git a/services/payments/src/simcore_service_payments/models/db_to_api.py b/services/payments/src/simcore_service_payments/models/db_to_api.py
index d3ffa832ed9e..c6c791953832 100644
--- a/services/payments/src/simcore_service_payments/models/db_to_api.py
+++ b/services/payments/src/simcore_service_payments/models/db_to_api.py
@@ -15,7 +15,7 @@ def to_payments_api_model(transaction: PaymentsTransactionsDB) -> PaymentTransac
         "osparc_credits": transaction.osparc_credits,
         "wallet_id": transaction.wallet_id,
         "created_at": transaction.initiated_at,
-        "state": transaction.state,
+        "state": f"{transaction.state}",
         "completed_at": transaction.completed_at,
     }
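The nested `class Config` with a `schema_extra` ClassVar collapses in v2 into a single `model_config = ConfigDict(...)`, with the OpenAPI examples moved under `json_schema_extra`. A compact sketch of the same shape, using hypothetical field names:

```python
from pydantic import BaseModel, ConfigDict

_EXAMPLE = {"payment_id": "12345", "state": "SUCCESS"}  # illustrative data only


class PaymentOut(BaseModel):
    payment_id: str
    state: str

    # v1 equivalent:
    #   class Config:
    #       orm_mode = True
    #       schema_extra = {"examples": [_EXAMPLE]}
    model_config = ConfigDict(
        from_attributes=True,
        json_schema_extra={"examples": [_EXAMPLE]},
    )


# json_schema_extra entries are merged into the generated JSON schema:
assert PaymentOut.model_json_schema()["examples"] == [_EXAMPLE]
```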
diff --git a/services/payments/src/simcore_service_payments/models/payments_gateway.py b/services/payments/src/simcore_service_payments/models/payments_gateway.py
index e0d7481df585..dc1b35256336 100644
--- a/services/payments/src/simcore_service_payments/models/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/models/payments_gateway.py
@@ -7,7 +7,7 @@
 from models_library.basic_types import AmountDecimal, IDStr
 from models_library.payments import UserInvoiceAddress
 from models_library.products import StripePriceID, StripeTaxRateID
-from pydantic import BaseModel, EmailStr, Extra, Field
+from pydantic import BaseModel, ConfigDict, EmailStr, Field
 
 COUNTRIES_WITH_VAT = ["CH", "LI"]
 
@@ -30,7 +30,9 @@ class InitPayment(BaseModel):
     amount_dollars: AmountDecimal
     # metadata to store for billing or reference
     credits_: AmountDecimal = Field(
-        ..., alias="credits", describe="This is equal to `quantity` field in Stripe"
+        ...,
+        alias="credits",
+        json_schema_extra={"describe": "This is equal to `quantity` field in Stripe"},
     )
     user_name: IDStr
     user_email: EmailStr
@@ -39,9 +41,7 @@ class InitPayment(BaseModel):
     stripe_price_id: StripePriceID
     stripe_tax_rate_id: StripeTaxRateID
     stripe_tax_exempt_value: StripeTaxExempt
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")
 
 
 class PaymentInitiated(BaseModel):
@@ -58,9 +58,7 @@ class InitPaymentMethod(BaseModel):
     user_name: IDStr
     user_email: EmailStr
     wallet_name: IDStr
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")
 
 
 class PaymentMethodInitiated(BaseModel):
diff --git a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py
index 5b73282cc3c7..17e7cf3cf81f 100644
--- a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py
+++ b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py
@@ -1,18 +1,19 @@
 # mypy: disable-error-code=truthy-function
-from typing import Any, ClassVar
+from typing import Any
 
 from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID
 from models_library.basic_types import IDStr
-from pydantic import BaseModel, Field, HttpUrl, validator
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator
+from pydantic_core.core_schema import ValidationInfo
 
 
 class _BaseAck(BaseModel):
     success: bool
-    message: str = Field(default=None)
+    message: str | None = Field(default=None)
 
 
 class _BaseAckPayment(_BaseAck):
-    provider_payment_id: IDStr = Field(
+    provider_payment_id: IDStr | None = Field(
         default=None,
         description="Payment ID from the provider (e.g. stripe payment ID)",
     )
@@ -87,17 +88,17 @@ class AckPayment(_BaseAckPayment):
         description="Gets the payment-method if user opted to save it during payment."
         "If user did not opt to save or payment-method was already saved, then it defaults to None",
     )
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": _EXAMPLES[1].copy(),  # shown in openapi.json
-            "examples": _EXAMPLES,
+            "examples": _EXAMPLES,  # type:ignore[dict-item]
         }
+    )
 
-    @validator("invoice_url")
+    @field_validator("invoice_url")
     @classmethod
-    def success_requires_invoice(cls, v, values):
-        success = values.get("success")
+    def success_requires_invoice(cls, v, info: ValidationInfo):
+        success = info.data.get("success")
         if success and not v:
             msg = "Invoice required on successful payments"
             raise ValueError(msg)
         return v
@@ -112,14 +113,14 @@ class AckPaymentWithPaymentMethod(_BaseAckPayment):
     payment_id: PaymentID = Field(
         default=None, description="Payment ID from the gateway"
     )
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": {
                 **_ONE_TIME_SUCCESS,
                 "payment_id": "D19EE68B-B007-4B61-A8BC-32B7115FB244",
             },  # shown in openapi.json
         }
+    )
 
 
 assert PaymentID  # nosec
diff --git a/services/payments/src/simcore_service_payments/models/schemas/meta.py b/services/payments/src/simcore_service_payments/models/schemas/meta.py
index 06352b54ba14..cf5e7c649a2f 100644
--- a/services/payments/src/simcore_service_payments/models/schemas/meta.py
+++ b/services/payments/src/simcore_service_payments/models/schemas/meta.py
@@ -1,17 +1,15 @@
-from typing import Any, ClassVar
-
 from models_library.api_schemas__common.meta import BaseMeta
-from pydantic import HttpUrl
+from pydantic import ConfigDict, HttpUrl
 
 
 class Meta(BaseMeta):
     docs_url: HttpUrl
-
-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        json_schema_extra={
             "example": {
                 "name": "simcore_service_payments",
                 "version": "2.4.45",
                 "docs_url": "https://foo.io/doc",
             }
         }
+    )
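The `success_requires_invoice` change above is the v2 idiom for cross-field validation: instead of the v1 `values` dict, the validator receives a `ValidationInfo` whose `info.data` holds the fields validated so far. Two caveats worth showing: only fields declared *before* the validated one are visible in `info.data`, and by default the validator does not run for omitted fields that fall back to their default. A self-contained sketch:

```python
from pydantic import BaseModel, ValidationError, field_validator
from pydantic_core.core_schema import ValidationInfo


class Ack(BaseModel):  # simplified stand-in for AckPayment
    success: bool                     # declared first => visible in info.data
    invoice_url: str | None = None

    @field_validator("invoice_url")
    @classmethod
    def _require_invoice_on_success(cls, v, info: ValidationInfo):
        if info.data.get("success") and not v:
            msg = "Invoice required on successful payments"
            raise ValueError(msg)
        return v


try:
    # invoice_url passed explicitly so the validator actually runs on it
    Ack(success=True, invoice_url=None)
except ValidationError:
    pass  # expected: success without an invoice fails validation
```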
diff --git a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py
index 9507b32bf0ac..d300bbf881b1 100644
--- a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py
+++ b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py
@@ -14,7 +14,7 @@
 from models_library.rabbitmq_basic_types import RPCMethodName
 from models_library.rabbitmq_messages import WalletCreditsMessage
 from models_library.wallets import WalletID
-from pydantic import parse_obj_as, parse_raw_as
+from pydantic import TypeAdapter
 from simcore_service_payments.db.auto_recharge_repo import AutoRechargeRepo
 from simcore_service_payments.db.payments_methods_repo import PaymentsMethodsRepo
 from simcore_service_payments.db.payments_transactions_repo import (
@@ -36,7 +36,7 @@
 
 
 async def process_message(app: FastAPI, data: bytes) -> bool:
-    rabbit_message = parse_raw_as(WalletCreditsMessage, data)
+    rabbit_message = TypeAdapter(WalletCreditsMessage).validate_json(data)
     _logger.debug("Process msg: %s", rabbit_message)
     settings: ApplicationSettings = app.state.settings
 
@@ -142,12 +142,12 @@ async def _perform_auto_recharge(
     result = await rabbitmq_rpc_client.request(
         WEBSERVER_RPC_NAMESPACE,
-        parse_obj_as(RPCMethodName, "get_invoice_data"),
+        TypeAdapter(RPCMethodName).validate_python("get_invoice_data"),
         user_id=payment_method_db.user_id,
         dollar_amount=wallet_auto_recharge.top_up_amount_in_usd,
         product_name=rabbit_message.product_name,
     )
-    invoice_data_get = parse_obj_as(InvoiceDataGet, result)
+    invoice_data_get = TypeAdapter(InvoiceDataGet).validate_python(result)
 
     await pay_with_payment_method(
         gateway=PaymentsGatewayApi.get_from_app_state(app),
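`parse_raw_as(Model, data)` maps to `TypeAdapter(Model).validate_json(data)`, which accepts `str` or `bytes` directly. A small sketch with a stand-in message type; as a design note, the adapter can be built once at module scope so the core schema is not rebuilt for every incoming message:

```python
from pydantic import BaseModel, TypeAdapter


class CreditsMessage(BaseModel):  # stand-in for WalletCreditsMessage
    wallet_id: int
    credits: float


_ADAPTER = TypeAdapter(CreditsMessage)  # build once, reuse per message


def process_message(data: bytes) -> CreditsMessage:
    # v1: parse_raw_as(CreditsMessage, data)
    return _ADAPTER.validate_json(data)


msg = process_message(b'{"wallet_id": 42, "credits": 10.5}')
assert msg.wallet_id == 42
```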
diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py
index 0b1097492c61..a1ac9d0b061b 100644
--- a/services/payments/src/simcore_service_payments/services/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py
@@ -12,12 +12,12 @@
 from contextlib import suppress
 
 import httpx
+from common_library.errors_classes import OsparcErrorMixin
 from fastapi import FastAPI
 from fastapi.encoders import jsonable_encoder
 from httpx import URL, HTTPStatusError
 from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID
-from pydantic import ValidationError, parse_raw_as
-from pydantic.errors import PydanticErrorMixin
+from pydantic import TypeAdapter, ValidationError
 from servicelib.fastapi.app_state import SingletonInAppStateMixin
 from servicelib.fastapi.http_client import (
     AttachLifespanMixin,
@@ -48,11 +48,11 @@
 def _parse_raw_as_or_none(cls: type, text: str | None):
     if text:
         with suppress(ValidationError):
-            return parse_raw_as(cls, text)
+            return TypeAdapter(cls).validate_python(text)
     return None
 
 
-class PaymentsGatewayError(PydanticErrorMixin, ValueError):
+class PaymentsGatewayError(OsparcErrorMixin, ValueError):
     msg_template = "{operation_id} error {status_code}: {reason}"
 
     @classmethod
@@ -124,10 +124,10 @@ class PaymentsGatewayApi(
     async def init_payment(self, payment: InitPayment) -> PaymentInitiated:
         response = await self.client.post(
             "/init",
-            json=jsonable_encoder(payment.dict(exclude_none=True, by_alias=True)),
+            json=jsonable_encoder(payment.model_dump(exclude_none=True, by_alias=True)),
         )
         response.raise_for_status()
-        return PaymentInitiated.parse_obj(response.json())
+        return PaymentInitiated.model_validate(response.json())
 
     def get_form_payment_url(self, id_: PaymentID) -> URL:
         return self.client.base_url.copy_with(path="/pay", params={"id": f"{id_}"})
@@ -141,7 +141,7 @@ async def cancel_payment(
             json=jsonable_encoder(payment_initiated),
         )
         response.raise_for_status()
-        return PaymentCancelled.parse_obj(response.json())
+        return PaymentCancelled.model_validate(response.json())
 
     #
     # api: payment method workflows
@@ -157,7 +157,7 @@ async def init_payment_method(
             json=jsonable_encoder(payment_method),
         )
         response.raise_for_status()
-        return PaymentMethodInitiated.parse_obj(response.json())
+        return PaymentMethodInitiated.model_validate(response.json())
 
     def get_form_payment_method_url(self, id_: PaymentMethodID) -> URL:
         return self.client.base_url.copy_with(
@@ -177,13 +177,13 @@ async def get_many_payment_methods(
             json=jsonable_encoder(BatchGetPaymentMethods(payment_methods_ids=ids_)),
         )
         response.raise_for_status()
-        return PaymentMethodsBatch.parse_obj(response.json()).items
+        return PaymentMethodsBatch.model_validate(response.json()).items
 
     @_handle_status_errors
     async def get_payment_method(self, id_: PaymentMethodID) -> GetPaymentMethod:
         response = await self.client.get(f"/payment-methods/{id_}")
         response.raise_for_status()
-        return GetPaymentMethod.parse_obj(response.json())
+        return GetPaymentMethod.model_validate(response.json())
 
     @_handle_status_errors
     async def delete_payment_method(self, id_: PaymentMethodID) -> None:
@@ -198,10 +198,10 @@ async def pay_with_payment_method(
     ) -> AckPaymentWithPaymentMethod:
         response = await self.client.post(
             f"/payment-methods/{id_}:pay",
-            json=jsonable_encoder(payment.dict(exclude_none=True, by_alias=True)),
+            json=jsonable_encoder(payment.model_dump(exclude_none=True, by_alias=True)),
         )
         response.raise_for_status()
-        return AckPaymentWithPaymentMethod.parse_obj(response.json())
+        return AckPaymentWithPaymentMethod.model_validate(response.json())
 
 
 def setup_payments_gateway(app: FastAPI):
diff --git a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
index e66f650fe7bf..0051bc6035fc 100644
--- a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
+++ b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
@@ -63,7 +63,7 @@ async def create_credit_transaction(
                 )
             ),
         )
-        credit_transaction = CreditTransactionCreated.parse_raw(response.text)
+        credit_transaction = CreditTransactionCreated.model_validate_json(response.text)
         return credit_transaction.credit_transaction_id
 
 
diff --git a/services/payments/src/simcore_service_payments/services/stripe.py b/services/payments/src/simcore_service_payments/services/stripe.py
index 38cc21fab0e2..c640c10f7160 100644
--- a/services/payments/src/simcore_service_payments/services/stripe.py
+++ b/services/payments/src/simcore_service_payments/services/stripe.py
@@ -81,7 +81,7 @@ async def get_invoice(
 
         response = await self.client.get(f"/v1/invoices/{stripe_invoice_id}")
         response.raise_for_status()
-        return InvoiceData.parse_raw(response.text)
+        return InvoiceData.model_validate_json(response.text)
 
 
 def setup_stripe(app: FastAPI):
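For HTTP clients the v2 renames split along input type: parsed JSON objects go through `Model.model_validate(response.json())`, while raw payloads go through `Model.model_validate_json(response.text)`, skipping the intermediate `json.loads`. A sketch of the second pattern, mirroring the `stripe.py` change above (the route and model are simplified stand-ins):

```python
import httpx
from pydantic import BaseModel


class InvoiceData(BaseModel):  # simplified stand-in
    invoice_pdf_url: str


async def get_invoice(client: httpx.AsyncClient, invoice_id: str) -> InvoiceData:
    response = await client.get(f"/v1/invoices/{invoice_id}")
    response.raise_for_status()
    # v1: InvoiceData.parse_raw(response.text)
    return InvoiceData.model_validate_json(response.text)
```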
diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py
index 042ac85f968a..3691f3c43d29 100644
--- a/services/payments/tests/conftest.py
+++ b/services/payments/tests/conftest.py
@@ -10,7 +10,7 @@
 import simcore_service_payments
 from faker import Faker
 from models_library.users import GroupID
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.utils_secrets import generate_token_secret_key
@@ -89,4 +89,4 @@ def app_environment(
 
 @pytest.fixture
 def user_primary_group_id(faker: Faker) -> GroupID:
-    return parse_obj_as(GroupID, faker.pyint())
+    return TypeAdapter(GroupID).validate_python(faker.pyint())
diff --git a/services/payments/tests/unit/api/test__one_time_payment_workflows.py b/services/payments/tests/unit/api/test__one_time_payment_workflows.py
index 753432ac6d69..126116a5dc27 100644
--- a/services/payments/tests/unit/api/test__one_time_payment_workflows.py
+++ b/services/payments/tests/unit/api/test__one_time_payment_workflows.py
@@ -9,6 +9,7 @@
 import pytest
 from faker import Faker
 from fastapi import FastAPI, status
+from fastapi.encoders import jsonable_encoder
 from models_library.api_schemas_webserver.wallets import WalletPaymentInitiated
 from models_library.basic_types import IDStr
 from models_library.payments import UserInvoiceAddress
@@ -16,7 +17,7 @@
 from models_library.rabbitmq_basic_types import RPCMethodName
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import EmailStr, parse_obj_as
+from pydantic import EmailStr, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
@@ -90,7 +91,7 @@ async def test_successful_one_time_payment_workflow(
     # ACK via api/rest
     inited = await rpc_client.request(
         PAYMENTS_RPC_NAMESPACE,
-        parse_obj_as(RPCMethodName, "init_payment"),
+        TypeAdapter(RPCMethodName).validate_python("init_payment"),
         amount_dollars=1000,
         target_credits=10000,
         product_name="osparc",
@@ -111,7 +112,9 @@ async def test_successful_one_time_payment_workflow(
     # ACK
     response = await client.post(
         f"/v1/payments/{inited.payment_id}:ack",
-        json=AckPayment(success=True, invoice_url=faker.url()).dict(),
+        json=jsonable_encoder(
+            AckPayment(success=True, invoice_url=faker.url()).model_dump()
+        ),
         headers=auth_headers,
     )
 
@@ -121,7 +124,7 @@ async def test_successful_one_time_payment_workflow(
     # LIST payments via api/rest
     got = await rpc_client.request(
         PAYMENTS_RPC_NAMESPACE,
-        parse_obj_as(RPCMethodName, "get_payments_page"),
+        TypeAdapter(RPCMethodName).validate_python("get_payments_page"),
         user_id=user_id,
         product_name="osparc",
         timeout_s=None if is_pdb_enabled else RPC_REQUEST_DEFAULT_TIMEOUT_S,
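The tests now wrap `model_dump()` in FastAPI's `jsonable_encoder` because a v2 dump keeps rich Python objects (`Url`, `datetime`, `Decimal`) that `httpx`'s `json=` argument cannot serialize on its own. A minimal sketch of the failure mode this avoids; `AckExample` is an illustrative stand-in:

```python
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel, HttpUrl


class AckExample(BaseModel):  # stand-in for AckPayment
    success: bool
    invoice_url: HttpUrl | None = None


ack = AckExample(success=True, invoice_url="https://example.com/invoice")

raw = ack.model_dump()           # invoice_url is still a Url object here
payload = jsonable_encoder(raw)  # plain str/bool now, safe for json=...
assert isinstance(payload["invoice_url"], str)
```

An equivalent alternative would be `ack.model_dump(mode="json")`, which also produces JSON-compatible primitives; the tests here standardize on `jsonable_encoder` instead.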
diff --git a/services/payments/tests/unit/api/test__payment_method_workflows.py b/services/payments/tests/unit/api/test__payment_method_workflows.py
index 76640384f7b1..697ddfd08f33 100644
--- a/services/payments/tests/unit/api/test__payment_method_workflows.py
+++ b/services/payments/tests/unit/api/test__payment_method_workflows.py
@@ -9,6 +9,7 @@
 import pytest
 from faker import Faker
 from fastapi import FastAPI, status
+from fastapi.encoders import jsonable_encoder
 from models_library.api_schemas_webserver.wallets import (
     PaymentMethodGet,
     PaymentMethodInitiated,
@@ -17,7 +18,7 @@
 from models_library.rabbitmq_basic_types import RPCMethodName
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import EmailStr, parse_obj_as
+from pydantic import EmailStr, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
@@ -89,7 +90,7 @@ async def test_successful_create_payment_method_workflow(
     # INIT via api/rpc
     inited = await rpc_client.request(
         PAYMENTS_RPC_NAMESPACE,
-        parse_obj_as(RPCMethodName, "init_creation_of_payment_method"),
+        TypeAdapter(RPCMethodName).validate_python("init_creation_of_payment_method"),
         wallet_id=wallet_id,
         wallet_name=wallet_name,
         user_id=user_id,
@@ -104,7 +105,9 @@ async def test_successful_create_payment_method_workflow(
     # ACK via api/rest
     response = await client.post(
         f"/v1/payments-methods/{inited.payment_method_id}:ack",
-        json=AckPayment(success=True, invoice_url=faker.url()).dict(),
+        json=jsonable_encoder(
+            AckPayment(success=True, invoice_url=faker.url()).model_dump()
+        ),
         headers=auth_headers,
     )
 
@@ -114,7 +117,7 @@ async def test_successful_create_payment_method_workflow(
     # GET via api/rpc
     got = await rpc_client.request(
         PAYMENTS_RPC_NAMESPACE,
-        parse_obj_as(RPCMethodName, "get_payment_method"),
+        TypeAdapter(RPCMethodName).validate_python("get_payment_method"),
         payment_method_id=inited.payment_method_id,
         user_id=user_id,
         wallet_id=wallet_id,
diff --git a/services/payments/tests/unit/api/test_rest_acknowledgements.py b/services/payments/tests/unit/api/test_rest_acknowledgements.py
index 4df30829f93c..b7254f22d425 100644
--- a/services/payments/tests/unit/api/test_rest_acknowledgements.py
+++ b/services/payments/tests/unit/api/test_rest_acknowledgements.py
@@ -11,6 +11,7 @@
 import pytest
 from faker import Faker
 from fastapi import FastAPI, status
+from fastapi.encoders import jsonable_encoder
 from models_library.api_schemas_payments.errors import (
     PaymentMethodNotFoundError,
     PaymentNotFoundError,
@@ -93,7 +94,9 @@ async def test_payments_api_authentication(
     auth_headers: dict[str, str],
 ):
     payments_id = faker.uuid4()
-    payment_ack = AckPayment(success=True, invoice_url=faker.url()).dict()
+    payment_ack = jsonable_encoder(
+        AckPayment(success=True, invoice_url=faker.url()).model_dump()
+    )
 
     # w/o header
     response = await client.post(
@@ -108,7 +111,7 @@
     )
     assert response.status_code == status.HTTP_404_NOT_FOUND, response.json()
 
-    error = DefaultApiError.parse_obj(response.json())
+    error = DefaultApiError.model_validate(response.json())
     assert PaymentNotFoundError.msg_template.format(payment_id=payments_id) == str(
         error.detail
     )
@@ -121,7 +124,9 @@ async def test_payments_methods_api_authentication(
     auth_headers: dict[str, str],
 ):
     payment_method_id = faker.uuid4()
-    payment_method_ack = AckPaymentMethod(success=True, message=faker.word()).dict()
+    payment_method_ack = AckPaymentMethod(
+        success=True, message=faker.word()
+    ).model_dump()
 
     # w/o header
     response = await client.post(
@@ -138,7 +143,7 @@
     )
     assert response.status_code == status.HTTP_404_NOT_FOUND, response.json()
 
-    error = DefaultApiError.parse_obj(response.json())
+    error = DefaultApiError.model_validate(response.json())
     assert PaymentMethodNotFoundError.msg_template.format(
         payment_method_id=payment_method_id
     ) == str(error.detail)
Faker): bearer_token = faker.word() headers = {"Authorization": f"Bearer {bearer_token}"} - async with httpx.AsyncClient(base_url=httpbin_base_url, headers=headers) as client: + async with httpx.AsyncClient( + base_url=f"{httpbin_base_url}", headers=headers + ) as client: response = await client.get("/bearer") assert response.json() == {"authenticated": True, "token": bearer_token} diff --git a/services/payments/tests/unit/api/test_rest_meta.py b/services/payments/tests/unit/api/test_rest_meta.py index 1ca81c9b0a23..3a6acf2b0209 100644 --- a/services/payments/tests/unit/api/test_rest_meta.py +++ b/services/payments/tests/unit/api/test_rest_meta.py @@ -60,7 +60,7 @@ async def test_meta( ): response = await client.get(f"/{API_VTAG}/meta", headers=auth_headers) assert response.status_code == status.HTTP_200_OK - meta = Meta.parse_obj(response.json()) + meta = Meta.model_validate(response.json()) - response = await client.get(meta.docs_url) + response = await client.get(f"{meta.docs_url}") assert response.status_code == status.HTTP_200_OK diff --git a/services/payments/tests/unit/conftest.py b/services/payments/tests/unit/conftest.py index 63f4ed97bbde..be55736c6f69 100644 --- a/services/payments/tests/unit/conftest.py +++ b/services/payments/tests/unit/conftest.py @@ -23,7 +23,7 @@ from models_library.payments import StripeInvoiceID from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.faker_factories import random_payment_method_view from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -209,7 +209,7 @@ def mock_payments_gateway_service_api_base(app: FastAPI) -> Iterator[MockRouter] def mock_payments_routes(faker: Faker) -> Callable: def _mock(mock_router: MockRouter): def _init_200(request: httpx.Request): - assert InitPayment.parse_raw(request.content) is not None + assert InitPayment.model_validate_json(request.content) is not None assert "*" not in request.headers["X-Init-Api-Secret"] return httpx.Response( @@ -218,7 +218,7 @@ def _init_200(request: httpx.Request): ) def _cancel_200(request: httpx.Request): - assert PaymentInitiated.parse_raw(request.content) is not None + assert PaymentInitiated.model_validate_json(request.content) is not None assert "*" not in request.headers["X-Init-Api-Secret"] # responds with an empty body although it can also contain a message @@ -244,7 +244,7 @@ def no_funds_payment_method_id(faker: Faker) -> PaymentMethodID: USE create_fake_payment_method_in_db to inject this payment-method in DB Emulates https://stripe.com/docs/testing#declined-payments """ - return parse_obj_as(PaymentMethodID, "no_funds_payment_method_id") + return TypeAdapter(PaymentMethodID).validate_python("no_funds_payment_method_id") @pytest.fixture @@ -263,7 +263,7 @@ def _init(request: httpx.Request): pm_id = faker.uuid4() _payment_methods[pm_id] = PaymentMethodInfoTuple( - init=InitPaymentMethod.parse_raw(request.content), + init=InitPaymentMethod.model_validate_json(request.content), get=GetPaymentMethod(**random_payment_method_view(id=pm_id)), ) @@ -294,7 +294,7 @@ def _del(request: httpx.Request, pm_id: PaymentMethodID): def _batch_get(request: httpx.Request): assert "*" not in request.headers["X-Init-Api-Secret"] - batch = BatchGetPaymentMethods.parse_raw(request.content) + batch = BatchGetPaymentMethods.model_validate_json(request.content) try: items = [_payment_methods[pm].get for pm in batch.payment_methods_ids] @@
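# NOTE: `.dict()` becomes `.model_dump()` in pydantic v2, but `model_dump()`
# still returns rich python objects (e.g. Url instances), hence the extra
# `jsonable_encoder(...)` wrapper before posting the payload in the tests
# above.  Sketch with a hypothetical `_Ack` model, not the real AckPayment
# (`model_dump(mode="json")` would be an equivalent alternative):
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel, HttpUrl

class _Ack(BaseModel):
    success: bool
    invoice_url: HttpUrl

_payload = jsonable_encoder(_Ack(success=True, invoice_url="https://e.io").model_dump())
assert _payload["invoice_url"] == "https://e.io/"  # Url rendered as a plain str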
-308,7 +308,7 @@ def _batch_get(request: httpx.Request): def _pay(request: httpx.Request, pm_id: PaymentMethodID): assert "*" not in request.headers["X-Init-Api-Secret"] - assert InitPayment.parse_raw(request.content) is not None + assert InitPayment.model_validate_json(request.content) is not None # checks _get(request, pm_id) diff --git a/services/payments/tests/unit/test__model_examples.py b/services/payments/tests/unit/test__model_examples.py index beab80e794d9..6e072aa658a4 100644 --- a/services/payments/tests/unit/test__model_examples.py +++ b/services/payments/tests/unit/test__model_examples.py @@ -21,7 +21,7 @@ def test_api_server_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): try: - assert model_cls.parse_obj(example_data) is not None + assert model_cls.model_validate(example_data) is not None except ValidationError as err: pytest.fail( f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" diff --git a/services/payments/tests/unit/test_cli.py b/services/payments/tests/unit/test_cli.py index 2d01fd0fc310..84c654ee1594 100644 --- a/services/payments/tests/unit/test_cli.py +++ b/services/payments/tests/unit/test_cli.py @@ -4,8 +4,10 @@ # pylint: disable=too-many-arguments import os +import traceback import pytest +from click.testing import Result from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_payments._meta import API_VERSION @@ -14,10 +16,16 @@ from typer.testing import CliRunner +def _format_cli_error(result: Result) -> str: + assert result.exception + tb_message = "\n".join(traceback.format_tb(result.exception.__traceback__)) + return f"Below exception was raised by the cli:\n{tb_message}" + + def test_cli_help_and_version(cli_runner: CliRunner): # simcore-service-payments --help result = cli_runner.invoke(cli_main, "--help") - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) result = cli_runner.invoke(cli_main, "--version") assert result.exit_code == os.EX_OK, result.output @@ -27,7 +35,7 @@ def test_cli_help_and_version(cli_runner: CliRunner): def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): # simcore-service-payments echo-dotenv --auto-password result = cli_runner.invoke(cli_main, "echo-dotenv --auto-password") - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) environs = load_dotenv(result.stdout) @@ -39,11 +47,11 @@ def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): # simcore-service-payments settings --show-secrets --as-json result = cli_runner.invoke(cli_main, ["settings", "--show-secrets", "--as-json"]) - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) print(result.output) - settings = ApplicationSettings.parse_raw(result.output) - assert settings == ApplicationSettings.create_from_envs() + settings = ApplicationSettings.model_validate_json(result.output) + assert settings.model_dump() == ApplicationSettings.create_from_envs().model_dump() def test_main(app_environment: EnvVarsDict): diff --git a/services/payments/tests/unit/test_db_payments_methods_repo.py b/services/payments/tests/unit/test_db_payments_methods_repo.py index f64570cf598a..47595bb5557c 100644 --- 
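# NOTE: the parsing-side renames seen above: `parse_raw` -> `model_validate_json`
# and `parse_obj` -> `model_validate` (used e.g. for the CLI settings round-trip
# in test_cli.py).  Self-contained sketch with a hypothetical `_Settings` model:
from pydantic import BaseModel

class _Settings(BaseModel):
    log_level: str = "INFO"

_dumped = _Settings().model_dump_json()             # v1: .json()
_restored = _Settings.model_validate_json(_dumped)  # v1: .parse_raw()
assert _restored == _Settings()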
a/services/payments/tests/unit/test_db_payments_methods_repo.py +++ b/services/payments/tests/unit/test_db_payments_methods_repo.py @@ -43,7 +43,9 @@ def app_environment( async def test_create_payments_method_annotations_workflow(app: FastAPI): - fake = PaymentsMethodsDB(**PaymentsMethodsDB.Config.schema_extra["examples"][1]) + fake = PaymentsMethodsDB( + **PaymentsMethodsDB.model_config["json_schema_extra"]["examples"][1] + ) repo = PaymentsMethodsRepo(app.state.engine) diff --git a/services/payments/tests/unit/test_db_payments_transactions_repo.py b/services/payments/tests/unit/test_db_payments_transactions_repo.py index 62e217a9e7a4..d4e728d14c5e 100644 --- a/services/payments/tests/unit/test_db_payments_transactions_repo.py +++ b/services/payments/tests/unit/test_db_payments_transactions_repo.py @@ -49,7 +49,7 @@ def app_environment( async def test_one_time_payment_annotations_workflow(app: FastAPI): fake = PaymentsTransactionsDB( - **PaymentsTransactionsDB.Config.schema_extra["examples"][1] + **PaymentsTransactionsDB.model_config["json_schema_extra"]["examples"][1] ) repo = PaymentsTransactionsRepo(app.state.engine) diff --git a/services/payments/tests/unit/test_rpc_payments.py b/services/payments/tests/unit/test_rpc_payments.py index 3e5b3ad1c2ad..b755acf7d08b 100644 --- a/services/payments/tests/unit/test_rpc_payments.py +++ b/services/payments/tests/unit/test_rpc_payments.py @@ -13,7 +13,7 @@ from models_library.api_schemas_webserver.wallets import WalletPaymentInitiated from models_library.payments import UserInvoiceAddress from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from respx import MockRouter @@ -83,7 +83,7 @@ async def test_rpc_init_payment_fail( with pytest.raises(RPCServerError) as exc_info: await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "init_payment"), + TypeAdapter(RPCMethodName).validate_python("init_payment"), **init_payment_kwargs, ) @@ -107,7 +107,7 @@ async def test_webserver_one_time_payment_workflow( result = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "init_payment"), + TypeAdapter(RPCMethodName).validate_python("init_payment"), **init_payment_kwargs, ) @@ -118,7 +118,7 @@ async def test_webserver_one_time_payment_workflow( result = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "cancel_payment"), + TypeAdapter(RPCMethodName).validate_python("cancel_payment"), payment_id=result.payment_id, user_id=init_payment_kwargs["user_id"], wallet_id=init_payment_kwargs["wallet_id"], @@ -145,7 +145,7 @@ async def test_cancel_invalid_payment_id( with pytest.raises(PaymentNotFoundError) as exc_info: await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "cancel_payment"), + TypeAdapter(RPCMethodName).validate_python("cancel_payment"), payment_id=invalid_payment_id, user_id=init_payment_kwargs["user_id"], wallet_id=init_payment_kwargs["wallet_id"], diff --git a/services/payments/tests/unit/test_rpc_payments_methods.py b/services/payments/tests/unit/test_rpc_payments_methods.py index ef60bfa6c425..e3a6d377e277 100644 --- a/services/payments/tests/unit/test_rpc_payments_methods.py +++ b/services/payments/tests/unit/test_rpc_payments_methods.py @@ -19,7 +19,7 @@ from models_library.rabbitmq_basic_types import RPCMethodName from 
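# NOTE: v1 kept schema examples in an inner `class Config: schema_extra = {...}`;
# v2 moves them to `model_config` under "json_schema_extra", which is why the
# tests now read `Model.model_config["json_schema_extra"]["examples"][i]`.
# `construct` is likewise renamed `model_construct` (build without validation).
# Sketch with a hypothetical `_Tx` model:
from pydantic import BaseModel, ConfigDict

class _Tx(BaseModel):
    amount: float
    model_config = ConfigDict(
        json_schema_extra={"examples": [{"amount": 0.0}, {"amount": 25.0}]}
    )

_example = _Tx.model_config["json_schema_extra"]["examples"][1]
assert _Tx.model_validate(_example).amount == 25.0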
models_library.users import UserID from models_library.wallets import WalletID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from respx import MockRouter @@ -87,7 +87,7 @@ async def test_webserver_init_and_cancel_payment_method_workflow( initiated = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "init_creation_of_payment_method"), + TypeAdapter(RPCMethodName).validate_python("init_creation_of_payment_method"), wallet_id=wallet_id, wallet_name=wallet_name, user_id=user_id, @@ -104,7 +104,7 @@ async def test_webserver_init_and_cancel_payment_method_workflow( cancelled = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "cancel_creation_of_payment_method"), + TypeAdapter(RPCMethodName).validate_python("cancel_creation_of_payment_method"), payment_method_id=initiated.payment_method_id, user_id=user_id, wallet_id=wallet_id, @@ -135,7 +135,7 @@ async def test_webserver_crud_payment_method_workflow( inited = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "init_creation_of_payment_method"), + TypeAdapter(RPCMethodName).validate_python("init_creation_of_payment_method"), wallet_id=wallet_id, wallet_name=wallet_name, user_id=user_id, @@ -161,7 +161,7 @@ async def test_webserver_crud_payment_method_workflow( listed = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_payment_methods"), + TypeAdapter(RPCMethodName).validate_python("list_payment_methods"), user_id=user_id, wallet_id=wallet_id, timeout_s=None if is_pdb_enabled else RPC_REQUEST_DEFAULT_TIMEOUT_S, @@ -175,7 +175,7 @@ async def test_webserver_crud_payment_method_workflow( got = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_payment_method"), + TypeAdapter(RPCMethodName).validate_python("get_payment_method"), payment_method_id=inited.payment_method_id, user_id=user_id, wallet_id=wallet_id, @@ -187,7 +187,7 @@ async def test_webserver_crud_payment_method_workflow( await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "delete_payment_method"), + TypeAdapter(RPCMethodName).validate_python("delete_payment_method"), payment_method_id=inited.payment_method_id, user_id=user_id, wallet_id=wallet_id, @@ -230,7 +230,7 @@ async def test_webserver_pay_with_payment_method_workflow( transaction = await rpc_client.request( PAYMENTS_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "pay_with_payment_method"), + TypeAdapter(RPCMethodName).validate_python("pay_with_payment_method"), payment_method_id=created.payment_method_id, amount_dollars=faker.pyint(), target_credits=faker.pyint(), diff --git a/services/payments/tests/unit/test_services_auto_recharge_listener.py b/services/payments/tests/unit/test_services_auto_recharge_listener.py index 196c1cdcd98d..aaf143ad7b7a 100644 --- a/services/payments/tests/unit/test_services_auto_recharge_listener.py +++ b/services/payments/tests/unit/test_services_auto_recharge_listener.py @@ -161,8 +161,8 @@ def wallet_id(faker: Faker): async def mocked_pay_with_payment_method(mocker: MockerFixture) -> mock.AsyncMock: return mocker.patch( "simcore_service_payments.services.payments.PaymentsGatewayApi.pay_with_payment_method", - return_value=AckPaymentWithPaymentMethod.construct( - **AckPaymentWithPaymentMethod.Config.schema_extra["example"] + 
return_value=AckPaymentWithPaymentMethod.model_construct( + **AckPaymentWithPaymentMethod.model_config["json_schema_extra"]["example"] ), ) @@ -200,8 +200,8 @@ async def get_invoice_data( dollar_amount: Decimal, product_name: ProductName, ) -> InvoiceDataGet: - return InvoiceDataGet.parse_obj( - InvoiceDataGet.Config.schema_extra["examples"][0] + return InvoiceDataGet.model_validate( + InvoiceDataGet.model_config["json_schema_extra"]["examples"][0] ) await rpc_server.register_router(router, namespace=WEBSERVER_RPC_NAMESPACE) @@ -220,7 +220,7 @@ async def _assert_payments_transactions_db_row(postgres_db) -> PaymentsTransacti result = con.execute(sa.select(payments_transactions)) row = result.first() assert row - return PaymentsTransactionsDB.from_orm(row) + return PaymentsTransactionsDB.model_validate(row) async def test_process_message__whole_autorecharge_flow_success( diff --git a/services/payments/tests/unit/test_services_notifier.py b/services/payments/tests/unit/test_services_notifier.py index 5aab90f9f0fb..ee55afa9be31 100644 --- a/services/payments/tests/unit/test_services_notifier.py +++ b/services/payments/tests/unit/test_services_notifier.py @@ -4,10 +4,9 @@ # pylint: disable=too-many-arguments -import asyncio -import threading from collections.abc import AsyncIterable, Callable from contextlib import _AsyncGeneratorContextManager +from typing import Awaitable from unittest.mock import AsyncMock import arrow @@ -20,7 +19,7 @@ from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.api_schemas_webserver.wallets import PaymentTransaction from models_library.users import GroupID, UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.faker_factories import random_payment_transaction from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -31,8 +30,7 @@ from simcore_service_payments.services.notifier import NotifierService from simcore_service_payments.services.rabbitmq import get_rabbitmq_settings from socketio import AsyncServer -from tenacity import AsyncRetrying -from tenacity.stop import stop_after_attempt +from tenacity import AsyncRetrying, stop_after_delay from tenacity.wait import wait_fixed pytest_simcore_core_services_selection = [ @@ -103,7 +101,7 @@ async def socketio_client_events( # emulates front-end receiving message async def on_payment(data): - assert parse_obj_as(PaymentTransaction, data) is not None + assert TypeAdapter(PaymentTransaction).validate_python(data) is not None on_event_spy = AsyncMock(wraps=on_payment) socketio_client.on(SOCKET_IO_PAYMENT_COMPLETED_EVENT, on_event_spy) @@ -112,8 +110,10 @@ async def on_payment(data): @pytest.fixture -async def notify_payment(app: FastAPI, user_id: UserID) -> Callable: - async def _(): +async def notify_payment( + app: FastAPI, user_id: UserID +) -> Callable[[], Awaitable[None]]: + async def _() -> None: transaction = PaymentsTransactionsDB( **random_payment_transaction( user_id=user_id, completed_at=arrow.utcnow().datetime @@ -127,23 +127,28 @@ async def _(): return _ +async def _assert_called_once(mock: AsyncMock) -> None: + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True + ): + with attempt: + assert mock.call_count == 1 + + async def test_emit_message_as_external_process_to_frontend_client( socketio_server_events: dict[str, AsyncMock], socketio_client: socketio.AsyncClient, socketio_client_events: dict[str, AsyncMock], - 
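# NOTE: the `_assert_called_once` helper added just above replaces the old
# thread + event-loop juggling with plain polling: retry the assertion every
# 0.1 s for at most 5 s and re-raise the last AssertionError on timeout.
# Standalone, runnable version of the same pattern:
import asyncio
from unittest.mock import AsyncMock

from tenacity import AsyncRetrying, stop_after_delay
from tenacity.wait import wait_fixed

async def _assert_called_once(mock: AsyncMock) -> None:
    async for attempt in AsyncRetrying(
        wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True
    ):
        with attempt:
            assert mock.call_count == 1

async def _demo() -> None:
    spy = AsyncMock()
    await spy()
    await _assert_called_once(spy)

asyncio.run(_demo())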
notify_payment: Callable, + notify_payment: Callable[[], Awaitable[None]], + socketio_client_factory: Callable[ + [], _AsyncGeneratorContextManager[socketio.AsyncClient] + ], ): """ front-end -> socketio client (many different clients) webserver -> socketio server (one/more replicas) payments -> Sends messages to clients from external processes (one/more replicas) """ - # Used iusntead of a fix asyncio.sleep - context_switch_retry_kwargs = { - "wait": wait_fixed(0.1), - "stop": stop_after_attempt(5), - "reraise": True, - } # web server spy events server_connect = socketio_server_events["connect"] @@ -160,20 +165,9 @@ async def test_emit_message_as_external_process_to_frontend_client( # client emits await socketio_client.emit("check", data="hoi") - async for attempt in AsyncRetrying(**context_switch_retry_kwargs): - with attempt: - assert server_on_check.called + await _assert_called_once(server_on_check) # payment server emits - def _(lp): - asyncio.run_coroutine_threadsafe(notify_payment(), lp) + await notify_payment() - threading.Thread( - target=_, - args=(asyncio.get_event_loop(),), - daemon=False, - ).start() - - async for attempt in AsyncRetrying(**context_switch_retry_kwargs): - with attempt: - assert client_on_payment.called + await _assert_called_once(client_on_payment) diff --git a/services/payments/tests/unit/test_services_notifier_email.py b/services/payments/tests/unit/test_services_notifier_email.py index c554c7a2c281..79edb79498cc 100644 --- a/services/payments/tests/unit/test_services_notifier_email.py +++ b/services/payments/tests/unit/test_services_notifier_email.py @@ -86,7 +86,7 @@ def mocked_get_invoice_pdf_response( text=f"{request.fixturename} is set to '{request.param}'", ) - respx_mock.get(transaction.invoice_pdf_url).mock(return_value=response) + respx_mock.get(f"{transaction.invoice_pdf_url}").mock(return_value=response) return respx_mock @@ -97,7 +97,7 @@ def transaction( ) -> PaymentsTransactionsDB: kwargs = { k: successful_transaction[k] - for k in PaymentsTransactionsDB.__fields__ + for k in PaymentsTransactionsDB.model_fields if k in successful_transaction } return PaymentsTransactionsDB(**kwargs) diff --git a/services/payments/tests/unit/test_services_payments__get_invoice.py b/services/payments/tests/unit/test_services_payments__get_invoice.py index 7a391b223519..57c71945bf45 100644 --- a/services/payments/tests/unit/test_services_payments__get_invoice.py +++ b/services/payments/tests/unit/test_services_payments__get_invoice.py @@ -18,6 +18,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import HttpUrl +from pydantic_core import Url from pytest_simcore.helpers.faker_factories import random_payment_transaction from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -118,4 +119,4 @@ async def test_get_payment_invoice_url( payment_id=populate_payment_transaction_db, ) assert invoice_url - assert type(invoice_url) is HttpUrl + assert isinstance(invoice_url, Url) diff --git a/services/payments/tests/unit/test_services_payments_gateway.py b/services/payments/tests/unit/test_services_payments_gateway.py index f210d1e72587..e89c502894b4 100644 --- a/services/payments/tests/unit/test_services_payments_gateway.py +++ b/services/payments/tests/unit/test_services_payments_gateway.py @@ -23,6 +23,7 @@ _raise_as_payments_gateway_error, setup_payments_gateway, ) +from yarl import URL async def test_setup_payment_gateway_api(app_environment: 
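# NOTE: pydantic v2 `HttpUrl`/`AnyUrl` fields hold `pydantic_core.Url` objects
# rather than `str` subclasses, which explains three edits around this point:
# the `f"{...}"` coercions before handing URLs to httpx/respx, the isinstance
# check against `pydantic_core.Url`, and extracting `.host` via `yarl.URL`.
# Sketch (the isinstance holds for the pydantic 2.9.x pinned in this patch;
# later pydantic versions wrap Url differently):
from pydantic import BaseModel, HttpUrl
from pydantic_core import Url
from yarl import URL

class _M(BaseModel):
    link: HttpUrl

_m = _M(link="https://example.org/docs")
assert isinstance(_m.link, Url)      # no longer a str
assert not isinstance(_m.link, str)
assert URL(f"{_m.link}").host == "example.org"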
EnvVarsDict): @@ -120,7 +121,7 @@ async def test_one_time_payment_workflow( ) app_settings: ApplicationSettings = app.state.settings - assert submission_link.host == app_settings.PAYMENTS_GATEWAY_URL.host + assert submission_link.host == URL(app_settings.PAYMENTS_GATEWAY_URL).host # cancel payment_canceled = await payment_gateway_api.cancel_payment(payment_initiated) @@ -159,7 +160,7 @@ async def test_payment_methods_workflow( ) app_settings: ApplicationSettings = app.state.settings - assert form_link.host == app_settings.PAYMENTS_GATEWAY_URL.host + assert form_link.host == URL(app_settings.PAYMENTS_GATEWAY_URL).host # CRUD payment_method_id = initiated.payment_method_id @@ -169,7 +170,7 @@ async def test_payment_methods_workflow( payment_method_id ) assert got_payment_method.id == payment_method_id - print(got_payment_method.json(indent=2)) + print(got_payment_method.model_dump_json(indent=2)) # list payment-methods items = await payments_gateway_api.get_many_payment_methods([payment_method_id]) diff --git a/services/payments/tests/unit/test_services_resource_usage_tracker.py b/services/payments/tests/unit/test_services_resource_usage_tracker.py index 0959f535cc19..1010f3e3b003 100644 --- a/services/payments/tests/unit/test_services_resource_usage_tracker.py +++ b/services/payments/tests/unit/test_services_resource_usage_tracker.py @@ -72,7 +72,7 @@ async def test_add_credits_to_wallet( user_id=faker.pyint(), user_email=faker.email(), osparc_credits=100, - payment_transaction_id=faker.pyint(), + payment_transaction_id=faker.pystr(), created_at=datetime.now(tz=timezone.utc), ) > 0 diff --git a/services/resource-usage-tracker/openapi.json b/services/resource-usage-tracker/openapi.json index 0df986b36a59..6aa53c7118cf 100644 --- a/services/resource-usage-tracker/openapi.json +++ b/services/resource-usage-tracker/openapi.json @@ -54,24 +54,24 @@ "operationId": "get_credit_transactions_sum_v1_credit_transactions_credits_sum_post", "parameters": [ { + "name": "product_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Product Name" - }, - "name": "product_name", - "in": "query" + } }, { + "name": "wallet_id", + "in": "query", "required": true, "schema": { "type": "integer", "exclusiveMinimum": true, "title": "Wallet Id", "minimum": 0 - }, - "name": "wallet_id", - "in": "query" + } } ], "responses": { @@ -149,33 +149,33 @@ "operationId": "get_service_default_pricing_plan", "parameters": [ { + "name": "service_key", + "in": "path", "required": true, "schema": { "type": "string", "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", "title": "Service Key" - }, - "name": "service_key", - "in": "path" + } }, { + "name": "service_version", + "in": "path", "required": true, "schema": { "type": "string", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", "title": "Service Version" - }, - "name": "service_version", - "in": "path" + } }, { + "name": "product_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Product Name" - }, - "name": "product_name", - "in": "query" + } } ], "responses": { @@ -212,35 +212,35 @@ "operationId": "list_service_pricing_plans", "parameters": [ { + "name": "pricing_plan_id", + "in": "path", "required": true, "schema": { "type": "integer", "exclusiveMinimum": true, "title": "Pricing Plan Id", "minimum": 0 - }, - "name": "pricing_plan_id", 
- "in": "path" + } }, { + "name": "pricing_unit_id", + "in": "path", "required": true, "schema": { "type": "integer", "exclusiveMinimum": true, "title": "Pricing Unit Id", "minimum": 0 - }, - "name": "pricing_unit_id", - "in": "path" + } }, { + "name": "product_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Product Name" - }, - "name": "product_name", - "in": "query" + } } ], "responses": { @@ -297,7 +297,14 @@ "title": "User Email" }, "osparc_credits": { - "type": "number", + "anyOf": [ + { + "type": "number" + }, + { + "type": "string" + } + ], "title": "Osparc Credits" }, "payment_transaction_id": { @@ -373,8 +380,8 @@ "enum": [ "TIER" ], - "title": "PricingPlanClassification", - "description": "An enumeration." + "const": "TIER", + "title": "PricingPlanClassification" }, "PricingPlanGet": { "properties": { @@ -405,10 +412,17 @@ "title": "Pricing Plan Key" }, "pricing_units": { - "items": { - "$ref": "#/components/schemas/PricingUnitGet" - }, - "type": "array", + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/PricingUnitGet" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Pricing Units" }, "is_active": { @@ -424,6 +438,7 @@ "classification", "created_at", "pricing_plan_key", + "pricing_units", "is_active" ], "title": "PricingPlanGet" @@ -482,13 +497,16 @@ }, "RAM": { "type": "integer", + "minimum": 0, "title": "Ram" }, "VRAM": { "type": "integer", + "minimum": 0, "title": "Vram" } }, + "additionalProperties": true, "type": "object", "required": [ "CPU", diff --git a/services/resource-usage-tracker/requirements/_base.in b/services/resource-usage-tracker/requirements/_base.in index c71570d1fee7..1ed4215b64f2 100644 --- a/services/resource-usage-tracker/requirements/_base.in +++ b/services/resource-usage-tracker/requirements/_base.in @@ -7,6 +7,7 @@ # intra-repo required dependencies --requirement ../../../packages/aws-library/requirements/_base.in +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 7fbdfdb92e61..857c0f0d7416 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -50,6 +50,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -128,21 +130,8 @@ email-validator==2.1.1 # via pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -353,7 +342,7 @@ psutil==6.0.0 # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -365,7 +354,6 @@ pydantic==1.10.14 # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -383,6 +371,26 @@ pydantic==1.10.14 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -399,7 +407,9 @@ python-dateutil==2.9.0.post0 # matplotlib # pandas python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn pytz==2024.1 # via # dateparser @@ -500,7 +510,7 @@ sqlalchemy==1.4.52 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -557,6 +567,7 @@ typing-extensions==4.10.0 # faststream # opentelemetry-sdk # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index 4db08363ded8..6ddd99492864 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -2,6 +2,10 @@ alembic==1.13.1 # via # -c requirements/_base.txt # -r requirements/_test.in +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -77,10 +81,6 @@ flask-cors==5.0.0 # via moto graphql-core==3.2.4 # via moto -greenlet==3.0.3 - # via - # -c requirements/_base.txt - # sqlalchemy h11==0.14.0 # via # -c requirements/_base.txt @@ -182,11 +182,15 @@ py-partiql-parser==0.5.6 # via moto pycparser==2.22 # via cffi -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via # -c requirements/_base.txt @@ -308,6 +312,7 @@ typing-extensions==4.10.0 # cfn-lint # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs urllib3==2.0.7 # via diff --git a/services/resource-usage-tracker/requirements/ci.txt b/services/resource-usage-tracker/requirements/ci.txt index 10b3745d37d9..697ade6fa5e9 100644 --- a/services/resource-usage-tracker/requirements/ci.txt +++ b/services/resource-usage-tracker/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library 
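# NOTE: the regenerated openapi.json earlier in this patch is mostly mechanical
# fallout of pydantic v2's JSON-schema generation: `X | None` renders as
# anyOf[..., {"type": "null"}], and fields without an explicit default (even
# nullable ones) are now listed as required — hence "pricing_units" joining
# the required list.  Sketch with a hypothetical `_Plan` model:
from pydantic import BaseModel

class _Plan(BaseModel):
    pricing_units: list[int] | None  # nullable but no default -> required

_schema = _Plan.model_json_schema()
assert "pricing_units" in _schema["required"]
assert {"type": "null"} in _schema["properties"]["pricing_units"]["anyOf"]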
simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/resource-usage-tracker/requirements/dev.txt b/services/resource-usage-tracker/requirements/dev.txt index 4fc539932c08..253940c1800a 100644 --- a/services/resource-usage-tracker/requirements/dev.txt +++ b/services/resource-usage-tracker/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py index 63e86cce819f..ab1dfa3467ca 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py @@ -6,7 +6,7 @@ from models_library.basic_types import VersionStr from packaging.version import Version -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.utils_meta import PackageInfo from settings_library.basic_types import VersionTag @@ -17,7 +17,9 @@ PROJECT_NAME: Final[str] = info.project_name VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ -API_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, info.api_prefix_path_tag) +API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python( + info.api_prefix_path_tag +) SUMMARY: Final[str] = info.get_summary() diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py index cae70b1152c8..9da69bad6f3d 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py @@ -24,7 +24,6 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyUrl from servicelib.rabbitmq import RPCRouter from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( CustomResourceUsageTrackerError, @@ -79,7 +78,7 @@ async def export_service_runs( access_all_wallet_usage: bool = False, order_by: OrderBy | None = None, filters: ServiceResourceUsagesFilters | None = None, -) -> AnyUrl: +) -> str: app_settings: ApplicationSettings = app.state.settings s3_settings = app_settings.RESOURCE_USAGE_TRACKER_S3 assert s3_settings # nosec diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py index a97db0170ae4..5c1ee9079568 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py @@ -32,7 +32,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - _logger.info("app settings: %s", settings.json(indent=1)) + _logger.info("app settings: %s", 
settings.model_dump_json(indent=1)) app = FastAPI( debug=settings.RESOURCE_USAGE_TRACKER_DEBUG, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/errors.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/errors.py index 298e63aef710..44557db0b021 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/errors.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/errors.py @@ -12,8 +12,10 @@ class ConfigurationError(ResourceUsageTrackerRuntimeError): def http404_error_handler( request: Request, # pylint: disable=unused-argument - error: CustomResourceUsageTrackerError, + error: Exception, ) -> JSONResponse: + assert isinstance(error, CustomResourceUsageTrackerError) # nosec + return JSONResponse( status_code=status.HTTP_404_NOT_FOUND, content={"message": f"{error.msg_template}"}, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py index 7658d1da0cdc..b473eb112942 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py @@ -2,7 +2,7 @@ from functools import cached_property from models_library.basic_types import BootModeEnum -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, Field, PositiveInt, field_validator from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag from settings_library.postgres import PostgresSettings @@ -45,18 +45,23 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): RESOURCE_USAGE_TRACKER_DEBUG: bool = Field( default=False, description="Debug mode", - env=["RESOURCE_USAGE_TRACKER_DEBUG", "DEBUG"], + validation_alias=AliasChoices( + "RESOURCE_USAGE_TRACKER_DEBUG", + "DEBUG", + ), ) RESOURCE_USAGE_TRACKER_LOGLEVEL: LogLevel = Field( default=LogLevel.INFO, - env=["RESOURCE_USAGE_TRACKER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "RESOURCE_USAGE_TRACKER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -64,7 +69,7 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self) -> LogLevel: # noqa: N802 return self.RESOURCE_USAGE_TRACKER_LOGLEVEL - @validator("RESOURCE_USAGE_TRACKER_LOGLEVEL", pre=True) + @field_validator("RESOURCE_USAGE_TRACKER_LOGLEVEL", mode="before") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -78,16 +83,18 @@ class MinimalApplicationSettings(_BaseApplicationSettings): """ RESOURCE_USAGE_TRACKER_PROMETHEUS: PrometheusSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) RESOURCE_USAGE_TRACKER_POSTGRES: PostgresSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True}, ) - RESOURCE_USAGE_TRACKER_REDIS: RedisSettings = Field(auto_default_from_env=True) + RESOURCE_USAGE_TRACKER_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True}, + ) RESOURCE_USAGE_TRACKER_RABBITMQ: RabbitSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True}, ) @@ -110,7 +117,19 @@ class ApplicationSettings(MinimalApplicationSettings): description="Heartbeat counter limit when RUT considers service as unhealthy.", ) RESOURCE_USAGE_TRACKER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - RESOURCE_USAGE_TRACKER_S3: S3Settings | None = Field(auto_default_from_env=True) + RESOURCE_USAGE_TRACKER_S3: S3Settings | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) RESOURCE_USAGE_TRACKER_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, ) + + @field_validator( + "RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC", mode="before" + ) + @classmethod + def _validate_interval(cls, v): + if isinstance(v, str) and v.isnumeric(): + return int(v) + return v diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py index a264f90d3750..4cdf74b64292 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py @@ -13,7 +13,7 @@ ) from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class CreditTransactionCreate(BaseModel): @@ -64,6 +64,4 @@ class CreditTransactionDB(BaseModel): created: datetime last_heartbeat_at: datetime modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py index f946c92e5d92..7f27ef1096c9 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py +++
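# NOTE: two settings-related renames shown above: `env=[...]` becomes
# `validation_alias=AliasChoices(...)` and `@validator(..., pre=True)` becomes
# `@field_validator(..., mode="before")`.  Hedged sketch with a hypothetical
# `_AppSettings` (pydantic-settings is now a separate package):
import os

from pydantic import AliasChoices, Field, field_validator
from pydantic_settings import BaseSettings

class _AppSettings(BaseSettings):
    LOGLEVEL: str = Field(
        default="INFO",
        validation_alias=AliasChoices("MYAPP_LOGLEVEL", "LOG_LEVEL"),
    )

    @field_validator("LOGLEVEL", mode="before")
    @classmethod
    def _upper(cls, v):
        return v.upper() if isinstance(v, str) else v

os.environ["LOG_LEVEL"] = "debug"
assert _AppSettings().LOGLEVEL == "DEBUG"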
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py @@ -2,7 +2,7 @@ from models_library.resource_tracker import PricingPlanClassification, PricingPlanId from models_library.services import ServiceKey, ServiceVersion -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict ## DB Models @@ -15,16 +15,12 @@ class PricingPlansDB(BaseModel): is_active: bool created: datetime pricing_plan_key: str - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class PricingPlansWithServiceDefaultPlanDB(PricingPlansDB): service_default_plan: bool - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class PricingPlanToServiceDB(BaseModel): @@ -32,6 +28,4 @@ class PricingPlanToServiceDB(BaseModel): service_key: ServiceKey service_version: ServiceVersion created: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py index b5fa3daadf0f..200419fbdca3 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py @@ -6,7 +6,7 @@ PricingUnitCostId, PricingUnitId, ) -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class PricingUnitCostsDB(BaseModel): @@ -21,6 +21,4 @@ class PricingUnitCostsDB(BaseModel): created: datetime comment: str | None modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py index f0fed877d43e..bffc25e951c4 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py @@ -9,7 +9,7 @@ PricingUnitId, UnitExtraInfo, ) -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, field_validator class PricingUnitsDB(BaseModel): @@ -23,11 +23,9 @@ class PricingUnitsDB(BaseModel): modified: datetime current_cost_per_unit: Decimal current_cost_per_unit_id: PricingUnitCostId + model_config = ConfigDict(from_attributes=True) - class Config: - orm_mode = True - - @validator("specific_info", pre=True) + @field_validator("specific_info", mode="before") @classmethod def default_hardware_info_when_empty(cls, v) -> HardwareInfo | Any: if not v: diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py index 6bceaab4f8c3..20314b8b065b 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py +++ 
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py @@ -16,7 +16,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, NonNegativeInt +from pydantic import BaseModel, ConfigDict, NonNegativeInt class ServiceRunCreate(BaseModel): @@ -93,17 +93,14 @@ class ServiceRunDB(BaseModel): last_heartbeat_at: datetime service_run_status_msg: str | None missed_heartbeat_counter: NonNegativeInt - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class ServiceRunWithCreditsDB(ServiceRunDB): - osparc_credits: Decimal | None + osparc_credits: Decimal | None = None transaction_status: CreditTransactionStatus | None - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class OsparcCreditsAggregatedByServiceKeyDB(BaseModel): @@ -111,8 +108,7 @@ class OsparcCreditsAggregatedByServiceKeyDB(BaseModel): service_key: ServiceKey running_time_in_hours: Decimal - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class ServiceRunForCheckDB(BaseModel): @@ -120,6 +116,4 @@ class ServiceRunForCheckDB(BaseModel): last_heartbeat_at: datetime missed_heartbeat_counter: NonNegativeInt modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py index 231c97502fbc..4358d5603967 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/db/repositories/resource_tracker.py @@ -161,7 +161,7 @@ async def update_service_run_last_heartbeat( row = result.first() if row is None: return None - return ServiceRunDB.from_orm(row) + return ServiceRunDB.model_validate(row) async def update_service_run_stopped_at( self, data: ServiceRunStoppedAtUpdate @@ -191,7 +191,7 @@ async def update_service_run_stopped_at( row = result.first() if row is None: return None - return ServiceRunDB.from_orm(row) + return ServiceRunDB.model_validate(row) async def get_service_run_by_id( self, service_run_id: ServiceRunId @@ -204,7 +204,7 @@ async def get_service_run_by_id( row = result.first() if row is None: return None - return ServiceRunDB.from_orm(row) + return ServiceRunDB.model_validate(row) async def list_service_runs_by_product_and_user_and_wallet( self, @@ -309,7 +309,9 @@ async def list_service_runs_by_product_and_user_and_wallet( result = await conn.execute(query) - return [ServiceRunWithCreditsDB.from_orm(row) for row in result.fetchall()] + return [ + ServiceRunWithCreditsDB.model_validate(row) for row in result.fetchall() + ] async def get_osparc_credits_aggregated_by_service( self, @@ -405,7 +407,7 @@ async def get_osparc_credits_aggregated_by_service( return ( cast(int, count_result.scalar()), [ - OsparcCreditsAggregatedByServiceKeyDB.from_orm(row) + OsparcCreditsAggregatedByServiceKeyDB.model_validate(row) for row in list_result.fetchall() ], ) @@ -571,7 +573,7 @@ async def list_service_runs_with_running_status_across_all_products( ) result = await conn.execute(query) - return 
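# NOTE: `class Config: orm_mode = True` + `Model.from_orm(row)` becomes
# `model_config = ConfigDict(from_attributes=True)` + `Model.model_validate(row)`,
# applied to every *DB model in this patch.  Sketch with a plain dataclass
# standing in for a SQLAlchemy result row:
from dataclasses import dataclass
from datetime import datetime

from pydantic import BaseModel, ConfigDict

@dataclass
class _Row:  # hypothetical stand-in for a DB row
    service_run_id: str
    modified: datetime

class _ServiceRunDB(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    service_run_id: str
    modified: datetime

_db = _ServiceRunDB.model_validate(_Row("run-1", datetime(2024, 1, 1)))
assert _db.service_run_id == "run-1"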
[ServiceRunForCheckDB.from_orm(row) for row in result.fetchall()] + return [ServiceRunForCheckDB.model_validate(row) for row in result.fetchall()] async def total_service_runs_with_running_status_across_all_products( self, @@ -620,7 +622,7 @@ async def update_service_missed_heartbeat_counter( row = result.first() if row is None: return None - return ServiceRunDB.from_orm(row) + return ServiceRunDB.model_validate(row) ################################# # Credit transactions @@ -847,7 +849,7 @@ def _version(column_or_value): result = await conn.execute(query) return [ - PricingPlansWithServiceDefaultPlanDB.from_orm(row) + PricingPlansWithServiceDefaultPlanDB.model_validate(row) for row in result.fetchall() ] @@ -873,7 +875,7 @@ async def get_pricing_plan( raise CustomResourceUsageTrackerError( msg=f"Pricing plan does not exists: {pricing_plan_id}" ) - return PricingPlansDB.from_orm(row) + return PricingPlansDB.model_validate(row) async def list_pricing_plans_by_product( self, product_name: ProductName @@ -890,7 +892,7 @@ async def list_pricing_plans_by_product( ).where(resource_tracker_pricing_plans.c.product_name == product_name) result = await conn.execute(select_stmt) - return [PricingPlansDB.from_orm(row) for row in result.fetchall()] + return [PricingPlansDB.model_validate(row) for row in result.fetchall()] async def create_pricing_plan(self, data: PricingPlanCreate) -> PricingPlansDB: async with self.db_engine.begin() as conn: @@ -924,7 +926,7 @@ async def create_pricing_plan(self, data: PricingPlanCreate) -> PricingPlansDB: raise CustomResourceUsageTrackerError( msg=f"Pricing plan was not created: {data}" ) - return PricingPlansDB.from_orm(row) + return PricingPlansDB.model_validate(row) async def update_pricing_plan( self, product_name: ProductName, data: PricingPlanUpdate @@ -961,7 +963,7 @@ async def update_pricing_plan( row = result.first() if row is None: return None - return PricingPlansDB.from_orm(row) + return PricingPlansDB.model_validate(row) ################################# # Pricing plan to service @@ -1000,7 +1002,9 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ) result = await conn.execute(query) - return [PricingPlanToServiceDB.from_orm(row) for row in result.fetchall()] + return [ + PricingPlanToServiceDB.model_validate(row) for row in result.fetchall() + ] async def upsert_service_to_pricing_plan( self, @@ -1087,7 +1091,7 @@ async def upsert_service_to_pricing_plan( raise CustomResourceUsageTrackerError( msg="Pricing plan to service record was not created" ) - return PricingPlanToServiceDB.from_orm(row) + return PricingPlanToServiceDB.model_validate(row) ################################# # Pricing units @@ -1145,7 +1149,7 @@ async def list_pricing_units_by_pricing_plan( ) result = await conn.execute(query) - return [PricingUnitsDB.from_orm(row) for row in result.fetchall()] + return [PricingUnitsDB.model_validate(row) for row in result.fetchall()] async def get_valid_pricing_unit( self, @@ -1197,7 +1201,7 @@ async def get_valid_pricing_unit( raise CustomResourceUsageTrackerError( msg=f"Pricing plan {pricing_plan_id} and pricing unit {pricing_unit_id} for product {product_name} not found" ) - return PricingUnitsDB.from_orm(row) + return PricingUnitsDB.model_validate(row) async def create_pricing_unit_with_cost( self, data: PricingUnitWithCostCreate, pricing_plan_key: str @@ -1209,9 +1213,9 @@ async def create_pricing_unit_with_cost( .values( pricing_plan_id=data.pricing_plan_id, unit_name=data.unit_name, - 
unit_extra_info=data.unit_extra_info.dict(), + unit_extra_info=data.unit_extra_info.model_dump(), default=data.default, - specific_info=data.specific_info.dict(), + specific_info=data.specific_info.model_dump(), created=sa.func.now(), modified=sa.func.now(), ) @@ -1261,9 +1265,9 @@ async def update_pricing_unit_with_cost( resource_tracker_pricing_units.update() .values( unit_name=data.unit_name, - unit_extra_info=data.unit_extra_info.dict(), + unit_extra_info=data.unit_extra_info.model_dump(), default=data.default, - specific_info=data.specific_info.dict(), + specific_info=data.specific_info.model_dump(), modified=sa.func.now(), ) .where( @@ -1348,4 +1352,4 @@ async def get_pricing_unit_cost_by_id( raise CustomResourceUsageTrackerError( msg=f"Pricing unit cost id {pricing_unit_cost_id} not found in the resource_tracker_pricing_unit_costs table", ) - return PricingUnitCostsDB.from_orm(row) + return PricingUnitCostsDB.model_validate(row) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py index 889b8cfcd1cb..285d08ef58e6 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py @@ -4,6 +4,7 @@ from aws_library.s3 import S3NotConnectedError, SimcoreS3API from fastapi import FastAPI from models_library.api_schemas_storage import S3BucketName +from pydantic import TypeAdapter from settings_library.s3 import S3Settings from tenacity import ( AsyncRetrying, @@ -36,7 +37,9 @@ async def on_startup() -> None: ): with attempt: connected = await client.http_check_bucket_connected( - bucket=S3BucketName(settings.S3_BUCKET_NAME) + bucket=TypeAdapter(S3BucketName).validate_python( + settings.S3_BUCKET_NAME + ) ) if not connected: raise S3NotConnectedError # pragma: no cover diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py index 4cb024b3b7c3..7c925244f8d1 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_process_messages.py @@ -20,7 +20,7 @@ ServiceRunStatus, ) from models_library.services import ServiceType -from pydantic import parse_raw_as +from pydantic import TypeAdapter from .models.resource_tracker_credit_transactions import ( CreditTransactionCreate, @@ -45,9 +45,9 @@ async def process_message(app: FastAPI, data: bytes) -> bool: - rabbit_message: RabbitResourceTrackingMessages = parse_raw_as( - RabbitResourceTrackingMessages, data # type: ignore[arg-type] - ) + rabbit_message: RabbitResourceTrackingMessages = TypeAdapter( + RabbitResourceTrackingMessages + ).validate_json(data) _logger.info( "Process %s msg service_run_id: %s", rabbit_message.message_type, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py index d37f244dbc91..fe80ea2b443b 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py +++ 
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py
index d37f244dbc91..fe80ea2b443b 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py
@@ -13,6 +13,7 @@
     PricingPlanUpdate,
 )
 from models_library.services import ServiceKey, ServiceVersion
+from pydantic import TypeAdapter
 from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
     CustomResourceUsageTrackerError,
 )
@@ -96,7 +97,10 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan(
     ] = await resource_tracker_repo.list_connected_services_to_pricing_plan_by_pricing_plan(
         product_name=product_name, pricing_plan_id=pricing_plan_id
     )
-    return [PricingPlanToServiceGet.parse_obj(item) for item in output_list]
+    return [
+        TypeAdapter(PricingPlanToServiceGet).validate_python(item.model_dump())
+        for item in output_list
+    ]


 async def connect_service_to_pricing_plan(
@@ -116,7 +120,7 @@ async def connect_service_to_pricing_plan(
             service_version=service_version,
         )
     )
-    return PricingPlanToServiceGet.parse_obj(output)
+    return TypeAdapter(PricingPlanToServiceGet).validate_python(output.model_dump())


 async def list_pricing_plans_by_product(
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
index 3f2167e3d77b..a02f4ecd6460 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
@@ -18,7 +18,7 @@
 from models_library.rest_ordering import OrderBy
 from models_library.users import UserID
 from models_library.wallets import WalletID
-from pydantic import AnyUrl, PositiveInt
+from pydantic import PositiveInt, TypeAdapter
 from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
     CustomResourceUsageTrackerError,
 )
@@ -153,12 +153,12 @@ async def export_service_runs(
     access_all_wallet_usage: bool = False,
     order_by: OrderBy | None = None,
     filters: ServiceResourceUsagesFilters | None = None,
-) -> AnyUrl:
+) -> str:
     started_from = filters.started_at.from_ if filters else None
     started_until = filters.started_at.until if filters else None

     # Create S3 key name
-    s3_bucket_name = S3BucketName(bucket_name)
+    s3_bucket_name = TypeAdapter(S3BucketName).validate_python(bucket_name)
     # NOTE: su stands for "service usage"
     file_name = f"su_{shortuuid.uuid()}.csv"
     s3_object_key = f"resource-usage-tracker-service-runs/{datetime.now(tz=timezone.utc).date()}/{file_name}"
@@ -177,7 +177,7 @@ async def export_service_runs(
     )

     # Create presigned S3 link
-    generated_url: str = await s3_client.create_single_presigned_download_link(
+    generated_url: str = await s3_client.create_single_presigned_download_link(
         bucket=s3_bucket_name,
         object_key=s3_object_key,
         expiration_secs=_PRESIGNED_LINK_EXPIRATION_SEC,
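`parse_obj` is likewise gone in v2. Where the hunks above convert a repository-side model into an API schema, they dump the source model to a dict and re-validate it through a `TypeAdapter`. A sketch with invented two-field models (the real `PricingPlanToServiceGet` has more fields):

```python
from pydantic import BaseModel, TypeAdapter


class PlanToServiceDB(BaseModel):  # hypothetical repository-side model
    service_key: str
    pricing_plan_id: int


class PlanToServiceGet(BaseModel):  # hypothetical API-side schema
    service_key: str
    pricing_plan_id: int


db_row = PlanToServiceDB(service_key="simcore/services/comp/solver", pricing_plan_id=1)

# v1: PlanToServiceGet.parse_obj(db_row.dict())
# v2: dump the source model, then validate the dict into the target schema
api_item = TypeAdapter(PlanToServiceGet).validate_python(db_row.model_dump())
assert api_item.pricing_plan_id == 1
```

`PlanToServiceGet.model_validate(db_row.model_dump())` would be equivalent here; the `TypeAdapter` spelling simply mirrors the diff.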
diff --git a/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py b/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py
index cffb606fae54..569ddd28db15 100644
--- a/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py
+++ b/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py
@@ -68,7 +68,7 @@ def test_meta(
 ):
     response = client.get(f"/{API_VTAG}/meta")
     assert response.status_code == status.HTTP_200_OK
-    meta = _Meta.parse_obj(response.json())
+    meta = _Meta.model_validate(response.json())

-    response = client.get(meta.docs_url)
+    response = client.get(f"{meta.docs_url}")
     assert response.status_code == status.HTTP_200_OK
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
index 6f240d658eef..28ea92020f86 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/conftest.py
@@ -19,6 +19,7 @@
     RabbitResourceTrackingMessageType,
     RabbitResourceTrackingStartedMessage,
 )
+from pydantic import TypeAdapter
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.rabbitmq import RabbitMQRPCClient
@@ -176,7 +177,7 @@ async def assert_service_runs_db_row(
         )
         row = result.first()
         assert row
-        service_run_db = ServiceRunDB.from_orm(row)
+        service_run_db = ServiceRunDB.model_validate(row)
         if status:
             assert service_run_db.service_run_status == status
         return service_run_db
@@ -201,7 +202,7 @@ async def assert_credit_transactions_db_row(
         )
         row = result.first()
         assert row
-        credit_transaction_db = CreditTransactionDB.from_orm(row)
+        credit_transaction_db = CreditTransactionDB.model_validate(row)
         if modified_at:
             assert credit_transaction_db.modified > modified_at
         return credit_transaction_db
@@ -215,7 +216,9 @@ def random_rabbit_message_heartbeat(
     def _creator(**kwargs: dict[str, Any]) -> RabbitResourceTrackingHeartbeatMessage:
         msg_config = {"service_run_id": faker.uuid4(), **kwargs}

-        return RabbitResourceTrackingHeartbeatMessage(**msg_config)
+        return TypeAdapter(RabbitResourceTrackingHeartbeatMessage).validate_python(
+            msg_config
+        )

     return _creator
@@ -265,7 +268,9 @@ def _creator(**kwargs: dict[str, Any]) -> RabbitResourceTrackingStartedMessage:
             **kwargs,
         }

-        return RabbitResourceTrackingStartedMessage(**msg_config)
+        return TypeAdapter(RabbitResourceTrackingStartedMessage).validate_python(
+            msg_config
+        )

     return _creator
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_credit_transactions.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_credit_transactions.py
index 1e7098cecda2..244a74c62d74 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_credit_transactions.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_credit_transactions.py
@@ -56,7 +56,7 @@ async def test_credit_transactions_workflow(
     )
     assert response.status_code == status.HTTP_201_CREATED
     data = response.json()
-    data["credit_transaction_id"] == 1
+    assert data["credit_transaction_id"] == 1

     response = await async_client.post(
         url=f"{url}",
@@ -73,7 +73,7 @@
     )
     assert response.status_code == status.HTTP_201_CREATED
     data = response.json()
-    data["credit_transaction_id"] == 2
+    assert data["credit_transaction_id"] == 2

     response = await async_client.post(
         url=f"{url}",
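The `from_orm` → `model_validate` replacements above rely on the model opting into attribute access: v1's `orm_mode` becomes `from_attributes` in v2's `model_config`, and without it `model_validate` rejects non-dict inputs such as database rows. A sketch (both class names are stand-ins for the real `ServiceRunDB` and a SQLAlchemy row):

```python
from pydantic import BaseModel, ConfigDict


class FakeRow:  # stand-in for a SQLAlchemy result row
    service_run_id = "run-1"
    service_run_status = "RUNNING"


class ServiceRunModel(BaseModel):
    # v1: `class Config: orm_mode = True`  ->  v2: from_attributes
    model_config = ConfigDict(from_attributes=True)

    service_run_id: str
    service_run_status: str


service_run = ServiceRunModel.model_validate(FakeRow())
assert service_run.service_run_status == "RUNNING"
```

The credit-transaction hunks in the same span also fix a latent test bug: a bare `data[...] == 1` comparison is a no-op expression, so the `assert` keyword is what actually makes the check effective.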
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
index 609b0ebd54f0..8aea2c291bf6 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
@@ -76,7 +76,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=_PRICING_PLAN_ID,
                 unit_name="S",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -101,7 +103,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=_PRICING_PLAN_ID,
                 unit_name="M",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=True,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -126,7 +130,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=_PRICING_PLAN_ID,
                 unit_name="L",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -165,7 +171,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=_PRICING_PLAN_ID_2,
                 unit_name="XXL",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=True,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
index 4ec8d45bb72d..721a17e05c75 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
@@ -195,7 +195,9 @@ async def test_rpc_pricing_plans_with_units_workflow(
         data=PricingUnitWithCostCreate(
             pricing_plan_id=_pricing_plan_id,
             unit_name="SMALL",
-            unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+            unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][
+                0
+            ],
             default=True,
             specific_info=SpecificInfo(aws_ec2_instances=[]),
             cost_per_unit=Decimal(10),
@@ -227,7 +229,9 @@
             pricing_plan_id=_pricing_plan_id,
             pricing_unit_id=_first_pricing_unit_id,
             unit_name=_unit_name,
-            unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+            unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][
+                0
+            ],
             default=True,
             specific_info=SpecificInfo(aws_ec2_instances=[]),
             pricing_unit_cost_update=None,
@@ -246,7 +250,9 @@
             pricing_plan_id=_pricing_plan_id,
             pricing_unit_id=_first_pricing_unit_id,
             unit_name="MEDIUM",
-            unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+            unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][
+                0
+            ],
             default=True,
             specific_info=SpecificInfo(aws_ec2_instances=[]),
             pricing_unit_cost_update=PricingUnitCostUpdate(
@@ -277,7 +283,9 @@
         data=PricingUnitWithCostCreate(
             pricing_plan_id=_pricing_plan_id,
             unit_name="LARGE",
-            unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+            unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][
+                0
+            ],
             default=False,
             specific_info=SpecificInfo(aws_ec2_instances=[]),
             cost_per_unit=Decimal(20),
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
index c53c1accb90f..e433d8c13e61 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
@@ -4,7 +4,7 @@
 import pytest
 import sqlalchemy as sa
 from moto.server import ThreadedMotoServer
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from servicelib.rabbitmq import RabbitMQRPCClient
@@ -37,8 +37,7 @@ async def mocked_export(mocker: MockerFixture):
 async def mocked_presigned_link(mocker: MockerFixture):
     mock_presigned_link = mocker.patch(
         "simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.SimcoreS3API.create_single_presigned_download_link",
-        return_value=parse_obj_as(
-            AnyUrl,
+        return_value=TypeAdapter(AnyUrl).validate_python(
             "https://www.testing.com/",
         ),
     )
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task.py
index 2b719326bc94..3bf73f32f6d4 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task.py
@@ -152,7 +152,7 @@ async def test_process_event_functions(
     # Check max acceptable missed heartbeats reached before considering them as unhealthy
     with postgres_db.connect() as con:
         result = con.execute(sa.select(resource_tracker_service_runs))
-        service_run_db = [ServiceRunDB.from_orm(row) for row in result]
+        service_run_db = [ServiceRunDB.model_validate(row) for row in result]
     for service_run in service_run_db:
         if service_run.service_run_id in (
             _SERVICE_RUN_ID_OSPARC_10_MIN_OLD,
@@ -172,7 +172,7 @@
     with postgres_db.connect() as con:
         result = con.execute(sa.select(resource_tracker_service_runs))
-        service_run_db = [ServiceRunDB.from_orm(row) for row in result]
+        service_run_db = [ServiceRunDB.model_validate(row) for row in result]
     for service_run in service_run_db:
         if service_run.service_run_id in (
             _SERVICE_RUN_ID_OSPARC_10_MIN_OLD,
@@ -186,7 +186,9 @@
     with postgres_db.connect() as con:
         result = con.execute(sa.select(resource_tracker_credit_transactions))
-        credit_transaction_db = [CreditTransactionDB.from_orm(row) for row in result]
+        credit_transaction_db = [
+            CreditTransactionDB.model_validate(row) for row in result
+        ]
     for transaction in credit_transaction_db:
         if transaction.service_run_id in (
             _SERVICE_RUN_ID_OSPARC_10_MIN_OLD,
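The recurring `UnitExtraInfo` change in these test fixtures swaps v1's nested `Config` class for v2's `model_config` dict, so schema examples are reached by key rather than by attribute. A sketch with invented fields (the real `UnitExtraInfo` lives in `models_library`):

```python
from typing import Any

from pydantic import BaseModel, ConfigDict


class UnitInfoSketch(BaseModel):  # fields invented for illustration
    model_config = ConfigDict(
        json_schema_extra={"examples": [{"CPU": 4, "RAM": "16GiB"}]}
    )

    CPU: int
    RAM: str


# v1: UnitInfoSketch.Config.schema_extra["examples"][0]
# v2: model_config is a plain dict, so examples are indexed by key
example: dict[str, Any] = UnitInfoSketch.model_config["json_schema_extra"]["examples"][0]
assert example["CPU"] == 4
```

At runtime this is ordinary dict indexing; static type checkers may flag it because `json_schema_extra` is typed as a union, which is likely why the diff splits the subscripts across lines to satisfy the formatter rather than the type checker.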
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
index 7a5e2114c1d8..6b0048edf615 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
@@ -58,7 +58,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="S",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -83,7 +85,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="M",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=True,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -108,7 +112,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="L",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
index 4b6c1a0dfac7..0520a797af80 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
@@ -72,7 +72,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="S",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -97,7 +99,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="M",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=True,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
@@ -122,7 +126,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="L",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
index c1d62af5b23f..5e7f9d323b5e 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
@@ -68,7 +68,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
             resource_tracker_pricing_units.insert().values(
                 pricing_plan_id=1,
                 unit_name="S",
-                unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0],
+                unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][
+                    "examples"
+                ][0],
                 default=False,
                 specific_info={},
                 created=datetime.now(tz=timezone.utc),
diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in
index 2db400167242..26ba2b4b8357 100644
--- a/services/storage/requirements/_base.in
+++ b/services/storage/requirements/_base.in
@@ -6,6 +6,7 @@

 --requirement ../../../packages/aws-library/requirements/_base.in
+--requirement ../../../packages/common-library/requirements/_base.in
 --requirement ../../../packages/models-library/requirements/_base.in
 --requirement ../../../packages/postgres-database/requirements/_base.in
 --requirement ../../../packages/settings-library/requirements/_base.in
diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt
index 3c4b32c1e032..26d5d78bff96 100644
--- a/services/storage/requirements/ci.txt
+++ b/services/storage/requirements/ci.txt
@@ -13,6 +13,7 @@
 # installs this repo's packages
 simcore-aws-library @ ../../packages/aws-library/
+simcore-common-library @ ../../packages/common-library/
 simcore-models-library @ ../../packages/models-library/
 simcore-postgres-database @ ../../packages/postgres-database/
 pytest-simcore @ ../../packages/pytest-simcore/
diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt
index 0b2b3ae29389..97aefedee51d 100644
--- a/services/storage/requirements/dev.txt
+++ b/services/storage/requirements/dev.txt
@@ -13,6 +13,7 @@
 # installs this repo's packages
 --editable ../../packages/aws-library/
+--editable ../../packages/common-library
 --editable ../../packages/models-library
 --editable ../../packages/postgres-database/
 --editable ../../packages/pytest-simcore/
diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py
index 4ae743afef95..eb94feb9bb41 100644
--- a/services/storage/src/simcore_service_storage/handlers_health.py
+++ b/services/storage/src/simcore_service_storage/handlers_health.py
@@ -11,6 +11,7 @@
 from models_library.api_schemas_storage import HealthCheck, S3BucketName
 from models_library.app_diagnostics import AppStatusCheck
 from models_library.utils.json_serialization import json_dumps
+from pydantic import TypeAdapter
 from servicelib.rest_constants import RESPONSE_MODEL_POLICY

 from ._meta import API_VERSION, API_VTAG, PROJECT_NAME, VERSION
@@ -53,7 +54,9 @@ async def get_status(request: web.Request) -> web.Response:
     s3_state = (
         "connected"
         if await get_s3_client(request.app).bucket_exists(
-            bucket=S3BucketName(app_settings.STORAGE_S3.S3_BUCKET_NAME)
+            bucket=TypeAdapter(S3BucketName).validate_python(
+                app_settings.STORAGE_S3.S3_BUCKET_NAME
+            )
         )
         else "no access to S3 bucket"
     )
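The `S3BucketName(...)` constructor calls disappear throughout these hunks because in pydantic v2 such names are typically `Annotated` string aliases, which are not callable. Assuming `S3BucketName` is a constrained `str` (its exact constraints are defined in `models_library.api_schemas_storage`, not shown here), validation goes through a `TypeAdapter`:

```python
from typing import Annotated

from pydantic import StringConstraints, TypeAdapter

# Assumed stand-in: the real S3BucketName constraints may differ
BucketNameSketch = Annotated[str, StringConstraints(min_length=3, max_length=63)]

# v1 allowed S3BucketName("master-simcore") because constrained types were
# generated classes; in v2 an Annotated alias cannot be called, so:
bucket = TypeAdapter(BucketNameSketch).validate_python("master-simcore")
assert bucket == "master-simcore"
```

Unlike the plain cast-style constructor in v1, the adapter actually enforces the constraints at the call site.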
diff --git a/services/storage/tests/unit/test_dsm_soft_links.py b/services/storage/tests/unit/test_dsm_soft_links.py
index d2d1c6acd65e..ed0e01ea7f0c 100644
--- a/services/storage/tests/unit/test_dsm_soft_links.py
+++ b/services/storage/tests/unit/test_dsm_soft_links.py
@@ -13,7 +13,7 @@
 from models_library.projects_nodes_io import SimcoreS3FileID
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
-from pydantic import ByteSize
+from pydantic import ByteSize, TypeAdapter
 from simcore_postgres_database.storage_models import file_meta_data
 from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB
 from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -34,7 +34,7 @@ async def output_file(
     file = FileMetaData.from_simcore_node(
         user_id=user_id,
         file_id=SimcoreS3FileID(f"{project_id}/{node_id}/filename.txt"),
-        bucket=S3BucketName("master-simcore"),
+        bucket=TypeAdapter(S3BucketName).validate_python("master-simcore"),
         location_id=SimcoreS3DataManager.get_location_id(),
         location_name=SimcoreS3DataManager.get_location_name(),
         sha256_checksum=faker.sha256(),
diff --git a/services/storage/tests/unit/test_models.py b/services/storage/tests/unit/test_models.py
index 82bd900b772b..0dbab6821d24 100644
--- a/services/storage/tests/unit/test_models.py
+++ b/services/storage/tests/unit/test_models.py
@@ -4,7 +4,7 @@
 from models_library.api_schemas_storage import S3BucketName
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID
-from pydantic import ValidationError, parse_obj_as
+from pydantic import TypeAdapter, ValidationError
 from simcore_service_storage.models import FileMetaData
 from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -15,7 +15,7 @@
 )
 def test_file_id_raises_error(file_id: str):
     with pytest.raises(ValidationError):
-        parse_obj_as(StorageFileID, file_id)
+        TypeAdapter(StorageFileID).validate_python(file_id)
@@ -38,17 +38,17 @@ def test_file_id_raises_error(file_id: str):
     ],
 )
 def test_file_id(file_id: str):
-    parsed_file_id = parse_obj_as(StorageFileID, file_id)
+    parsed_file_id = TypeAdapter(StorageFileID).validate_python(file_id)
     assert parsed_file_id
     assert parsed_file_id == file_id


 def test_fmd_build():
-    file_id = parse_obj_as(SimcoreS3FileID, f"api/{uuid.uuid4()}/xx.dat")
+    file_id = TypeAdapter(SimcoreS3FileID).validate_python(f"api/{uuid.uuid4()}/xx.dat")
     fmd = FileMetaData.from_simcore_node(
         user_id=12,
         file_id=file_id,
-        bucket=S3BucketName("test-bucket"),
+        bucket=TypeAdapter(S3BucketName).validate_python("test-bucket"),
         location_id=SimcoreS3DataManager.get_location_id(),
         location_name=SimcoreS3DataManager.get_location_name(),
         sha256_checksum=None,
@@ -64,11 +64,13 @@ def test_fmd_build():
     assert fmd.location_id == SimcoreS3DataManager.get_location_id()
     assert fmd.bucket_name == "test-bucket"

-    file_id = parse_obj_as(SimcoreS3FileID, f"{uuid.uuid4()}/{uuid.uuid4()}/xx.dat")
+    file_id = TypeAdapter(SimcoreS3FileID).validate_python(
+        f"{uuid.uuid4()}/{uuid.uuid4()}/xx.dat"
+    )
     fmd = FileMetaData.from_simcore_node(
         user_id=12,
         file_id=file_id,
-        bucket=S3BucketName("test-bucket"),
+        bucket=TypeAdapter(S3BucketName).validate_python("test-bucket"),
         location_id=SimcoreS3DataManager.get_location_id(),
         location_name=SimcoreS3DataManager.get_location_name(),
         sha256_checksum=None,
diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py
index 1b71a1c29d58..90dca22d42dd 100644
--- a/services/storage/tests/unit/test_utils.py
+++ b/services/storage/tests/unit/test_utils.py
@@ -16,7 +16,7 @@
 from faker import Faker
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
-from pydantic import ByteSize, HttpUrl, parse_obj_as
+from pydantic import ByteSize, HttpUrl, TypeAdapter, parse_obj_as
 from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID
 from simcore_service_storage.models import ETag, FileMetaData, S3BucketName, UploadID
 from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -79,7 +79,7 @@ def test_file_entry_valid(
     fmd = FileMetaData.from_simcore_node(
         user_id=faker.pyint(min_value=1),
         file_id=file_id,
-        bucket=S3BucketName("pytest-bucket"),
+        bucket=TypeAdapter(S3BucketName).validate_python("pytest-bucket"),
         location_id=SimcoreS3DataManager.get_location_id(),
         location_name=SimcoreS3DataManager.get_location_name(),
         sha256_checksum=None,
diff --git a/services/web/server/src/simcore_service_webserver/projects/settings.py b/services/web/server/src/simcore_service_webserver/projects/settings.py
index 727dcb51b356..8a46b8def4fb 100644
--- a/services/web/server/src/simcore_service_webserver/projects/settings.py
+++ b/services/web/server/src/simcore_service_webserver/projects/settings.py
@@ -1,6 +1,7 @@
 from datetime import timedelta

 from aiohttp import web
+from common_library.pydantic_validators import validate_numeric_string_as_timedelta
 from pydantic import ByteSize, Field, NonNegativeInt, parse_obj_as
 from settings_library.base import BaseCustomSettings
@@ -23,6 +24,10 @@ class ProjectsSettings(BaseCustomSettings):
         description="interval after which services need to be idle in order to be considered inactive",
     )

+    _validate_projects_inactivity_interval = validate_numeric_string_as_timedelta(
+        "PROJECTS_INACTIVITY_INTERVAL"
+    )
+

 def get_plugin_settings(app: web.Application) -> ProjectsSettings:
     settings = app[APP_SETTINGS_KEY].WEBSERVER_PROJECTS
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py
index 3ef317631ed0..a79c4865f121 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py
@@ -2,6 +2,7 @@
 from typing import Any, ClassVar

 from aiohttp import web
+from common_library.pydantic_validators import validate_numeric_string_as_timedelta
 from pydantic import ByteSize, HttpUrl, parse_obj_as, validator
 from pydantic.fields import Field
 from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY
@@ -50,6 +51,10 @@ def is_login_required(self):
         """
         return not self.STUDIES_ACCESS_ANONYMOUS_ALLOWED

+    _validate_studies_guest_account_lifetime = validate_numeric_string_as_timedelta(
+        "STUDIES_GUEST_ACCOUNT_LIFETIME"
+    )
+
     class Config:
         schema_extra: ClassVar[dict[str, Any]] = {
             "example": {
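`validate_numeric_string_as_timedelta` comes from the new common-library package and its implementation is not part of this diff. A minimal sketch of what such a helper plausibly does, assuming it accepts legacy bare-number strings as seconds before pydantic's own timedelta parsing runs (all names below other than the helper itself are invented for illustration):

```python
from datetime import timedelta

from pydantic import BaseModel, field_validator


def validate_numeric_string_as_timedelta(field: str):
    """Sketch only: interpret a bare numeric env value as seconds."""

    def _try_parse_as_seconds(cls, value):
        if isinstance(value, str):
            try:
                return timedelta(seconds=float(value))
            except ValueError:
                pass  # fall through to pydantic's normal timedelta parsing
        return value

    return field_validator(field, mode="before")(_try_parse_as_seconds)


class SettingsSketch(BaseModel):  # stand-in for a BaseCustomSettings subclass
    PROJECTS_INACTIVITY_INTERVAL: timedelta = timedelta(seconds=20)

    _validate_projects_inactivity_interval = validate_numeric_string_as_timedelta(
        "PROJECTS_INACTIVITY_INTERVAL"
    )


assert SettingsSketch(
    PROJECTS_INACTIVITY_INTERVAL="10"
).PROJECTS_INACTIVITY_INTERVAL == timedelta(seconds=10)
```

This keeps deployments that still export plain integers (seconds) working while v2's stricter parsing otherwise expects a proper duration format.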
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index d81dbbdb352c..4c8b41e48611 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -28,7 +28,7 @@
 )
 from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
 from models_library.projects import ProjectID
-from models_library.projects_access import Owner, PositiveIntWithExclusiveMinimumRemoved
+from models_library.projects_access import Owner
 from models_library.projects_state import (
     ProjectLocked,
     ProjectRunningState,
@@ -42,6 +42,7 @@
     ServiceResourcesDictHelpers,
 )
 from models_library.utils.fastapi_encoders import jsonable_encoder
+from pydantic import PositiveInt
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.typing_env import EnvVarsDict
 from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in
@@ -1310,7 +1311,7 @@ async def test_open_shared_project_2_users_locked(
     expected_project_state_client_2.locked.value = True
     expected_project_state_client_2.locked.status = ProjectStatus.OPENED
     owner2 = Owner(
-        user_id=PositiveIntWithExclusiveMinimumRemoved(user_2["id"]),
+        user_id=PositiveInt(user_2["id"]),
         first_name=user_2.get("first_name", None),
        last_name=user_2.get("last_name", None),
     )
diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt
index bc2672270a70..9703160dd63e 100644
--- a/tests/environment-setup/requirements/requirements.txt
+++ b/tests/environment-setup/requirements/requirements.txt
@@ -1,3 +1,5 @@
+annotated-types==0.7.0
+    # via pydantic
 iniconfig==2.0.0
     # via pytest
 packaging==24.1
@@ -6,16 +8,20 @@ packaging==24.1
     #   pytest
     #   pytest-sugar
 pluggy==1.5.0
     # via pytest
-pydantic==1.10.18
+pydantic==2.9.2
     # via
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
-    #   -c requirements/../../../packages/settings-library/requirements/_base.in
     #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/requirements.in
+pydantic-core==2.23.4
+    # via pydantic
 pytest==8.3.3
     # via
@@ -24,9 +30,13 @@ pytest==8.3.3
     #   -r requirements/requirements.in
     #   pytest-asyncio
     #   pytest-instafail
     #   pytest-sugar
 pytest-asyncio==0.23.8
     # via
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/requirements.in
@@ -38,13 +48,19 @@ pytest-sugar==1.0.0
     # via -r requirements/requirements.in
 pyyaml==6.0.2
     # via
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
     #   -r requirements/requirements.in
 termcolor==2.4.0
     # via pytest-sugar
 typing-extensions==4.12.2
-    # via pydantic
+    # via
+    #   pydantic
+    #   pydantic-core
diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt
index ef23afb0200c..836be727d0ca 100644
--- a/tests/public-api/requirements/_base.txt
+++ b/tests/public-api/requirements/_base.txt
@@ -1,7 +1,10 @@
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.6.0
     # via httpx
 certifi==2024.8.30
     # via
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   httpcore
     #   httpx
@@ -14,6 +17,7 @@ httpcore==1.0.5
     # via httpx
 httpx==0.27.2
     # via
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   osparc
 idna==3.10
@@ -32,14 +36,23 @@ osparc-client==0.6.6
     # via osparc
 packaging==24.1
     # via osparc
-pydantic==1.10.18
+pydantic==2.9.2
     # via
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-settings==2.5.2
+    # via -r requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 python-dateutil==2.9.0.post0
     # via osparc-client
+python-dotenv==1.0.1
+    # via pydantic-settings
 rich==13.8.1
     # via
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
@@ -63,6 +76,7 @@ typer==0.12.5
 typing-extensions==4.12.2
     # via
     #   pydantic
+    #   pydantic-core
     #   typer
 urllib3==2.2.3
     # via
diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt
index a468b66afafd..467c300de839 100644
--- a/tests/swarm-deploy/requirements/_test.txt
+++ b/tests/swarm-deploy/requirements/_test.txt
@@ -26,16 +26,28 @@ aiohappyeyeballs==2.4.0
     # via aiohttp
 aiohttp==3.10.5
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -52,6 +64,8 @@ alembic==1.13.3
     # via
     #   -r requirements/../../../packages/postgres-database/requirements/_migration.txt
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in
     #   -r requirements/_test.in
+annotated-types==0.7.0
+    # via pydantic
 anyio==4.6.0
     # via
     #   fast-depends
@@ -79,16 +93,28 @@ attrs==24.2.0
     #   referencing
 certifi==2024.8.30
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -163,16 +189,28 @@ jsonschema-specifications==2023.7.1
     # via jsonschema
 mako==1.3.5
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -237,16 +275,28 @@ opentelemetry-util-http==0.48b0
     # via opentelemetry-instrumentation-requests
 orjson==3.10.7
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -277,34 +327,78 @@ psycopg2-binary==2.9.9
     # via
     #   aiopg
     #   sqlalchemy
-pydantic==1.10.18
+pydantic==2.9.2
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
+    #   -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/../../../packages/service-library/requirements/_base.in
+    #   -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/_base.in
     #   fast-depends
+    #   pydantic-extra-types
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+pydantic-settings==2.5.2
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 pyinstrument==4.7.3
@@ -320,16 +414,28 @@ pytest==8.3.3
     #   pytest-sugar
 pytest-asyncio==0.23.8
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -345,19 +451,33 @@ pytest-sugar==1.0.0
 python-dateutil==2.9.0.post0
     # via arrow
 python-dotenv==1.0.1
-    # via -r requirements/_test.in
+    # via
+    #   -r requirements/_test.in
+    #   pydantic-settings
 pyyaml==6.0.2
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -366,16 +486,28 @@ pyyaml==6.0.2 # -r requirements/_test.in redis==5.0.4 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -417,16 +549,28 @@ sniffio==1.3.1 # via anyio sqlalchemy==1.4.54 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -471,19 +615,32 @@ typing-extensions==4.12.2 # opentelemetry-sdk # pint # pydantic + # pydantic-core # typer urllib3==2.2.3 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt