8 changes: 4 additions & 4 deletions packages/aws-library/src/aws_library/s3/_client.py
Original file line number Diff line number Diff line change
@@ -13,7 +13,7 @@
from boto3.s3.transfer import TransferConfig
from botocore import exceptions as botocore_exc
from botocore.client import Config
from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter
from common_library.pydantic_networks_extension import AnyUrlLegacy
from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart
from models_library.basic_types import SHA256Str
from pydantic import AnyUrl, ByteSize, TypeAdapter
@@ -264,7 +264,7 @@ async def create_single_presigned_download_link(
Params={"Bucket": bucket, "Key": object_key},
ExpiresIn=expiration_secs,
)
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"

@s3_exception_handler(_logger)
async def create_single_presigned_upload_link(
@@ -277,7 +277,7 @@ async def create_single_presigned_upload_link(
Params={"Bucket": bucket, "Key": object_key},
ExpiresIn=expiration_secs,
)
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"

@s3_exception_handler(_logger)
async def create_multipart_upload_links(
@@ -474,6 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool:

@staticmethod
def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl:
return AnyUrlLegacyAdapter.validate_python(
return TypeAdapter(AnyUrlLegacy).validate_python(
f"s3://{bucket}/{urllib.parse.quote(object_key)}"
)
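
The change above swaps the removed AnyUrlLegacyAdapter constant for an inline TypeAdapter over AnyUrlLegacy. A minimal sketch of the new call, assuming AnyUrlLegacy validates like pydantic's AnyUrl (the example URL is made up):

from common_library.pydantic_networks_extension import AnyUrlLegacy
from pydantic import TypeAdapter

# validate a raw URL string, then render it back to str via an f-string,
# the same shape used by the presigned-link helpers above
validated = TypeAdapter(AnyUrlLegacy).validate_python("s3://example-bucket/some/object-key")
link: str = f"{validated}"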
58 changes: 28 additions & 30 deletions packages/aws-library/tests/test_s3_client.py
Original file line number Diff line number Diff line change
@@ -14,7 +14,7 @@
from collections.abc import AsyncIterator, Awaitable, Callable
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Final
from typing import Any

import botocore.exceptions
import pytest
@@ -49,8 +49,6 @@
from types_aiobotocore_s3 import S3Client
from types_aiobotocore_s3.literals import BucketLocationConstraintType

_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize)


@pytest.fixture
async def simcore_s3_api(
@@ -113,7 +111,7 @@ async def _(
file,
MultiPartUploadLinks(
upload_id="fake",
chunk_size=_BYTE_SIZE_ADAPTER.validate_python(file.stat().st_size),
chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size),
urls=[presigned_url],
),
)
@@ -137,7 +135,7 @@ async def with_uploaded_file_on_s3(
s3_client: S3Client,
with_s3_bucket: S3BucketName,
) -> AsyncIterator[UploadedFile]:
test_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("10Kib"))
test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib"))
await s3_client.upload_file(
Filename=f"{test_file}",
Bucket=with_s3_bucket,
@@ -592,7 +590,7 @@ async def test_undelete_file(
assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size

# upload another file on top of the existing one
new_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("5Kib"))
new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib"))
await s3_client.upload_file(
Filename=f"{new_file}",
Bucket=with_s3_bucket,
@@ -747,7 +745,7 @@ async def test_create_single_presigned_upload_link(
[Path, str, S3BucketName, S3ObjectKey], Awaitable[None]
],
):
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib"))
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
s3_object_key = file.name
presigned_url = await simcore_s3_api.create_single_presigned_upload_link(
bucket=with_s3_bucket,
@@ -775,7 +773,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais
create_file_of_size: Callable[[ByteSize], Path],
default_expiration_time_seconds: int,
):
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib"))
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
s3_object_key = file.name
with pytest.raises(S3BucketInvalidError):
await simcore_s3_api.create_single_presigned_upload_link(
@@ -1082,7 +1080,7 @@ async def test_copy_file_invalid_raises(
create_file_of_size: Callable[[ByteSize], Path],
faker: Faker,
):
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1MiB"))
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB"))
uploaded_file = await upload_file(file)
dst_object_key = faker.file_name()
# NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError
@@ -1107,9 +1105,9 @@ async def test_copy_file_invalid_raises(
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
)
],
ids=byte_size_ids,
@@ -1133,9 +1131,9 @@ async def test_get_directory_metadata(
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
)
],
ids=byte_size_ids,
@@ -1165,9 +1163,9 @@ async def test_get_directory_metadata_raises(
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
)
],
ids=byte_size_ids,
@@ -1201,9 +1199,9 @@ async def test_delete_file_recursively(
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
)
],
ids=byte_size_ids,
@@ -1239,9 +1237,9 @@ async def test_delete_file_recursively_raises(
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
)
],
ids=byte_size_ids,
@@ -1339,14 +1337,14 @@ def run_async_test(*args, **kwargs) -> None:
"directory_size, min_file_size, max_file_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
_BYTE_SIZE_ADAPTER.validate_python("1B"),
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
TypeAdapter(ByteSize).validate_python("1Mib"),
TypeAdapter(ByteSize).validate_python("1B"),
TypeAdapter(ByteSize).validate_python("10Kib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("500Mib"),
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
_BYTE_SIZE_ADAPTER.validate_python("50Mib"),
TypeAdapter(ByteSize).validate_python("500Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("50Mib"),
),
],
ids=byte_size_ids,
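
The test changes above apply the same inline-adapter pattern, dropping the module-level _BYTE_SIZE_ADAPTER constant in favour of TypeAdapter(ByteSize) at each call site; a small hedged sketch of that pattern:

from pydantic import ByteSize, TypeAdapter

# "10Kib" is parsed with binary units, i.e. 10 * 1024 bytes
ten_kib: ByteSize = TypeAdapter(ByteSize).validate_python("10Kib")
assert ten_kib == 10240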
46 changes: 21 additions & 25 deletions packages/aws-library/tests/test_s3_utils.py
Original file line number Diff line number Diff line change
@@ -4,8 +4,6 @@
# pylint: disable=unused-variable


from typing import Final

import pytest
from aws_library.s3._utils import (
_MULTIPART_MAX_NUMBER_OF_PARTS,
@@ -15,61 +13,59 @@
from pydantic import ByteSize, TypeAdapter
from pytest_simcore.helpers.parametrizations import byte_size_ids

_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize)


@pytest.mark.parametrize(
"file_size, expected_num_chunks, expected_chunk_size",
[
(
_BYTE_SIZE_ADAPTER.validate_python("5Mib"),
TypeAdapter(ByteSize).validate_python("5Mib"),
1,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
1,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("20Mib"),
TypeAdapter(ByteSize).validate_python("20Mib"),
2,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("50Mib"),
TypeAdapter(ByteSize).validate_python("50Mib"),
5,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("150Mib"),
TypeAdapter(ByteSize).validate_python("150Mib"),
15,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("550Mib"),
TypeAdapter(ByteSize).validate_python("550Mib"),
55,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("560Gib"),
TypeAdapter(ByteSize).validate_python("560Gib"),
5735,
_BYTE_SIZE_ADAPTER.validate_python("100Mib"),
TypeAdapter(ByteSize).validate_python("100Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("5Tib"),
TypeAdapter(ByteSize).validate_python("5Tib"),
8739,
_BYTE_SIZE_ADAPTER.validate_python("600Mib"),
TypeAdapter(ByteSize).validate_python("600Mib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("15Tib"),
TypeAdapter(ByteSize).validate_python("15Tib"),
7680,
_BYTE_SIZE_ADAPTER.validate_python("2Gib"),
TypeAdapter(ByteSize).validate_python("2Gib"),
),
(
_BYTE_SIZE_ADAPTER.validate_python("9431773844"),
TypeAdapter(ByteSize).validate_python("9431773844"),
900,
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
TypeAdapter(ByteSize).validate_python("10Mib"),
),
],
ids=byte_size_ids,
@@ -83,7 +79,7 @@ def test_compute_num_file_chunks(


def test_enormous_file_size_raises_value_error():
enormous_file_size = _BYTE_SIZE_ADAPTER.validate_python(
enormous_file_size = TypeAdapter(ByteSize).validate_python(
(
max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE)
* _MULTIPART_MAX_NUMBER_OF_PARTS
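
One row of the parametrization above can be checked by hand; a short sketch, assuming the utility divides the file size by the chosen target part size and rounds up:

import math

file_size = 560 * 1024**3   # 560 GiB
part_size = 100 * 1024**2   # 100 MiB, the expected chunk size for that row
assert math.ceil(file_size / part_size) == 5735  # matches the (560Gib, 5735, 100Mib) row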

This file was deleted.

35 changes: 35 additions & 0 deletions packages/common-library/src/common_library/pydantic_validators.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import datetime
import warnings
from datetime import timedelta

from pydantic import TypeAdapter, field_validator


def timedelta_try_convert_str_to_float(field: str):
"""Transforms a float/int number into a valid datetime as it used to work in the past"""

def _try_convert_str_to_float_or_return(
v: datetime.timedelta | str | float,
) -> datetime.timedelta | str | float:
if isinstance(v, str):
try:
converted_value = float(v)

iso8601_format = TypeAdapter(timedelta).dump_python(
timedelta(seconds=converted_value), mode="json"
)
warnings.warn(
f"{field}='{v}' -should be set to-> {field}='{iso8601_format}' (ISO8601 datetime format). "
"Please also convert the value in the >>OPS REPOSITORY<<. "
"For details: https://docs.pydantic.dev/1.10/usage/types/#datetime-types.",
DeprecationWarning,
stacklevel=8,
)

return converted_value
except ValueError:
# returns format like "1:00:00"
return v
return v

return field_validator(field, mode="before")(_try_convert_str_to_float_or_return)
40 changes: 40 additions & 0 deletions packages/common-library/tests/test_pydantic_validators.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
from datetime import timedelta

import pytest
from common_library.pydantic_validators import timedelta_try_convert_str_to_float
from faker import Faker
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict


def test_validate_timedelta_in_legacy_mode(
monkeypatch: pytest.MonkeyPatch, faker: Faker
):
class Settings(BaseSettings):
APP_NAME: str
REQUEST_TIMEOUT: timedelta = Field(default=timedelta(seconds=40))

_try_convert_request_timeout = timedelta_try_convert_str_to_float(
"REQUEST_TIMEOUT"
)

model_config = SettingsConfigDict()

app_name = faker.pystr()
env_vars: dict[str, str | bool] = {"APP_NAME": app_name}

# without timedelta
setenvs_from_dict(monkeypatch, env_vars)
settings = Settings()
print(settings.model_dump())
assert app_name == settings.APP_NAME
assert timedelta(seconds=40) == settings.REQUEST_TIMEOUT

# with timedelta in seconds
env_vars["REQUEST_TIMEOUT"] = "5555"
setenvs_from_dict(monkeypatch, env_vars)
settings = Settings()
print(settings.model_dump())
assert app_name == settings.APP_NAME
assert timedelta(seconds=5555) == settings.REQUEST_TIMEOUT
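
For context on the DeprecationWarning emitted by the new validator, ISO 8601 duration strings are assumed to be handled by pydantic v2 itself, so the shim only needs to rescue the legacy plain-number style; a minimal sketch of that assumption (not part of this PR):

from datetime import timedelta

from pydantic import TypeAdapter

# an ISO 8601 duration parses natively, no legacy conversion involved
assert TypeAdapter(timedelta).validate_python("PT1H30M") == timedelta(hours=1, minutes=30)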
Original file line number Diff line number Diff line change
@@ -11,7 +11,7 @@


class AioDockerContainerSpec(ContainerSpec):
Env: dict[str, str | None] | None = Field( # type: ignore
Env: dict[str, str | None] | None = Field(
default=None,
description="aiodocker expects here a dictionary and re-convert it back internally`.\n",
)