Skip to content

Commit 01b4c09

Browse files
GitHK and Andrei Neagu authored
🎨 pydantic2 migration: fixed unit-tests for dynamic-sidecar (#6534)
Co-authored-by: Andrei Neagu <[email protected]>
1 parent c9a48cc commit 01b4c09

File tree

68 files changed

+903
-400
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

68 files changed

+903
-400
lines changed

packages/aws-library/src/aws_library/s3/_client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from boto3.s3.transfer import TransferConfig
1414
from botocore import exceptions as botocore_exc
1515
from botocore.client import Config
16-
from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter
16+
from common_library.pydantic_networks_extension import AnyUrlLegacy
1717
from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart
1818
from models_library.basic_types import SHA256Str
1919
from pydantic import AnyUrl, ByteSize, TypeAdapter
@@ -264,7 +264,7 @@ async def create_single_presigned_download_link(
264264
Params={"Bucket": bucket, "Key": object_key},
265265
ExpiresIn=expiration_secs,
266266
)
267-
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
267+
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"
268268

269269
@s3_exception_handler(_logger)
270270
async def create_single_presigned_upload_link(
@@ -277,7 +277,7 @@ async def create_single_presigned_upload_link(
277277
Params={"Bucket": bucket, "Key": object_key},
278278
ExpiresIn=expiration_secs,
279279
)
280-
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
280+
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"
281281

282282
@s3_exception_handler(_logger)
283283
async def create_multipart_upload_links(
@@ -474,6 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool:
474474

475475
@staticmethod
476476
def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl:
477-
return AnyUrlLegacyAdapter.validate_python(
477+
return TypeAdapter(AnyUrlLegacy).validate_python(
478478
f"s3://{bucket}/{urllib.parse.quote(object_key)}"
479479
)

packages/aws-library/tests/test_s3_client.py

Lines changed: 28 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from collections.abc import AsyncIterator, Awaitable, Callable
1515
from dataclasses import dataclass
1616
from pathlib import Path
17-
from typing import Any, Final
17+
from typing import Any
1818

1919
import botocore.exceptions
2020
import pytest
@@ -49,8 +49,6 @@
4949
from types_aiobotocore_s3 import S3Client
5050
from types_aiobotocore_s3.literals import BucketLocationConstraintType
5151

52-
_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize)
53-
5452

5553
@pytest.fixture
5654
async def simcore_s3_api(
@@ -113,7 +111,7 @@ async def _(
113111
file,
114112
MultiPartUploadLinks(
115113
upload_id="fake",
116-
chunk_size=_BYTE_SIZE_ADAPTER.validate_python(file.stat().st_size),
114+
chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size),
117115
urls=[presigned_url],
118116
),
119117
)
@@ -137,7 +135,7 @@ async def with_uploaded_file_on_s3(
137135
s3_client: S3Client,
138136
with_s3_bucket: S3BucketName,
139137
) -> AsyncIterator[UploadedFile]:
140-
test_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("10Kib"))
138+
test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib"))
141139
await s3_client.upload_file(
142140
Filename=f"{test_file}",
143141
Bucket=with_s3_bucket,
@@ -592,7 +590,7 @@ async def test_undelete_file(
592590
assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size
593591

594592
# upload another file on top of the existing one
595-
new_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("5Kib"))
593+
new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib"))
596594
await s3_client.upload_file(
597595
Filename=f"{new_file}",
598596
Bucket=with_s3_bucket,
@@ -747,7 +745,7 @@ async def test_create_single_presigned_upload_link(
747745
[Path, str, S3BucketName, S3ObjectKey], Awaitable[None]
748746
],
749747
):
750-
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib"))
748+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
751749
s3_object_key = file.name
752750
presigned_url = await simcore_s3_api.create_single_presigned_upload_link(
753751
bucket=with_s3_bucket,
@@ -775,7 +773,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais
775773
create_file_of_size: Callable[[ByteSize], Path],
776774
default_expiration_time_seconds: int,
777775
):
778-
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib"))
776+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
779777
s3_object_key = file.name
780778
with pytest.raises(S3BucketInvalidError):
781779
await simcore_s3_api.create_single_presigned_upload_link(
@@ -1082,7 +1080,7 @@ async def test_copy_file_invalid_raises(
10821080
create_file_of_size: Callable[[ByteSize], Path],
10831081
faker: Faker,
10841082
):
1085-
file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1MiB"))
1083+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB"))
10861084
uploaded_file = await upload_file(file)
10871085
dst_object_key = faker.file_name()
10881086
# NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError
@@ -1107,9 +1105,9 @@ async def test_copy_file_invalid_raises(
11071105
"directory_size, min_file_size, max_file_size",
11081106
[
11091107
(
1110-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1111-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1112-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1108+
TypeAdapter(ByteSize).validate_python("1Mib"),
1109+
TypeAdapter(ByteSize).validate_python("1B"),
1110+
TypeAdapter(ByteSize).validate_python("10Kib"),
11131111
)
11141112
],
11151113
ids=byte_size_ids,
@@ -1133,9 +1131,9 @@ async def test_get_directory_metadata(
11331131
"directory_size, min_file_size, max_file_size",
11341132
[
11351133
(
1136-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1137-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1138-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1134+
TypeAdapter(ByteSize).validate_python("1Mib"),
1135+
TypeAdapter(ByteSize).validate_python("1B"),
1136+
TypeAdapter(ByteSize).validate_python("10Kib"),
11391137
)
11401138
],
11411139
ids=byte_size_ids,
@@ -1165,9 +1163,9 @@ async def test_get_directory_metadata_raises(
11651163
"directory_size, min_file_size, max_file_size",
11661164
[
11671165
(
1168-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1169-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1170-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1166+
TypeAdapter(ByteSize).validate_python("1Mib"),
1167+
TypeAdapter(ByteSize).validate_python("1B"),
1168+
TypeAdapter(ByteSize).validate_python("10Kib"),
11711169
)
11721170
],
11731171
ids=byte_size_ids,
@@ -1201,9 +1199,9 @@ async def test_delete_file_recursively(
12011199
"directory_size, min_file_size, max_file_size",
12021200
[
12031201
(
1204-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1205-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1206-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1202+
TypeAdapter(ByteSize).validate_python("1Mib"),
1203+
TypeAdapter(ByteSize).validate_python("1B"),
1204+
TypeAdapter(ByteSize).validate_python("10Kib"),
12071205
)
12081206
],
12091207
ids=byte_size_ids,
@@ -1239,9 +1237,9 @@ async def test_delete_file_recursively_raises(
12391237
"directory_size, min_file_size, max_file_size",
12401238
[
12411239
(
1242-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1243-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1244-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1240+
TypeAdapter(ByteSize).validate_python("1Mib"),
1241+
TypeAdapter(ByteSize).validate_python("1B"),
1242+
TypeAdapter(ByteSize).validate_python("10Kib"),
12451243
)
12461244
],
12471245
ids=byte_size_ids,
@@ -1339,14 +1337,14 @@ def run_async_test(*args, **kwargs) -> None:
13391337
"directory_size, min_file_size, max_file_size",
13401338
[
13411339
(
1342-
_BYTE_SIZE_ADAPTER.validate_python("1Mib"),
1343-
_BYTE_SIZE_ADAPTER.validate_python("1B"),
1344-
_BYTE_SIZE_ADAPTER.validate_python("10Kib"),
1340+
TypeAdapter(ByteSize).validate_python("1Mib"),
1341+
TypeAdapter(ByteSize).validate_python("1B"),
1342+
TypeAdapter(ByteSize).validate_python("10Kib"),
13451343
),
13461344
(
1347-
_BYTE_SIZE_ADAPTER.validate_python("500Mib"),
1348-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
1349-
_BYTE_SIZE_ADAPTER.validate_python("50Mib"),
1345+
TypeAdapter(ByteSize).validate_python("500Mib"),
1346+
TypeAdapter(ByteSize).validate_python("10Mib"),
1347+
TypeAdapter(ByteSize).validate_python("50Mib"),
13501348
),
13511349
],
13521350
ids=byte_size_ids,

packages/aws-library/tests/test_s3_utils.py

Lines changed: 21 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,6 @@
44
# pylint: disable=unused-variable
55

66

7-
from typing import Final
8-
97
import pytest
108
from aws_library.s3._utils import (
119
_MULTIPART_MAX_NUMBER_OF_PARTS,
@@ -15,61 +13,59 @@
1513
from pydantic import ByteSize, TypeAdapter
1614
from pytest_simcore.helpers.parametrizations import byte_size_ids
1715

18-
_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize)
19-
2016

2117
@pytest.mark.parametrize(
2218
"file_size, expected_num_chunks, expected_chunk_size",
2319
[
2420
(
25-
_BYTE_SIZE_ADAPTER.validate_python("5Mib"),
21+
TypeAdapter(ByteSize).validate_python("5Mib"),
2622
1,
27-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
23+
TypeAdapter(ByteSize).validate_python("10Mib"),
2824
),
2925
(
30-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
26+
TypeAdapter(ByteSize).validate_python("10Mib"),
3127
1,
32-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
28+
TypeAdapter(ByteSize).validate_python("10Mib"),
3329
),
3430
(
35-
_BYTE_SIZE_ADAPTER.validate_python("20Mib"),
31+
TypeAdapter(ByteSize).validate_python("20Mib"),
3632
2,
37-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
33+
TypeAdapter(ByteSize).validate_python("10Mib"),
3834
),
3935
(
40-
_BYTE_SIZE_ADAPTER.validate_python("50Mib"),
36+
TypeAdapter(ByteSize).validate_python("50Mib"),
4137
5,
42-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
38+
TypeAdapter(ByteSize).validate_python("10Mib"),
4339
),
4440
(
45-
_BYTE_SIZE_ADAPTER.validate_python("150Mib"),
41+
TypeAdapter(ByteSize).validate_python("150Mib"),
4642
15,
47-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
43+
TypeAdapter(ByteSize).validate_python("10Mib"),
4844
),
4945
(
50-
_BYTE_SIZE_ADAPTER.validate_python("550Mib"),
46+
TypeAdapter(ByteSize).validate_python("550Mib"),
5147
55,
52-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
48+
TypeAdapter(ByteSize).validate_python("10Mib"),
5349
),
5450
(
55-
_BYTE_SIZE_ADAPTER.validate_python("560Gib"),
51+
TypeAdapter(ByteSize).validate_python("560Gib"),
5652
5735,
57-
_BYTE_SIZE_ADAPTER.validate_python("100Mib"),
53+
TypeAdapter(ByteSize).validate_python("100Mib"),
5854
),
5955
(
60-
_BYTE_SIZE_ADAPTER.validate_python("5Tib"),
56+
TypeAdapter(ByteSize).validate_python("5Tib"),
6157
8739,
62-
_BYTE_SIZE_ADAPTER.validate_python("600Mib"),
58+
TypeAdapter(ByteSize).validate_python("600Mib"),
6359
),
6460
(
65-
_BYTE_SIZE_ADAPTER.validate_python("15Tib"),
61+
TypeAdapter(ByteSize).validate_python("15Tib"),
6662
7680,
67-
_BYTE_SIZE_ADAPTER.validate_python("2Gib"),
63+
TypeAdapter(ByteSize).validate_python("2Gib"),
6864
),
6965
(
70-
_BYTE_SIZE_ADAPTER.validate_python("9431773844"),
66+
TypeAdapter(ByteSize).validate_python("9431773844"),
7167
900,
72-
_BYTE_SIZE_ADAPTER.validate_python("10Mib"),
68+
TypeAdapter(ByteSize).validate_python("10Mib"),
7369
),
7470
],
7571
ids=byte_size_ids,
@@ -83,7 +79,7 @@ def test_compute_num_file_chunks(
8379

8480

8581
def test_enormous_file_size_raises_value_error():
86-
enormous_file_size = _BYTE_SIZE_ADAPTER.validate_python(
82+
enormous_file_size = TypeAdapter(ByteSize).validate_python(
8783
(
8884
max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE)
8985
* _MULTIPART_MAX_NUMBER_OF_PARTS

packages/common-library/src/common_library/pydantic_type_adapters.py

Lines changed: 0 additions & 8 deletions
This file was deleted.
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
import datetime
2+
import warnings
3+
from datetime import timedelta
4+
5+
from pydantic import TypeAdapter, field_validator
6+
7+
8+
def timedelta_try_convert_str_to_float(field: str):
9+
"""Transforms a float/int number into a valid datetime as it used to work in the past"""
10+
11+
def _try_convert_str_to_float_or_return(
12+
v: datetime.timedelta | str | float,
13+
) -> datetime.timedelta | str | float:
14+
if isinstance(v, str):
15+
try:
16+
converted_value = float(v)
17+
18+
iso8601_format = TypeAdapter(timedelta).dump_python(
19+
timedelta(seconds=converted_value), mode="json"
20+
)
21+
warnings.warn(
22+
f"{field}='{v}' -should be set to-> {field}='{iso8601_format}' (ISO8601 datetime format). "
23+
"Please also convert the value in the >>OPS REPOSITORY<<. "
24+
"For details: https://docs.pydantic.dev/1.10/usage/types/#datetime-types.",
25+
DeprecationWarning,
26+
stacklevel=8,
27+
)
28+
29+
return converted_value
30+
except ValueError:
31+
# returns format like "1:00:00"
32+
return v
33+
return v
34+
35+
return field_validator(field, mode="before")(_try_convert_str_to_float_or_return)
Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
from datetime import timedelta
2+
3+
import pytest
4+
from common_library.pydantic_validators import timedelta_try_convert_str_to_float
5+
from faker import Faker
6+
from pydantic import Field
7+
from pydantic_settings import BaseSettings, SettingsConfigDict
8+
from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
9+
10+
11+
def test_validate_timedelta_in_legacy_mode(
12+
monkeypatch: pytest.MonkeyPatch, faker: Faker
13+
):
14+
class Settings(BaseSettings):
15+
APP_NAME: str
16+
REQUEST_TIMEOUT: timedelta = Field(default=timedelta(seconds=40))
17+
18+
_try_convert_request_timeout = timedelta_try_convert_str_to_float(
19+
"REQUEST_TIMEOUT"
20+
)
21+
22+
model_config = SettingsConfigDict()
23+
24+
app_name = faker.pystr()
25+
env_vars: dict[str, str | bool] = {"APP_NAME": app_name}
26+
27+
# without timedelta
28+
setenvs_from_dict(monkeypatch, env_vars)
29+
settings = Settings()
30+
print(settings.model_dump())
31+
assert app_name == settings.APP_NAME
32+
assert timedelta(seconds=40) == settings.REQUEST_TIMEOUT
33+
34+
# with timedelta in seconds
35+
env_vars["REQUEST_TIMEOUT"] = "5555"
36+
setenvs_from_dict(monkeypatch, env_vars)
37+
settings = Settings()
38+
print(settings.model_dump())
39+
assert app_name == settings.APP_NAME
40+
assert timedelta(seconds=5555) == settings.REQUEST_TIMEOUT

packages/models-library/src/models_library/aiodocker_api.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212

1313
class AioDockerContainerSpec(ContainerSpec):
14-
Env: dict[str, str | None] | None = Field( # type: ignore
14+
Env: dict[str, str | None] | None = Field(
1515
default=None,
1616
description="aiodocker expects here a dictionary and re-convert it back internally`.\n",
1717
)

0 commit comments

Comments
 (0)