Skip to content

Commit 1147997

Browse files
author
Andrei Neagu
committed
using suggested TypeAdapter pattern
1 parent 7a41cd0 commit 1147997

File tree

39 files changed

+209
-195
lines changed

39 files changed

+209
-195
lines changed

packages/aws-library/src/aws_library/s3/_client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from boto3.s3.transfer import TransferConfig
1414
from botocore import exceptions as botocore_exc
1515
from botocore.client import Config
16-
from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter
16+
from common_library.pydantic_networks_extension import AnyUrlLegacy
1717
from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart
1818
from models_library.basic_types import SHA256Str
1919
from pydantic import AnyUrl, ByteSize, TypeAdapter
@@ -264,7 +264,7 @@ async def create_single_presigned_download_link(
264264
Params={"Bucket": bucket, "Key": object_key},
265265
ExpiresIn=expiration_secs,
266266
)
267-
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
267+
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"
268268

269269
@s3_exception_handler(_logger)
270270
async def create_single_presigned_upload_link(
@@ -277,7 +277,7 @@ async def create_single_presigned_upload_link(
277277
Params={"Bucket": bucket, "Key": object_key},
278278
ExpiresIn=expiration_secs,
279279
)
280-
return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}"
280+
return f"{TypeAdapter(AnyUrlLegacy).validate_python(generated_link)}"
281281

282282
@s3_exception_handler(_logger)
283283
async def create_multipart_upload_links(
@@ -474,6 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool:
474474

475475
@staticmethod
476476
def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl:
477-
return AnyUrlLegacyAdapter.validate_python(
477+
return TypeAdapter(AnyUrlLegacy).validate_python(
478478
f"s3://{bucket}/{urllib.parse.quote(object_key)}"
479479
)
Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
from typing import Final
22

3-
from common_library.pydantic_type_adapters import ByteSizeAdapter
43
from pydantic import ByteSize, TypeAdapter
54

65
# NOTE: AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
76
MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(
87
ByteSize
98
).validate_python("100MiB")
10-
MULTIPART_COPY_THRESHOLD: Final[ByteSize] = ByteSizeAdapter.validate_python("100MiB")
9+
MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python(
10+
"100MiB"
11+
)
1112

12-
PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = ByteSizeAdapter.validate_python("5GiB")
13-
S3_MAX_FILE_SIZE: Final[ByteSize] = ByteSizeAdapter.validate_python("5TiB")
13+
PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB")
14+
S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB")

packages/aws-library/src/aws_library/s3/_utils.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
from typing import Final
22

3-
from common_library.pydantic_type_adapters import ByteSizeAdapter
4-
from pydantic import ByteSize
3+
from pydantic import ByteSize, TypeAdapter
54

65
_MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000
76

87
# this is artificially defined, if possible we keep a maximum number of requests for parallel
98
# uploading. If that is not possible then we create as many upload part as the max part size allows
109
_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [
11-
ByteSizeAdapter.validate_python(x)
10+
TypeAdapter(ByteSize).validate_python(x)
1211
for x in [
1312
"10Mib",
1413
"50Mib",

packages/aws-library/tests/test_s3_client.py

Lines changed: 27 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@
2828
S3UploadNotFoundError,
2929
)
3030
from aws_library.s3._models import MultiPartUploadLinks
31-
from common_library.pydantic_type_adapters import ByteSizeAdapter
3231
from faker import Faker
3332
from models_library.api_schemas_storage import S3BucketName, UploadedPart
3433
from models_library.basic_types import SHA256Str
@@ -112,7 +111,7 @@ async def _(
112111
file,
113112
MultiPartUploadLinks(
114113
upload_id="fake",
115-
chunk_size=ByteSizeAdapter.validate_python(file.stat().st_size),
114+
chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size),
116115
urls=[presigned_url],
117116
),
118117
)
@@ -136,7 +135,7 @@ async def with_uploaded_file_on_s3(
136135
s3_client: S3Client,
137136
with_s3_bucket: S3BucketName,
138137
) -> AsyncIterator[UploadedFile]:
139-
test_file = create_file_of_size(ByteSizeAdapter.validate_python("10Kib"))
138+
test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib"))
140139
await s3_client.upload_file(
141140
Filename=f"{test_file}",
142141
Bucket=with_s3_bucket,
@@ -591,7 +590,7 @@ async def test_undelete_file(
591590
assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size
592591

593592
# upload another file on top of the existing one
594-
new_file = create_file_of_size(ByteSizeAdapter.validate_python("5Kib"))
593+
new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib"))
595594
await s3_client.upload_file(
596595
Filename=f"{new_file}",
597596
Bucket=with_s3_bucket,
@@ -746,7 +745,7 @@ async def test_create_single_presigned_upload_link(
746745
[Path, str, S3BucketName, S3ObjectKey], Awaitable[None]
747746
],
748747
):
749-
file = create_file_of_size(ByteSizeAdapter.validate_python("1Mib"))
748+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
750749
s3_object_key = file.name
751750
presigned_url = await simcore_s3_api.create_single_presigned_upload_link(
752751
bucket=with_s3_bucket,
@@ -774,7 +773,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais
774773
create_file_of_size: Callable[[ByteSize], Path],
775774
default_expiration_time_seconds: int,
776775
):
777-
file = create_file_of_size(ByteSizeAdapter.validate_python("1Mib"))
776+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib"))
778777
s3_object_key = file.name
779778
with pytest.raises(S3BucketInvalidError):
780779
await simcore_s3_api.create_single_presigned_upload_link(
@@ -1081,7 +1080,7 @@ async def test_copy_file_invalid_raises(
10811080
create_file_of_size: Callable[[ByteSize], Path],
10821081
faker: Faker,
10831082
):
1084-
file = create_file_of_size(ByteSizeAdapter.validate_python("1MiB"))
1083+
file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB"))
10851084
uploaded_file = await upload_file(file)
10861085
dst_object_key = faker.file_name()
10871086
# NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError
@@ -1106,9 +1105,9 @@ async def test_copy_file_invalid_raises(
11061105
"directory_size, min_file_size, max_file_size",
11071106
[
11081107
(
1109-
ByteSizeAdapter.validate_python("1Mib"),
1110-
ByteSizeAdapter.validate_python("1B"),
1111-
ByteSizeAdapter.validate_python("10Kib"),
1108+
TypeAdapter(ByteSize).validate_python("1Mib"),
1109+
TypeAdapter(ByteSize).validate_python("1B"),
1110+
TypeAdapter(ByteSize).validate_python("10Kib"),
11121111
)
11131112
],
11141113
ids=byte_size_ids,
@@ -1132,9 +1131,9 @@ async def test_get_directory_metadata(
11321131
"directory_size, min_file_size, max_file_size",
11331132
[
11341133
(
1135-
ByteSizeAdapter.validate_python("1Mib"),
1136-
ByteSizeAdapter.validate_python("1B"),
1137-
ByteSizeAdapter.validate_python("10Kib"),
1134+
TypeAdapter(ByteSize).validate_python("1Mib"),
1135+
TypeAdapter(ByteSize).validate_python("1B"),
1136+
TypeAdapter(ByteSize).validate_python("10Kib"),
11381137
)
11391138
],
11401139
ids=byte_size_ids,
@@ -1164,9 +1163,9 @@ async def test_get_directory_metadata_raises(
11641163
"directory_size, min_file_size, max_file_size",
11651164
[
11661165
(
1167-
ByteSizeAdapter.validate_python("1Mib"),
1168-
ByteSizeAdapter.validate_python("1B"),
1169-
ByteSizeAdapter.validate_python("10Kib"),
1166+
TypeAdapter(ByteSize).validate_python("1Mib"),
1167+
TypeAdapter(ByteSize).validate_python("1B"),
1168+
TypeAdapter(ByteSize).validate_python("10Kib"),
11701169
)
11711170
],
11721171
ids=byte_size_ids,
@@ -1200,9 +1199,9 @@ async def test_delete_file_recursively(
12001199
"directory_size, min_file_size, max_file_size",
12011200
[
12021201
(
1203-
ByteSizeAdapter.validate_python("1Mib"),
1204-
ByteSizeAdapter.validate_python("1B"),
1205-
ByteSizeAdapter.validate_python("10Kib"),
1202+
TypeAdapter(ByteSize).validate_python("1Mib"),
1203+
TypeAdapter(ByteSize).validate_python("1B"),
1204+
TypeAdapter(ByteSize).validate_python("10Kib"),
12061205
)
12071206
],
12081207
ids=byte_size_ids,
@@ -1238,9 +1237,9 @@ async def test_delete_file_recursively_raises(
12381237
"directory_size, min_file_size, max_file_size",
12391238
[
12401239
(
1241-
ByteSizeAdapter.validate_python("1Mib"),
1242-
ByteSizeAdapter.validate_python("1B"),
1243-
ByteSizeAdapter.validate_python("10Kib"),
1240+
TypeAdapter(ByteSize).validate_python("1Mib"),
1241+
TypeAdapter(ByteSize).validate_python("1B"),
1242+
TypeAdapter(ByteSize).validate_python("10Kib"),
12441243
)
12451244
],
12461245
ids=byte_size_ids,
@@ -1338,14 +1337,14 @@ def run_async_test(*args, **kwargs) -> None:
13381337
"directory_size, min_file_size, max_file_size",
13391338
[
13401339
(
1341-
ByteSizeAdapter.validate_python("1Mib"),
1342-
ByteSizeAdapter.validate_python("1B"),
1343-
ByteSizeAdapter.validate_python("10Kib"),
1340+
TypeAdapter(ByteSize).validate_python("1Mib"),
1341+
TypeAdapter(ByteSize).validate_python("1B"),
1342+
TypeAdapter(ByteSize).validate_python("10Kib"),
13441343
),
13451344
(
1346-
ByteSizeAdapter.validate_python("500Mib"),
1347-
ByteSizeAdapter.validate_python("10Mib"),
1348-
ByteSizeAdapter.validate_python("50Mib"),
1345+
TypeAdapter(ByteSize).validate_python("500Mib"),
1346+
TypeAdapter(ByteSize).validate_python("10Mib"),
1347+
TypeAdapter(ByteSize).validate_python("50Mib"),
13491348
),
13501349
],
13511350
ids=byte_size_ids,

packages/aws-library/tests/test_s3_utils.py

Lines changed: 22 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -10,63 +10,62 @@
1010
_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE,
1111
compute_num_file_chunks,
1212
)
13-
from common_library.pydantic_type_adapters import ByteSizeAdapter
14-
from pydantic import ByteSize
13+
from pydantic import ByteSize, TypeAdapter
1514
from pytest_simcore.helpers.parametrizations import byte_size_ids
1615

1716

1817
@pytest.mark.parametrize(
1918
"file_size, expected_num_chunks, expected_chunk_size",
2019
[
2120
(
22-
ByteSizeAdapter.validate_python("5Mib"),
21+
TypeAdapter(ByteSize).validate_python("5Mib"),
2322
1,
24-
ByteSizeAdapter.validate_python("10Mib"),
23+
TypeAdapter(ByteSize).validate_python("10Mib"),
2524
),
2625
(
27-
ByteSizeAdapter.validate_python("10Mib"),
26+
TypeAdapter(ByteSize).validate_python("10Mib"),
2827
1,
29-
ByteSizeAdapter.validate_python("10Mib"),
28+
TypeAdapter(ByteSize).validate_python("10Mib"),
3029
),
3130
(
32-
ByteSizeAdapter.validate_python("20Mib"),
31+
TypeAdapter(ByteSize).validate_python("20Mib"),
3332
2,
34-
ByteSizeAdapter.validate_python("10Mib"),
33+
TypeAdapter(ByteSize).validate_python("10Mib"),
3534
),
3635
(
37-
ByteSizeAdapter.validate_python("50Mib"),
36+
TypeAdapter(ByteSize).validate_python("50Mib"),
3837
5,
39-
ByteSizeAdapter.validate_python("10Mib"),
38+
TypeAdapter(ByteSize).validate_python("10Mib"),
4039
),
4140
(
42-
ByteSizeAdapter.validate_python("150Mib"),
41+
TypeAdapter(ByteSize).validate_python("150Mib"),
4342
15,
44-
ByteSizeAdapter.validate_python("10Mib"),
43+
TypeAdapter(ByteSize).validate_python("10Mib"),
4544
),
4645
(
47-
ByteSizeAdapter.validate_python("550Mib"),
46+
TypeAdapter(ByteSize).validate_python("550Mib"),
4847
55,
49-
ByteSizeAdapter.validate_python("10Mib"),
48+
TypeAdapter(ByteSize).validate_python("10Mib"),
5049
),
5150
(
52-
ByteSizeAdapter.validate_python("560Gib"),
51+
TypeAdapter(ByteSize).validate_python("560Gib"),
5352
5735,
54-
ByteSizeAdapter.validate_python("100Mib"),
53+
TypeAdapter(ByteSize).validate_python("100Mib"),
5554
),
5655
(
57-
ByteSizeAdapter.validate_python("5Tib"),
56+
TypeAdapter(ByteSize).validate_python("5Tib"),
5857
8739,
59-
ByteSizeAdapter.validate_python("600Mib"),
58+
TypeAdapter(ByteSize).validate_python("600Mib"),
6059
),
6160
(
62-
ByteSizeAdapter.validate_python("15Tib"),
61+
TypeAdapter(ByteSize).validate_python("15Tib"),
6362
7680,
64-
ByteSizeAdapter.validate_python("2Gib"),
63+
TypeAdapter(ByteSize).validate_python("2Gib"),
6564
),
6665
(
67-
ByteSizeAdapter.validate_python("9431773844"),
66+
TypeAdapter(ByteSize).validate_python("9431773844"),
6867
900,
69-
ByteSizeAdapter.validate_python("10Mib"),
68+
TypeAdapter(ByteSize).validate_python("10Mib"),
7069
),
7170
],
7271
ids=byte_size_ids,
@@ -80,7 +79,7 @@ def test_compute_num_file_chunks(
8079

8180

8281
def test_enormous_file_size_raises_value_error():
83-
enormous_file_size = ByteSizeAdapter.validate_python(
82+
enormous_file_size = TypeAdapter(ByteSize).validate_python(
8483
(
8584
max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE)
8685
* _MULTIPART_MAX_NUMBER_OF_PARTS

packages/common-library/src/common_library/pydantic_type_adapters.py

Lines changed: 0 additions & 12 deletions
This file was deleted.

packages/models-library/src/models_library/rest_pagination.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from typing import Annotated, Final, Generic, TypeAlias, TypeVar
22

3-
from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter
3+
from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
44
from pydantic import (
55
BaseModel,
66
BeforeValidator,
@@ -99,7 +99,9 @@ class PageLinks(
9999
PageRefs[
100100
Annotated[
101101
str,
102-
BeforeValidator(lambda x: str(AnyHttpUrlLegacyAdapter.validate_python(x))),
102+
BeforeValidator(
103+
lambda x: str(TypeAdapter(AnyHttpUrlLegacy).validate_python(x))
104+
),
103105
]
104106
]
105107
):

packages/models-library/src/models_library/rest_pagination_utils.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
from math import ceil
22
from typing import Any, Protocol, TypedDict, Union, runtime_checkable
33

4-
from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter
4+
from common_library.pydantic_networks_extension import AnyHttpUrlLegacy
5+
from pydantic import TypeAdapter
56

67
from .rest_pagination import PageLinks, PageMetaInfoLimitOffset
78

@@ -40,7 +41,7 @@ def _replace_query(url: _URLType, query: dict[str, Any]) -> str:
4041
new_url = url.replace_query_params(**query)
4142

4243
new_url_str = f"{new_url}"
43-
return f"{AnyHttpUrlLegacyAdapter.validate_python(new_url_str)}"
44+
return f"{TypeAdapter(AnyHttpUrlLegacy).validate_python(new_url_str)}"
4445

4546

4647
class PageDict(TypedDict):

packages/models-library/src/models_library/service_settings_labels.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,14 @@
55
from pathlib import Path
66
from typing import Any, Literal, TypeAlias
77

8-
from common_library.pydantic_type_adapters import ByteSizeAdapter
98
from pydantic import (
109
BaseModel,
10+
ByteSize,
1111
ConfigDict,
1212
Field,
1313
Json,
1414
PrivateAttr,
15+
TypeAdapter,
1516
ValidationError,
1617
ValidationInfo,
1718
field_validator,
@@ -200,7 +201,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
200201
for path_str, size_str in v.items():
201202
# checks that format is correct
202203
try:
203-
ByteSizeAdapter.validate_python(size_str)
204+
TypeAdapter(ByteSize).validate_python(size_str)
204205
except ValidationError as e:
205206
msg = f"Provided size='{size_str}' contains invalid characters: {e!s}"
206207
raise ValueError(msg) from e

0 commit comments

Comments
 (0)