
Commit d8f6de0

everywhere TypeAdapter

1 parent 1a62b0e commit d8f6de0
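
This commit replaces pydantic v1's parse_obj_as helper with v2's TypeAdapter throughout the storage service. A minimal sketch of the before/after pattern (the value is illustrative, not taken from the diff):

from pydantic import ByteSize, TypeAdapter

# pydantic v1 (removed below):
#   size = parse_obj_as(ByteSize, "10Mib")
# pydantic v2 (applied below):
size = TypeAdapter(ByteSize).validate_python("10Mib")
assert size == 10 * 1024 * 1024  # ByteSize parses the binary suffix into an int byte count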

File tree

12 files changed: +186 −151 lines changed

services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@
 from aiohttp.client import ClientSession
 from models_library.api_schemas_storage import DatCoreDatasetName
 from models_library.users import UserID
-from pydantic import AnyUrl, parse_obj_as
+from pydantic import AnyUrl, TypeAdapter
 from servicelib.aiohttp.application_keys import APP_CONFIG_KEY
 from servicelib.aiohttp.client_session import get_client_session
 from servicelib.utils import logged_gather
@@ -229,7 +229,7 @@ async def get_file_download_presigned_link(
         dict[str, Any],
         await _request(app, api_key, api_secret, "GET", f"/files/{file_id}"),
     )
-    url: AnyUrl = parse_obj_as(AnyUrl, file_download_data["link"])
+    url: AnyUrl = TypeAdapter(AnyUrl).validate_python(file_download_data["link"])
     return url
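A note on the AnyUrl change above: in pydantic v2 the validated result is a Url object, not a str subclass as in v1, so callers that need plain text must convert explicitly. A sketch with a made-up link:

from pydantic import AnyUrl, TypeAdapter

url = TypeAdapter(AnyUrl).validate_python("https://example.com/files/123")  # hypothetical value
assert not isinstance(url, str)  # v2 Url types no longer inherit from str
text = f"{url}"  # explicit conversion where plain text is required
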
services/storage/src/simcore_service_storage/s3_utils.py

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@
 from collections import defaultdict
 from dataclasses import dataclass, field

-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from servicelib.aiohttp.long_running_tasks.server import (
     ProgressMessage,
     ProgressPercent,
@@ -55,7 +55,7 @@ def finalize_transfer(self) -> None:
     def copy_transfer_cb(self, total_bytes_copied: int, *, file_name: str) -> None:
         _logger.debug(
             "Copied %s of %s",
-            parse_obj_as(ByteSize, total_bytes_copied).human_readable(),
+            TypeAdapter(ByteSize).validate_python(total_bytes_copied).human_readable(),
             file_name,
         )
         self._file_total_bytes_copied[file_name] = total_bytes_copied
@@ -66,7 +66,7 @@ def copy_transfer_cb(self, total_bytes_copied: int, *, file_name: str) -> None:
     def upload_transfer_cb(self, bytes_transferred: int, *, file_name: str) -> None:
         _logger.debug(
             "Uploaded %s of %s",
-            parse_obj_as(ByteSize, bytes_transferred).human_readable(),
+            TypeAdapter(ByteSize).validate_python(bytes_transferred).human_readable(),
             file_name,
         )
         self._file_total_bytes_copied[file_name] += bytes_transferred
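
Both the old and new form of these log lines validate the raw integer into a ByteSize and render it with human_readable(). A quick illustration:

from pydantic import ByteSize, TypeAdapter

size = TypeAdapter(ByteSize).validate_python(1536)  # plain int in, ByteSize out
print(size.human_readable())  # "1.5KiB"

Since these callbacks fire on every transferred chunk and TypeAdapter construction compiles a validator each time, a module-level adapter (built once, reused per call) would be the cheaper variant suggested by pydantic's performance notes.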

services/storage/src/simcore_service_storage/simcore_s3_dsm.py

Lines changed: 24 additions & 21 deletions
@@ -35,7 +35,7 @@
     StorageFileID,
 )
 from models_library.users import UserID
-from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter, parse_obj_as
+from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter
 from servicelib.aiohttp.client_session import get_client_session
 from servicelib.aiohttp.long_running_tasks.server import TaskProgress
 from servicelib.logging_utils import log_context
@@ -257,7 +257,7 @@ async def get_file(self, user_id: UserID, file_id: StorageFileID) -> FileMetaData:
                 raise FileAccessRightError(access_right="read", file_id=file_id)

             fmd = await db_file_meta_data.get(
-                conn, parse_obj_as(SimcoreS3FileID, file_id)
+                conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id)
             )
             if is_file_entry_valid(fmd):
                 return convert_db_to_model(fmd)
@@ -412,7 +412,7 @@ async def complete_file_upload(
             if not can.write:
                 raise FileAccessRightError(access_right="write", file_id=file_id)
             fmd = await db_file_meta_data.get(
-                conn, parse_obj_as(SimcoreS3FileID, file_id)
+                conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id)
             )

         if is_valid_managed_multipart_upload(fmd.upload_id):
@@ -460,12 +460,12 @@ async def create_file_download_link(
         ):
             raise S3KeyNotFoundError(key=file_id, bucket=self.simcore_bucket_name)
         return await self.__get_link(
-            parse_obj_as(SimcoreS3FileID, file_id), link_type
+            TypeAdapter(SimcoreS3FileID).validate_python(file_id), link_type
         )
         # standard file link
         async with self.engine.acquire() as conn:
             fmd = await db_file_meta_data.get(
-                conn, parse_obj_as(SimcoreS3FileID, file_id)
+                conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id)
             )
             if not is_file_entry_valid(fmd):
                 # try lazy update
@@ -487,9 +487,8 @@ async def __ensure_read_access_rights(
     async def __get_link(
         self, s3_file_id: SimcoreS3FileID, link_type: LinkType
     ) -> AnyUrl:
-        link: AnyUrl = parse_obj_as(
-            AnyUrl,
-            f"s3://{self.simcore_bucket_name}/{urllib.parse.quote(s3_file_id)}",
+        link: AnyUrl = TypeAdapter(AnyUrl).validate_python(
+            f"s3://{self.simcore_bucket_name}/{urllib.parse.quote(s3_file_id)}"
         )
         if link_type == LinkType.PRESIGNED:
             link = await get_s3_client(self.app).create_single_presigned_download_link(
@@ -528,7 +527,7 @@ async def delete_file(
         # NOTE: deleting might be slow, so better ensure we release the connection
         async with self.engine.acquire() as conn:
             file: FileMetaDataAtDB = await db_file_meta_data.get(
-                conn, parse_obj_as(SimcoreS3FileID, file_id)
+                conn, TypeAdapter(SimcoreS3FileID).validate_python(file_id)
             )
         await get_s3_client(self.app).delete_objects_recursively(
             bucket=file.bucket_name,
@@ -632,9 +631,9 @@ async def deep_copy_project_simcore_s3(
             limit=_MAX_PARALLEL_S3_CALLS,
         )
         total_num_of_files = sum(n for _, n in sizes_and_num_files)
-        src_project_total_data_size: ByteSize = parse_obj_as(
-            ByteSize, sum(n for n, _ in sizes_and_num_files)
-        )
+        src_project_total_data_size: ByteSize = TypeAdapter(
+            ByteSize
+        ).validate_python(sum(n for n, _ in sizes_and_num_files))
         with log_context(
             _logger,
             logging.INFO,
@@ -660,7 +659,7 @@ async def deep_copy_project_simcore_s3(
                     self._copy_path_s3_s3(
                         user_id,
                         src_fmd=src_fmd,
-                        dst_file_id=SimcoreS3FileID(
+                        dst_file_id=TypeAdapter(SimcoreS3FileID).validate_python(
                             f"{dst_project_uuid}/{new_node_id}/{src_fmd.object_name.split('/', maxsplit=2)[-1]}"
                         ),
                         bytes_transfered_cb=s3_transfered_data_cb.copy_transfer_cb,
@@ -718,7 +717,7 @@ async def _get_size_and_num_files(
             total_size += sum(x.size for x in s3_objects)
             total_num_s3_objects += len(s3_objects)

-        return parse_obj_as(ByteSize, total_size), total_num_s3_objects
+        return TypeAdapter(ByteSize).validate_python(total_size), total_num_s3_objects

     async def search_owned_files(
         self,
@@ -759,7 +758,7 @@ async def create_soft_link(
     ) -> FileMetaData:
         async with self.engine.acquire() as conn:
             if await db_file_meta_data.exists(
-                conn, parse_obj_as(SimcoreS3FileID, link_file_id)
+                conn, TypeAdapter(SimcoreS3FileID).validate_python(link_file_id)
             ):
                 raise LinkAlreadyExistsError(file_id=link_file_id)
         # validate target_uuid
@@ -908,7 +907,7 @@ async def _update_fmd_from_other(
             s3_metadata = await get_s3_client(self.app).get_object_metadata(
                 bucket=fmd.bucket_name, object_key=fmd.object_name
             )
-            fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size)
+            fmd.file_size = TypeAdapter(ByteSize).validate_python(s3_metadata.size)
             fmd.last_modified = s3_metadata.last_modified
             fmd.entity_tag = s3_metadata.e_tag
         else:
@@ -945,12 +944,12 @@ async def _update_database_from_storage(
         s3_metadata = await self._get_s3_metadata(fmd)
         if not fmd.is_directory:
             assert isinstance(s3_metadata, S3MetaData)  # nosec
-            fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size)
+            fmd.file_size = TypeAdapter(ByteSize).validate_python(s3_metadata.size)
             fmd.last_modified = s3_metadata.last_modified
             fmd.entity_tag = s3_metadata.e_tag
         elif fmd.is_directory:
             assert isinstance(s3_metadata, S3DirectoryMetaData)  # nosec
-            fmd.file_size = parse_obj_as(ByteSize, s3_metadata.size)
+            fmd.file_size = TypeAdapter(ByteSize).validate_python(s3_metadata.size)
         fmd.upload_expires_at = None
         fmd.upload_id = None
         async with self.engine.acquire() as conn:
@@ -978,7 +977,9 @@ async def _copy_file_datcore_s3(
         )
         assert dc_link.path  # nosec
         filename = Path(dc_link.path).name
-        dst_file_id = SimcoreS3FileID(f"{dest_project_id}/{dest_node_id}/{filename}")
+        dst_file_id = TypeAdapter(SimcoreS3FileID).validate_python(
+            f"{dest_project_id}/{dest_node_id}/{filename}"
+        )
        _logger.debug("copying %s to %s", f"{source_uuid=}", f"{dst_file_id=}")

         with tempfile.TemporaryDirectory() as tmpdir:
@@ -1075,7 +1076,7 @@ async def _create_fmd_for_upload(
         )
         fmd = FileMetaData.from_simcore_node(
             user_id=user_id,
-            file_id=parse_obj_as(SimcoreS3FileID, file_id),
+            file_id=TypeAdapter(SimcoreS3FileID).validate_python(file_id),
             bucket=self.simcore_bucket_name,
             location_id=self.location_id,
             location_name=self.location_name,
@@ -1092,7 +1093,9 @@ def create_simcore_s3_data_manager(app: web.Application) -> SimcoreS3DataManager
     assert cfg.STORAGE_S3  # nosec
     return SimcoreS3DataManager(
         engine=app[APP_AIOPG_ENGINE_KEY],
-        simcore_bucket_name=parse_obj_as(S3BucketName, cfg.STORAGE_S3.S3_BUCKET_NAME),
+        simcore_bucket_name=TypeAdapter(S3BucketName).validate_python(
+            cfg.STORAGE_S3.S3_BUCKET_NAME
+        ),
         app=app,
         settings=cfg,
     )
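
Replacing direct casts such as SimcoreS3FileID(f"...") in this file is more than cosmetic: calling a constrained-string type wraps the value without running validation, while a TypeAdapter enforces the constraint. A sketch using a simplified stand-in (assumption: the real SimcoreS3FileID in models_library constrains the object key along these lines):

from typing import Annotated

from pydantic import StringConstraints, TypeAdapter, ValidationError

# Hypothetical stand-in for models_library's SimcoreS3FileID
FileID = Annotated[str, StringConstraints(pattern=r"^[\w-]+/[\w-]+/.+$")]

adapter = TypeAdapter(FileID)
adapter.validate_python("project-uuid/node-uuid/data.zip")  # accepted

try:
    adapter.validate_python("not-a-valid-key")
except ValidationError:
    print("rejected")  # a bare cast would have silently accepted this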

services/storage/tests/conftest.py

Lines changed: 9 additions & 7 deletions
@@ -39,7 +39,7 @@
 from models_library.projects_nodes_io import LocationID, SimcoreS3FileID
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.logging_tools import log_context
@@ -259,7 +259,7 @@ async def _getter(file_id: SimcoreS3FileID) -> FileMetaDataGet:
         data, error = await assert_status(response, status.HTTP_200_OK)
         assert not error
         assert data
-        received_fmd = parse_obj_as(FileMetaDataGet, data)
+        received_fmd = TypeAdapter(FileMetaDataGet).validate_python(data)
         assert received_fmd
         return received_fmd

@@ -291,7 +291,7 @@ async def _link_creator(
         data, error = await assert_status(response, status.HTTP_200_OK)
         assert not error
         assert data
-        received_file_upload = parse_obj_as(FileUploadSchema, data)
+        received_file_upload = TypeAdapter(FileUploadSchema).validate_python(data)
         assert received_file_upload
         file_params.append((user_id, location_id, file_id))
         return received_file_upload
@@ -430,7 +430,7 @@ def _creator(
         if file_base_path:
             s3_file_name = f"{file_base_path / file_name}"
         clean_path = Path(f"{project_id}/{node_id}/{s3_file_name}")
-        return SimcoreS3FileID(f"{clean_path}")
+        return TypeAdapter(SimcoreS3FileID).validate_python(f"{clean_path}")

     return _creator

@@ -470,7 +470,7 @@ async def _directory_creator(dir_name: str):
     assert len(directory_file_upload.urls) == 1

     # complete the upload
-    complete_url = URL(directory_file_upload.links.complete_upload).relative()
+    complete_url = URL(f"{directory_file_upload.links.complete_upload}").relative()
     response = await client.post(
         f"{complete_url}",
         json=jsonable_encoder(FileUploadCompletionBody(parts=[])),
@@ -480,7 +480,7 @@ async def _directory_creator(dir_name: str):
     assert not error
     assert data
     file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data)
-    state_url = URL(file_upload_complete_response.links.state).relative()
+    state_url = URL(f"{file_upload_complete_response.links.state}").relative()

     # check that it finished updating
     assert client.app
@@ -535,7 +535,9 @@ async def _create_file(s: int, f: int):
             await storage_s3_client.upload_file(
                 bucket=storage_s3_bucket,
                 file=file,
-                object_key=SimcoreS3FileID(f"{clean_path}"),
+                object_key=TypeAdapter(SimcoreS3FileID).validate_python(
+                    f"{clean_path}"
+                ),
                 bytes_transfered_cb=None,
             )

services/storage/tests/fixtures/data_models.py

Lines changed: 13 additions & 16 deletions
@@ -18,7 +18,7 @@
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from pytest_simcore.helpers.faker_factories import random_project, random_user
 from servicelib.utils import limited_gather
 from simcore_postgres_database.models.project_to_groups import project_to_groups
@@ -45,7 +45,7 @@ async def _user_context(aiopg_engine: Engine, *, name: str) -> AsyncIterator[UserID]:
     assert isinstance(row.id, int)

     try:
-        yield UserID(row.id)
+        yield TypeAdapter(UserID).validate_python(row.id)
     finally:
         async with aiopg_engine.acquire() as conn:
             await conn.execute(users.delete().where(users.c.id == row.id))
@@ -149,7 +149,7 @@ async def project_id(
 async def collaborator_id(aiopg_engine: Engine) -> AsyncIterator[UserID]:

     async with _user_context(aiopg_engine, name="collaborator") as new_user_id:
-        yield UserID(new_user_id)
+        yield TypeAdapter(UserID).validate_python(new_user_id)


 @pytest.fixture
@@ -177,7 +177,7 @@ async def _() -> None:
         )
         row = await result.fetchone()
         assert row
-        access_rights: dict[str, Any] = row[projects.c.access_rights]
+        access_rights: dict[str | int, Any] = row[projects.c.access_rights]

         access_rights[await _get_user_group(conn, user_id)] = {
             "read": True,
@@ -279,22 +279,19 @@ async def random_project_with_files(
     async def _creator(
         num_nodes: int = 12,
         file_sizes: tuple[ByteSize, ...] = (
-            parse_obj_as(ByteSize, "7Mib"),
-            parse_obj_as(ByteSize, "110Mib"),
-            parse_obj_as(ByteSize, "1Mib"),
+            TypeAdapter(ByteSize).validate_python("7Mib"),
+            TypeAdapter(ByteSize).validate_python("110Mib"),
+            TypeAdapter(ByteSize).validate_python("1Mib"),
         ),
         file_checksums: tuple[SHA256Str, ...] = (
-            parse_obj_as(
-                SHA256Str,
-                "311e2e130d83cfea9c3b7560699c221b0b7f9e5d58b02870bd52b695d8b4aabd",
+            TypeAdapter(SHA256Str).validate_python(
+                "311e2e130d83cfea9c3b7560699c221b0b7f9e5d58b02870bd52b695d8b4aabd"
             ),
-            parse_obj_as(
-                SHA256Str,
-                "08e297db979d3c84f6b072c2a1e269e8aa04e82714ca7b295933a0c9c0f62b2e",
+            TypeAdapter(SHA256Str).validate_python(
+                "08e297db979d3c84f6b072c2a1e269e8aa04e82714ca7b295933a0c9c0f62b2e"
             ),
-            parse_obj_as(
-                SHA256Str,
-                "488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3",
+            TypeAdapter(SHA256Str).validate_python(
+                "488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3"
             ),
         ),
     ) -> tuple[
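
Since these defaults rebuild a fresh adapter for every value, a module-level adapter is a slightly leaner equivalent; a sketch under that assumption (the constant names are hypothetical):

from pydantic import ByteSize, TypeAdapter

_BYTESIZE_ADAPTER: TypeAdapter[ByteSize] = TypeAdapter(ByteSize)  # built once, reused

DEFAULT_FILE_SIZES: tuple[ByteSize, ...] = (
    _BYTESIZE_ADAPTER.validate_python("7Mib"),
    _BYTESIZE_ADAPTER.validate_python("110Mib"),
    _BYTESIZE_ADAPTER.validate_python("1Mib"),
)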

services/storage/tests/unit/test_dsm.py

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@
 from faker import Faker
 from models_library.projects_nodes_io import SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from servicelib.utils import limited_gather
 from simcore_service_storage.models import FileMetaData, S3BucketName
 from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -31,7 +31,7 @@ async def dsm_mockup_complete_db(
     cleanup_user_projects_file_metadata: None,
     faker: Faker,
 ) -> tuple[FileMetaData, FileMetaData]:
-    file_size = parse_obj_as(ByteSize, "10Mib")
+    file_size = TypeAdapter(ByteSize).validate_python("10Mib")
     uploaded_files = await limited_gather(
         *(upload_file(file_size, faker.file_name(), None) for _ in range(2)),
         limit=2,
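
One equivalence worth knowing when reading the test changes: for a concrete BaseModel such as FileMetaDataGet, TypeAdapter(Model).validate_python(data) behaves the same as Model.model_validate(data); TypeAdapter is only strictly needed for non-model annotations like ByteSize or SimcoreS3FileID. A sketch with a made-up model:

from pydantic import BaseModel, TypeAdapter


class FileMeta(BaseModel):  # hypothetical stand-in for FileMetaDataGet
    file_id: str
    size: int


data = {"file_id": "project/node/file.txt", "size": 1024}
assert TypeAdapter(FileMeta).validate_python(data) == FileMeta.model_validate(data)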
