Skip to content

Commit 042d093

Browse files
author
Andrei Neagu
committed
changed types
1 parent da15df2 commit 042d093

File tree

3 files changed

+10
-19
lines changed

3 files changed

+10
-19
lines changed

packages/models-library/src/models_library/api_schemas_webserver/storage.py

Lines changed: 3 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from pathlib import Path
2-
from typing import Annotated, Any
2+
from typing import Annotated
33

4-
from pydantic import BaseModel, BeforeValidator, Field
4+
from pydantic import BaseModel, Field
55

66
from ..api_schemas_storage.storage_schemas import (
77
DEFAULT_NUMBER_OF_PATHS_PER_PAGE,
@@ -37,17 +37,7 @@ class BatchDeletePathsBodyParams(InputSchema):
3737
paths: set[Path]
3838

3939

40-
def _ensure_valid_path(value: Any) -> str:
41-
try:
42-
Path(value)
43-
except Exception as e:
44-
msg = f"Provided {value=} is not a valid path"
45-
raise ValueError(msg) from e
46-
47-
return value
48-
49-
50-
PathToExport = Annotated[str, BeforeValidator(_ensure_valid_path)]
40+
PathToExport = Path
5141

5242

5343
class DataExportPost(InputSchema):

services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
from aws_library.s3._models import S3ObjectKey
66
from celery import Task # type: ignore[import-untyped]
77
from models_library.api_schemas_storage.storage_schemas import FoldersBody
8+
from models_library.api_schemas_webserver.storage import PathToExport
89
from models_library.progress_bar import ProgressReport
910
from models_library.projects_nodes_io import StorageFileID
1011
from models_library.users import UserID
@@ -65,7 +66,7 @@ async def export_data(
6566
task_id: TaskID,
6667
*,
6768
user_id: UserID,
68-
paths_to_export: list[S3ObjectKey],
69+
paths_to_export: list[PathToExport],
6970
) -> StorageFileID:
7071
"""
7172
AccessRightError: in case user can't access project
@@ -80,8 +81,8 @@ async def export_data(
8081
)
8182
assert isinstance(dsm, SimcoreS3DataManager) # nosec
8283

83-
paths_to_export = [
84-
TypeAdapter(S3ObjectKey).validate_python(path_to_export)
84+
object_keys = [
85+
TypeAdapter(S3ObjectKey).validate_python(f"{path_to_export}")
8586
for path_to_export in paths_to_export
8687
]
8788

@@ -96,5 +97,5 @@ async def _progress_cb(report: ProgressReport) -> None:
9697
progress_report_cb=_progress_cb,
9798
) as progress_bar:
9899
return await dsm.create_s3_export(
99-
user_id, paths_to_export, progress_bar=progress_bar
100+
user_id, object_keys, progress_bar=progress_bar
100101
)

services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
1-
from aws_library.s3._models import S3ObjectKey
21
from fastapi import FastAPI
32
from models_library.api_schemas_rpc_async_jobs.async_jobs import (
43
AsyncJobGet,
54
AsyncJobNameData,
65
)
76
from models_library.api_schemas_storage.storage_schemas import FoldersBody
7+
from models_library.api_schemas_webserver.storage import PathToExport
88
from servicelib.rabbitmq import RPCRouter
99

1010
from ...modules.celery import get_celery_client
@@ -31,7 +31,7 @@ async def copy_folders_from_project(
3131

3232
@router.expose()
3333
async def start_export_data(
34-
app: FastAPI, job_id_data: AsyncJobNameData, paths_to_export: list[S3ObjectKey]
34+
app: FastAPI, job_id_data: AsyncJobNameData, paths_to_export: list[PathToExport]
3535
) -> AsyncJobGet:
3636
task_uuid = await get_celery_client(app).send_task(
3737
export_data.__name__,

0 commit comments

Comments (0)