diff --git a/api/specs/web-server/Makefile b/api/specs/web-server/Makefile index 4442c262d89..168d7c9ec78 100644 --- a/api/specs/web-server/Makefile +++ b/api/specs/web-server/Makefile @@ -16,3 +16,6 @@ install-dev install: _check_venv_active .PHONY: all all: _check_venv_active install python openapi.py + +.PHONY: openapi-specs +openapi-specs: all diff --git a/api/specs/web-server/_storage.py b/api/specs/web-server/_storage.py index 4f7b7ff11db..50e1eaeb5fd 100644 --- a/api/specs/web-server/_storage.py +++ b/api/specs/web-server/_storage.py @@ -5,11 +5,10 @@ from typing import TypeAlias +from uuid import UUID from fastapi import APIRouter, Query, status -from models_library.generics import Envelope -from models_library.projects_nodes_io import LocationID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocation, FileMetaDataGet, FileUploadCompleteFutureResponse, @@ -19,6 +18,15 @@ LinkType, PresignedLink, ) +from models_library.api_schemas_webserver.storage import ( + DataExportPost, + StorageAsyncJobGet, + StorageAsyncJobResult, + StorageAsyncJobStatus, +) +from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID +from models_library.users import UserID from pydantic import AnyUrl, ByteSize from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.storage.schemas import DatasetMetaData, FileMetaData @@ -167,3 +175,49 @@ async def is_completed_upload_file( location_id: LocationID, file_id: StorageFileIDStr, future_id: str ): """Returns state of upload completion""" + + +# data export +@router.post( + "/storage/locations/{location_id}/export-data", + response_model=Envelope[StorageAsyncJobGet], + name="export_data", + description="Export data", +) +async def export_data(data_export: DataExportPost, location_id: LocationID): + """Trigger data export. 
Returns async job id for getting status and results""" + + +@router.get( + "/storage/async-jobs/{job_id}/status", + response_model=Envelope[StorageAsyncJobStatus], + name="get_async_job_status", +) +async def get_async_job_status(storage_async_job_get: StorageAsyncJobGet, job_id: UUID): + """Get async job status""" + + +@router.post( + "/storage/async-jobs/{job_id}:abort", + name="abort_async_job", +) +async def abort_async_job(storage_async_job_get: StorageAsyncJobGet, job_id: UUID): + """Aborts execution of an async job""" + + +@router.get( + "/storage/async-jobs/{job_id}/result", + response_model=Envelope[StorageAsyncJobResult], + name="get_async_job_result", +) +async def get_async_job_result(storage_async_job_get: StorageAsyncJobGet, job_id: UUID): + """Get the result of the async job""" + + +@router.get( + "/storage/async-jobs", + response_model=Envelope[list[StorageAsyncJobGet]], + name="get_async_jobs", +) +async def get_async_jobs(user_id: UserID): + """Returns a list of async jobs for the user""" diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index ec504df7444..e62d55d2791 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -13,9 +13,13 @@ from boto3.s3.transfer import TransferConfig from botocore import exceptions as botocore_exc from botocore.client import Config +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + S3BucketName, + UploadedPart, +) from models_library.basic_types import SHA256Str from models_library.bytes_iters import BytesIter, DataSize -from models_library.storage_schemas import ETag, S3BucketName, UploadedPart from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.bytes_iters import DEFAULT_READ_CHUNK_SIZE, BytesStreamer from servicelib.logging_utils import log_catch, log_context diff --git a/packages/aws-library/src/aws_library/s3/_models.py 
b/packages/aws-library/src/aws_library/s3/_models.py index 4e37d0dafc2..f02a4765fad 100644 --- a/packages/aws-library/src/aws_library/s3/_models.py +++ b/packages/aws-library/src/aws_library/s3/_models.py @@ -2,8 +2,8 @@ from dataclasses import dataclass from typing import TypeAlias +from models_library.api_schemas_storage.storage_schemas import ETag from models_library.basic_types import SHA256Str -from models_library.storage_schemas import ETag from pydantic import AnyUrl, BaseModel, ByteSize from types_aiobotocore_s3.type_defs import HeadObjectOutputTypeDef, ObjectTypeDef diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index cc5957d2ab6..cca7a19fd78 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -38,8 +38,11 @@ ) from aws_library.s3._models import MultiPartUploadLinks from faker import Faker +from models_library.api_schemas_storage.storage_schemas import ( + S3BucketName, + UploadedPart, +) from models_library.basic_types import SHA256Str -from models_library.storage_schemas import S3BucketName, UploadedPart from moto.server import ThreadedMotoServer from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture diff --git a/packages/models-library/src/models_library/api_schemas_rpc_data_export/__init__.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/__init__.py similarity index 100% rename from packages/models-library/src/models_library/api_schemas_rpc_data_export/__init__.py rename to packages/models-library/src/models_library/api_schemas_rpc_async_jobs/__init__.py diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py new file mode 100644 index 00000000000..560c4063250 --- /dev/null +++ 
b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py @@ -0,0 +1,51 @@ +from datetime import datetime +from typing import Any, TypeAlias +from uuid import UUID + +from models_library.users import UserID +from pydantic import BaseModel, model_validator +from typing_extensions import Self + +from ..progress_bar import ProgressReport + +AsyncJobId: TypeAlias = UUID + + +class AsyncJobStatus(BaseModel): + job_id: AsyncJobId + progress: ProgressReport + done: bool + started: datetime + stopped: datetime | None + + @model_validator(mode="after") + def _check_consistency(self) -> Self: + is_done = self.done + is_stopped = self.stopped is not None + + if is_done != is_stopped: + msg = f"Inconsistent data: {self.done=}, {self.stopped=}" + raise ValueError(msg) + return self + + +class AsyncJobResult(BaseModel): + result: Any | None + error: Any | None + + +class AsyncJobGet(BaseModel): + job_id: AsyncJobId + job_name: str + + +class AsyncJobAbort(BaseModel): + result: bool + job_id: AsyncJobId + + +class AsyncJobAccessData(BaseModel): + """Data for controlling access to an async job""" + + user_id: UserID | None + product_name: str diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py new file mode 100644 index 00000000000..5902bf31738 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py @@ -0,0 +1,13 @@ +from common_library.errors_classes import OsparcErrorMixin + + +class BaseAsyncjobRpcError(OsparcErrorMixin, RuntimeError): + pass + + +class StatusError(BaseAsyncjobRpcError): + msg_template: str = "Could not get status of job {job_id}" + + +class ResultError(BaseAsyncjobRpcError): + msg_template: str = "Could not get results of job {job_id}" diff --git a/packages/models-library/src/models_library/api_schemas_rpc_data_export/tasks.py 
b/packages/models-library/src/models_library/api_schemas_rpc_data_export/tasks.py deleted file mode 100644 index 65787850d29..00000000000 --- a/packages/models-library/src/models_library/api_schemas_rpc_data_export/tasks.py +++ /dev/null @@ -1,36 +0,0 @@ -from datetime import datetime -from typing import Any, TypeAlias -from uuid import UUID - -from pydantic import BaseModel, Field, PositiveFloat, model_validator -from typing_extensions import Self - -TaskRpcId: TypeAlias = UUID - - -class TaskRpcStatus(BaseModel): - task_id: TaskRpcId - task_progress: PositiveFloat = Field(..., ge=0.0, le=1.0) - done: bool - started: datetime - stopped: datetime | None - - @model_validator(mode="after") - def _check_consistency(self) -> Self: - is_done = self.done - is_stopped = self.stopped is not None - - if is_done != is_stopped: - msg = f"Inconsistent data: {self.done=}, {self.stopped=}" - raise ValueError(msg) - return self - - -class TaskRpcResult(BaseModel): - result: Any | None - error: Any | None - - -class TaskRpcGet(BaseModel): - task_id: TaskRpcId - task_name: str diff --git a/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py b/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py new file mode 100644 index 00000000000..3645c918e99 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_storage/data_export_async_jobs.py @@ -0,0 +1,33 @@ +# pylint: disable=R6301 +from pathlib import Path + +from common_library.errors_classes import OsparcErrorMixin +from models_library.projects_nodes_io import LocationID +from models_library.users import UserID +from pydantic import BaseModel, Field + + +class DataExportTaskStartInput(BaseModel): + user_id: UserID + product_name: str + location_id: LocationID + paths: list[Path] = Field(..., min_length=1) + + +### Exceptions + + +class StorageRpcError(OsparcErrorMixin, RuntimeError): + pass + + +class InvalidFileIdentifierError(StorageRpcError): 
+ msg_template: str = "Could not find the file {file_id}" + + +class AccessRightError(StorageRpcError): + msg_template: str = "User {user_id} does not have access to file {file_id}" + + +class DataExportError(StorageRpcError): + msg_template: str = "Could not complete data export job with id {job_id}" diff --git a/packages/models-library/src/models_library/api_schemas_storage/data_export_tasks.py b/packages/models-library/src/models_library/api_schemas_storage/data_export_tasks.py deleted file mode 100644 index 242ea2eb42e..00000000000 --- a/packages/models-library/src/models_library/api_schemas_storage/data_export_tasks.py +++ /dev/null @@ -1,14 +0,0 @@ -# pylint: disable=R6301 -from pathlib import Path - -from models_library.api_schemas_rpc_data_export.tasks import TaskRpcId -from pydantic import BaseModel, Field - - -class DataExportTaskStartInput(BaseModel): - paths: list[Path] = Field(..., min_length=1) - - -class DataExportTaskAbortOutput(BaseModel): - result: bool - task_id: TaskRpcId diff --git a/packages/models-library/src/models_library/storage_schemas.py b/packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py similarity index 98% rename from packages/models-library/src/models_library/storage_schemas.py rename to packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py index 2a9ffc6acc3..2faa2462cfd 100644 --- a/packages/models-library/src/models_library/storage_schemas.py +++ b/packages/models-library/src/models_library/api_schemas_storage/storage_schemas.py @@ -8,6 +8,8 @@ from datetime import datetime from enum import Enum + +# /data-export from typing import Annotated, Any, Literal, Self, TypeAlias from uuid import UUID @@ -26,10 +28,10 @@ ) from pydantic.networks import AnyUrl -from .basic_regex import DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE -from .basic_types import SHA256Str -from .generics import ListModel -from .projects_nodes_io import ( +from ..basic_regex import DATCORE_DATASET_NAME_RE, 
S3_BUCKET_NAME_RE +from ..basic_types import SHA256Str +from ..generics import ListModel +from ..projects_nodes_io import ( LocationID, LocationName, NodeID, diff --git a/packages/models-library/src/models_library/api_schemas_webserver/storage.py b/packages/models-library/src/models_library/api_schemas_webserver/storage.py new file mode 100644 index 00000000000..10808b69049 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_webserver/storage.py @@ -0,0 +1,70 @@ +from datetime import datetime +from pathlib import Path +from typing import Any + +from ..api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + AsyncJobStatus, +) +from ..api_schemas_storage.data_export_async_jobs import DataExportTaskStartInput +from ..progress_bar import ProgressReport +from ..projects_nodes_io import LocationID +from ..users import UserID +from ._base import InputSchema, OutputSchema + + +class DataExportPost(InputSchema): + paths: list[Path] + + def to_rpc_schema( + self, user_id: UserID, product_name: str, location_id: LocationID + ) -> DataExportTaskStartInput: + return DataExportTaskStartInput( + paths=self.paths, + user_id=user_id, + product_name=product_name, + location_id=location_id, + ) + + +class StorageAsyncJobGet(OutputSchema): + job_id: AsyncJobId + + @classmethod + def from_rpc_schema(cls, async_job_rpc_get: AsyncJobGet) -> "StorageAsyncJobGet": + return StorageAsyncJobGet(job_id=async_job_rpc_get.job_id) + + +class StorageAsyncJobStatus(OutputSchema): + job_id: AsyncJobId + progress: ProgressReport + done: bool + started: datetime + stopped: datetime | None + + @classmethod + def from_rpc_schema( + cls, async_job_rpc_status: AsyncJobStatus + ) -> "StorageAsyncJobStatus": + return StorageAsyncJobStatus( + job_id=async_job_rpc_status.job_id, + progress=async_job_rpc_status.progress, + done=async_job_rpc_status.done, + started=async_job_rpc_status.started, + stopped=async_job_rpc_status.stopped, + ) + + +class 
StorageAsyncJobResult(OutputSchema): + result: Any | None + error: Any | None + + @classmethod + def from_rpc_schema( + cls, async_job_rpc_result: AsyncJobResult + ) -> "StorageAsyncJobResult": + return StorageAsyncJobResult( + result=async_job_rpc_result.result, error=async_job_rpc_result.error + ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 0a51779192b..61d630d994c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -8,7 +8,11 @@ import orjson from aws_library.s3 import MultiPartUploadLinks from fastapi import status -from models_library.storage_schemas import ETag, FileUploadSchema, UploadedPart +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadSchema, + UploadedPart, +) from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index c281df43595..da657de6917 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -12,10 +12,7 @@ from aioresponses.core import CallbackResult from faker import Faker from models_library.api_schemas_directorv2.comp_tasks import ComputationGet -from models_library.generics import Envelope -from models_library.projects_pipeline import ComputationTask -from models_library.projects_state import RunningState -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -25,6 +22,9 @@ 
LinkType, PresignedLink, ) +from models_library.generics import Envelope +from models_library.projects_pipeline import ComputationTask +from models_library.projects_state import RunningState from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/__init__.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/__init__.py similarity index 100% rename from packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/__init__.py rename to packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/__init__.py diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py new file mode 100644 index 00000000000..8daa9f674c4 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py @@ -0,0 +1,101 @@ +from typing import Final + +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobAbort, + AsyncJobAccessData, + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from pydantic import NonNegativeInt, TypeAdapter + +from ... 
import RabbitMQRPCClient + +_DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 30 + +_RPC_METHOD_NAME_ADAPTER = TypeAdapter(RPCMethodName) + + +async def abort( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + access_data: AsyncJobAccessData | None +) -> AsyncJobAbort: + result = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("abort"), + job_id=job_id, + access_data=access_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, AsyncJobAbort) + return result + + +async def get_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + access_data: AsyncJobAccessData | None +) -> AsyncJobStatus: + result = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("get_status"), + job_id=job_id, + access_data=access_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, AsyncJobStatus) + return result + + +async def get_result( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_id: AsyncJobId, + access_data: AsyncJobAccessData | None +) -> AsyncJobResult: + result = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("get_result"), + job_id=job_id, + access_data=access_data, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, AsyncJobResult) + return result + + +async def list_jobs( + rabbitmq_rpc_client: RabbitMQRPCClient, *, rpc_namespace: RPCNamespace, filter_: str +) -> list[AsyncJobGet]: + result: list[AsyncJobGet] = await rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python("list_jobs"), + filter_=filter_, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + return result + + +async def submit_job( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + rpc_namespace: RPCNamespace, + job_name: str, + **kwargs +) -> AsyncJobGet: + result = await 
rabbitmq_rpc_client.request( + rpc_namespace, + _RPC_METHOD_NAME_ADAPTER.validate_python(job_name), + **kwargs, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, AsyncJobGet) + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py deleted file mode 100644 index 8e71ae1ec68..00000000000 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/data_export.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Final - -from models_library.api_schemas_rpc_data_export.tasks import ( - TaskRpcGet, - TaskRpcId, - TaskRpcResult, - TaskRpcStatus, -) -from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE -from models_library.api_schemas_storage.data_export_tasks import ( - DataExportTaskAbortOutput, - DataExportTaskStartInput, -) -from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, TypeAdapter - -from ... 
import RabbitMQRPCClient - -_DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 30 - -_RPC_METHOD_NAME_ADAPTER = TypeAdapter(RPCMethodName) - - -async def start_data_export( - rabbitmq_rpc_client: RabbitMQRPCClient, *, paths: DataExportTaskStartInput -) -> TaskRpcGet: - result = await rabbitmq_rpc_client.request( - STORAGE_RPC_NAMESPACE, - _RPC_METHOD_NAME_ADAPTER.validate_python("start_data_export"), - paths=paths, - timeout_s=_DEFAULT_TIMEOUT_S, - ) - assert isinstance(result, TaskRpcGet) - return result - - -async def abort_data_export( - rabbitmq_rpc_client: RabbitMQRPCClient, *, task_id: TaskRpcId -) -> DataExportTaskAbortOutput: - result = await rabbitmq_rpc_client.request( - STORAGE_RPC_NAMESPACE, - _RPC_METHOD_NAME_ADAPTER.validate_python("abort_data_export"), - task_id=task_id, - timeout_s=_DEFAULT_TIMEOUT_S, - ) - assert isinstance(result, DataExportTaskAbortOutput) - return result - - -async def get_data_export_status( - rabbitmq_rpc_client: RabbitMQRPCClient, *, task_id: TaskRpcId -) -> TaskRpcStatus: - result = await rabbitmq_rpc_client.request( - STORAGE_RPC_NAMESPACE, - _RPC_METHOD_NAME_ADAPTER.validate_python("get_data_export_status"), - task_id=task_id, - timeout_s=_DEFAULT_TIMEOUT_S, - ) - assert isinstance(result, TaskRpcStatus) - return result - - -async def get_data_export_result( - rabbitmq_rpc_client: RabbitMQRPCClient, *, task_id: TaskRpcId -) -> TaskRpcResult: - result = await rabbitmq_rpc_client.request( - STORAGE_RPC_NAMESPACE, - _RPC_METHOD_NAME_ADAPTER.validate_python("get_data_export_result"), - task_id=task_id, - timeout_s=_DEFAULT_TIMEOUT_S, - ) - assert isinstance(result, TaskRpcResult) - return result diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py index 9042568652f..08bd0a8cf97 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py +++ 
b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py @@ -2,9 +2,7 @@ from typing import cast from aiohttp import ClientError, ClientSession -from models_library.generics import Envelope -from models_library.projects_nodes_io import LocationID, LocationName -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( ETag, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -12,6 +10,8 @@ FileUploadCompletionBody, UploadedPart, ) +from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID, LocationName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, TypeAdapter diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 5e1d6461b65..7f6801043cd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -17,8 +17,12 @@ ClientSession, RequestInfo, ) +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadSchema, + UploadedPart, +) from models_library.basic_types import SHA256Str -from models_library.storage_schemas import ETag, FileUploadSchema, UploadedPart from multidict import MultiMapping from pydantic import AnyUrl, NonNegativeInt from servicelib.aiohttp import status diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index ca0a54875ea..46b8444fde9 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -5,15 +5,15 @@ import aiofiles from aiohttp import ClientSession -from models_library.basic_types 
import SHA256Str -from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( ETag, FileMetaDataGet, FileUploadSchema, LinkType, UploadedPart, ) +from models_library.basic_types import SHA256Str +from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import create_sha256_checksum diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index a97905b7ec1..71d80febbc0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -10,16 +10,16 @@ from aiohttp import ClientResponse, ClientSession from aiohttp import client as aiohttp_client_module from aiohttp.client_exceptions import ClientConnectionError, ClientResponseError -from models_library.basic_types import SHA256Str -from models_library.generics import Envelope -from models_library.projects_nodes_io import LocationID, StorageFileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocationArray, FileMetaDataGet, FileUploadSchema, LinkType, PresignedLink, ) +from models_library.basic_types import SHA256Str +from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize from pydantic.networks import AnyUrl diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py index 5c107efdf62..8874f98efe7 100644 --- 
a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py @@ -1,8 +1,8 @@ import logging +from models_library.api_schemas_storage.storage_schemas import LinkType as FileLinkType from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr -from models_library.storage_schemas import LinkType as FileLinkType from models_library.users import UserID from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.r_clone import RCloneSettings diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index fa7489ac36f..59c73716ca3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -6,10 +6,10 @@ from pathlib import Path from typing import Any +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey -from models_library.storage_schemas import LinkType from models_library.users import UserID from pydantic import BaseModel, ConfigDict, Field, ValidationError from pydantic_core import InitErrorDetails diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index ef21f8c653a..014aff56529 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -6,9 +6,9 @@ from pprint import pformat from typing import Any +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey -from 
models_library.storage_schemas import LinkType from pydantic import ( AnyUrl, ConfigDict, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py index 20fd414bc05..41f317b6f44 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py @@ -4,9 +4,12 @@ from pathlib import Path from typing import Any +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadSchema, + LinkType, +) from models_library.basic_types import SHA256Str from models_library.services_types import FileName, ServicePortKey -from models_library.storage_schemas import FileUploadSchema, LinkType from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.progress_bar import ProgressBarData diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index f8dc1e5029c..b3aba7d8d35 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -13,9 +13,9 @@ import pytest import sqlalchemy as sa from aiohttp import ClientSession +from models_library.api_schemas_storage.storage_schemas import FileUploadSchema from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID -from models_library.storage_schemas import FileUploadSchema from models_library.users import UserID from pydantic import TypeAdapter from pytest_simcore.helpers.faker_factories import random_project, random_user diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index f353494e01d..70ad8adbbc7 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ 
b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -13,7 +13,7 @@ from aiohttp import ClientResponse, ClientSession, TCPConnector from aioresponses import aioresponses from faker import Faker -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileUploadLinks, FileUploadSchema, UploadedPart, diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 50476a7b15f..516c828266f 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -21,8 +21,8 @@ from aiohttp.client import ClientSession from aioresponses import aioresponses as AioResponsesMock from faker import Faker +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from models_library.projects_nodes_io import LocationID -from models_library.storage_schemas import FileMetaDataGet from pydantic import TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture from servicelib.progress_bar import ProgressBarData diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index c094b7a04b9..b02c8b2244b 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -12,13 +12,13 @@ import pytest from aioresponses import aioresponses as AioResponsesMock from faker import Faker -from models_library.projects_nodes_io import SimcoreS3FileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocationArray, FileMetaDataGet, FileUploadSchema, LocationID, ) +from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter from 
pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 4b15c92e5c2..0821d81abab 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -10,8 +10,12 @@ from fastapi import Header, Request, UploadFile, status from fastapi.exceptions import HTTPException from fastapi_pagination.api import create_page +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadCompletionBody, + LinkType, +) from models_library.basic_types import SHA256Str -from models_library.storage_schemas import ETag, FileUploadCompletionBody, LinkType from pydantic import AnyUrl, ByteSize, PositiveInt, TypeAdapter, ValidationError from servicelib.fastapi.requests_decorators import cancel_on_disconnect from simcore_sdk.node_ports_common.constants import SIMCORE_LOCATION diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/files.py b/services/api-server/src/simcore_service_api_server/models/schemas/files.py index 4aae188c1a0..29cc9aacf0a 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/files.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/files.py @@ -7,9 +7,9 @@ import aiofiles from fastapi import UploadFile +from models_library.api_schemas_storage.storage_schemas import ETag from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID -from models_library.storage_schemas import ETag from pydantic import ( AnyHttpUrl, BaseModel, diff --git a/services/api-server/src/simcore_service_api_server/services_http/storage.py b/services/api-server/src/simcore_service_api_server/services_http/storage.py index 4abcef973f4..52d3c8e8ddb 100644 --- 
a/services/api-server/src/simcore_service_api_server/services_http/storage.py +++ b/services/api-server/src/simcore_service_api_server/services_http/storage.py @@ -8,11 +8,16 @@ from fastapi import FastAPI from fastapi.encoders import jsonable_encoder +from models_library.api_schemas_storage.storage_schemas import FileMetaDataArray +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet as StorageFileMetaData, +) +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadSchema, + PresignedLink, +) from models_library.basic_types import SHA256Str from models_library.generics import Envelope -from models_library.storage_schemas import FileMetaDataArray -from models_library.storage_schemas import FileMetaDataGet as StorageFileMetaData -from models_library.storage_schemas import FileUploadSchema, PresignedLink from pydantic import AnyUrl, PositiveInt from settings_library.tracing import TracingSettings from starlette.datastructures import URL diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index a36a4f7513a..83db84a5d2c 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -27,12 +27,12 @@ TaskProgress, TaskStatus, ) +from models_library.api_schemas_storage.storage_schemas import HealthCheck from models_library.api_schemas_webserver.projects import ProjectGet from models_library.app_diagnostics import AppStatusCheck from models_library.generics import Envelope from models_library.projects import ProjectID from models_library.projects_nodes_io import BaseFileLink, SimcoreS3FileID -from models_library.storage_schemas import HealthCheck from models_library.users import UserID from moto.server import ThreadedMotoServer from packaging.version import Version diff --git a/services/api-server/tests/unit/test_api_files.py b/services/api-server/tests/unit/test_api_files.py index ed8666e693c..323332f1f5b 100644 --- 
a/services/api-server/tests/unit/test_api_files.py +++ b/services/api-server/tests/unit/test_api_files.py @@ -18,8 +18,12 @@ from fastapi import status from fastapi.encoders import jsonable_encoder from httpx import AsyncClient +from models_library.api_schemas_storage.storage_schemas import ( + ETag, + FileUploadCompletionBody, + UploadedPart, +) from models_library.basic_types import SHA256Str -from models_library.storage_schemas import ETag, FileUploadCompletionBody, UploadedPart from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, diff --git a/services/api-server/tests/unit/test_models_schemas_files.py b/services/api-server/tests/unit/test_models_schemas_files.py index 1ced425d5f9..3a57327e324 100644 --- a/services/api-server/tests/unit/test_models_schemas_files.py +++ b/services/api-server/tests/unit/test_models_schemas_files.py @@ -11,9 +11,11 @@ import pytest from fastapi import UploadFile +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet as StorageFileMetaData, +) from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import StorageFileID -from models_library.storage_schemas import FileMetaDataGet as StorageFileMetaData from pydantic import TypeAdapter, ValidationError from simcore_service_api_server.models.schemas.files import File from simcore_service_api_server.services_http.storage import to_file_api_model diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 147b857213d..7c991d3390d 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -30,11 +30,14 @@ from faker import Faker from fastapi import FastAPI from models_library.api_schemas_directorv2.services import NodeRequirements +from models_library.api_schemas_storage.storage_schemas import ( + 
FileUploadLinks, + FileUploadSchema, +) from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID from models_library.services import ServiceRunID -from models_library.storage_schemas import FileUploadLinks, FileUploadSchema from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pydantic.networks import AnyUrl diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index 7302e23f209..e205946c90d 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -19,9 +19,9 @@ from botocore.client import Config from botocore.exceptions import ClientError from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from models_library.storage_schemas import S3BucketName from models_library.users import UserID from pydantic import TypeAdapter from pytest_mock import MockerFixture diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py index 62feebd0e0c..ed19570b523 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/service_runs_db.py @@ -9,6 +9,7 @@ from models_library.api_schemas_resource_usage_tracker.credit_transactions import ( WalletTotalCredits, ) +from 
models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.products import ProductName from models_library.projects import ProjectID from models_library.resource_tracker import ( @@ -18,7 +19,6 @@ ) from models_library.rest_ordering import OrderBy, OrderDirection from models_library.services_types import ServiceRunID -from models_library.storage_schemas import S3BucketName from models_library.users import UserID from models_library.wallets import WalletID from pydantic import PositiveInt diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py index 1cdd7d07673..b83ce3f49db 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/s3.py @@ -3,7 +3,7 @@ from aws_library.s3 import S3NotConnectedError, SimcoreS3API from fastapi import FastAPI -from models_library.storage_schemas import S3BucketName +from models_library.api_schemas_storage.storage_schemas import S3BucketName from pydantic import TypeAdapter from servicelib.logging_utils import log_context from settings_library.s3 import S3Settings diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py index 6d1718e8531..9a9a1398712 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/service_runs.py @@ -12,6 +12,7 @@ ServiceRunGet, ServiceRunPage, ) +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.products import ProductName from 
models_library.projects import ProjectID from models_library.resource_tracker import ( @@ -21,7 +22,6 @@ ServicesAggregatedUsagesType, ) from models_library.rest_ordering import OrderBy -from models_library.storage_schemas import S3BucketName from models_library.users import UserID from models_library.wallets import WalletID from pydantic import AnyUrl, TypeAdapter diff --git a/services/storage/src/simcore_service_storage/api/rest/_datasets.py b/services/storage/src/simcore_service_storage/api/rest/_datasets.py index a71e4a3fa46..76e6e185068 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_datasets.py +++ b/services/storage/src/simcore_service_storage/api/rest/_datasets.py @@ -2,9 +2,12 @@ from typing import Annotated from fastapi import APIRouter, Depends, Request +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileMetaDataGet, +) from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID -from models_library.storage_schemas import DatasetMetaDataGet, FileMetaDataGet from ...dsm import get_dsm_provider from ...models import FilesMetadataDatasetQueryParams, StorageQueryParamsBase diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 2e1f3dcea7a..3b1bb4c8a46 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -3,9 +3,7 @@ from typing import Annotated, cast from fastapi import APIRouter, Depends, Header, HTTPException, Request -from models_library.generics import Envelope -from models_library.projects_nodes_io import LocationID, StorageFileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileMetaDataGetv010, FileUploadCompleteFutureResponse, @@ -17,6 +15,8 @@ FileUploadSchema, SoftCopyBody, ) 
+from models_library.generics import Envelope +from models_library.projects_nodes_io import LocationID, StorageFileID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from yarl import URL diff --git a/services/storage/src/simcore_service_storage/api/rest/_health.py b/services/storage/src/simcore_service_storage/api/rest/_health.py index 2535478f735..7272066ee75 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_health.py +++ b/services/storage/src/simcore_service_storage/api/rest/_health.py @@ -8,9 +8,9 @@ from aws_library.s3 import S3AccessError from fastapi import APIRouter, Request +from models_library.api_schemas_storage.storage_schemas import HealthCheck, S3BucketName from models_library.app_diagnostics import AppStatusCheck from models_library.generics import Envelope -from models_library.storage_schemas import HealthCheck, S3BucketName from pydantic import TypeAdapter from servicelib.db_asyncpg_utils import check_postgres_liveness from servicelib.fastapi.db_asyncpg_engine import get_engine diff --git a/services/storage/src/simcore_service_storage/api/rest/_locations.py b/services/storage/src/simcore_service_storage/api/rest/_locations.py index 2110e322339..133c65e2005 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_locations.py +++ b/services/storage/src/simcore_service_storage/api/rest/_locations.py @@ -2,8 +2,8 @@ from typing import Annotated from fastapi import APIRouter, Depends, Request, status +from models_library.api_schemas_storage.storage_schemas import FileLocation from models_library.generics import Envelope -from models_library.storage_schemas import FileLocation # Exclusive for simcore-s3 storage ----------------------- from ...dsm import get_dsm_provider diff --git a/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py index 650a8c3c933..29b199e6feb 100644 --- 
a/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rest/_simcore_s3.py @@ -5,9 +5,12 @@ from fastapi import APIRouter, Depends, FastAPI, Request from models_library.api_schemas_long_running_tasks.base import TaskProgress from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + FoldersBody, +) from models_library.generics import Envelope from models_library.projects import ProjectID -from models_library.storage_schemas import FileMetaDataGet, FoldersBody from servicelib.aiohttp import status from servicelib.fastapi.long_running_tasks._dependencies import get_tasks_manager from servicelib.logging_utils import log_context diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py new file mode 100644 index 00000000000..c9e9699942a --- /dev/null +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -0,0 +1,59 @@ +# pylint: disable=unused-argument +from datetime import datetime +from uuid import uuid4 + +from fastapi import FastAPI +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobAbort, + AsyncJobAccessData, + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + AsyncJobStatus, +) +from models_library.api_schemas_rpc_async_jobs.exceptions import ( + ResultError, + StatusError, +) +from models_library.progress_bar import ProgressReport +from servicelib.rabbitmq import RPCRouter + +router = RPCRouter() + + +@router.expose() +async def abort( + app: FastAPI, job_id: AsyncJobId, access_data: AsyncJobAccessData | None +) -> AsyncJobAbort: + assert app # nosec + return AsyncJobAbort(result=True, job_id=job_id) + + +@router.expose(reraise_if_error_type=(StatusError,)) +async def get_status( + app: FastAPI, job_id: AsyncJobId, access_data: AsyncJobAccessData | None +) 
-> AsyncJobStatus: + assert app # nosec + progress_report = ProgressReport(actual_value=0.5, total=1.0, attempt=1) + return AsyncJobStatus( + job_id=job_id, + progress=progress_report, + done=False, + started=datetime.now(), + stopped=None, + ) + + +@router.expose(reraise_if_error_type=(ResultError,)) +async def get_result( + app: FastAPI, job_id: AsyncJobId, access_data: AsyncJobAccessData | None +) -> AsyncJobResult: + assert app # nosec + assert job_id # nosec + return AsyncJobResult(result="Here's your result.", error=None) + + +@router.expose() +async def list_jobs(app: FastAPI, filter_: str) -> list[AsyncJobGet]: + assert app # nosec + return [AsyncJobGet(job_id=AsyncJobId(f"{uuid4()}"), job_name="myjob")] diff --git a/services/storage/src/simcore_service_storage/api/rpc/_data_export.py b/services/storage/src/simcore_service_storage/api/rpc/_data_export.py index 08520687803..644daf24586 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_data_export.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_data_export.py @@ -1,55 +1,30 @@ -from datetime import datetime from uuid import uuid4 from fastapi import FastAPI -from models_library.api_schemas_rpc_data_export.tasks import ( - TaskRpcGet, - TaskRpcId, - TaskRpcResult, - TaskRpcStatus, -) -from models_library.api_schemas_storage.data_export_tasks import ( - DataExportTaskAbortOutput, +from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobGet, AsyncJobId +from models_library.api_schemas_storage.data_export_async_jobs import ( + AccessRightError, + DataExportError, DataExportTaskStartInput, + InvalidFileIdentifierError, ) from servicelib.rabbitmq import RPCRouter router = RPCRouter() -@router.expose() +@router.expose( + reraise_if_error_type=( + InvalidFileIdentifierError, + AccessRightError, + DataExportError, + ) +) async def start_data_export( app: FastAPI, paths: DataExportTaskStartInput -) -> TaskRpcGet: +) -> AsyncJobGet: assert app # nosec - return TaskRpcGet( 
- task_id=uuid4(), - task_name=", ".join(str(p) for p in paths.paths), + return AsyncJobGet( + job_id=AsyncJobId(f"{uuid4()}"), + job_name=", ".join(str(p) for p in paths.paths), ) - - -@router.expose() -async def abort_data_export( - app: FastAPI, task_id: TaskRpcId -) -> DataExportTaskAbortOutput: - assert app # nosec - return DataExportTaskAbortOutput(result=True, task_id=task_id) - - -@router.expose() -async def get_data_export_status(app: FastAPI, task_id: TaskRpcId) -> TaskRpcStatus: - assert app # nosec - return TaskRpcStatus( - task_id=task_id, - task_progress=0.5, - done=False, - started=datetime.now(), - stopped=None, - ) - - -@router.expose() -async def get_data_export_result(app: FastAPI, task_id: TaskRpcId) -> TaskRpcResult: - assert app # nosec - assert task_id # nosec - return TaskRpcResult(result="Here's your result.", error=None) diff --git a/services/storage/src/simcore_service_storage/api/rpc/routes.py b/services/storage/src/simcore_service_storage/api/rpc/routes.py index 4a39c8d9288..812ce296adf 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/routes.py +++ b/services/storage/src/simcore_service_storage/api/rpc/routes.py @@ -6,12 +6,12 @@ from servicelib.rabbitmq import RPCRouter from ...modules.rabbitmq import get_rabbitmq_rpc_server -from . import _data_export +from . 
import _async_jobs, _data_export _logger = logging.getLogger(__name__) -ROUTERS: list[RPCRouter] = [_data_export.router] +ROUTERS: list[RPCRouter] = [_data_export.router, _async_jobs.router] def setup_rpc_api_routes(app: FastAPI) -> None: diff --git a/services/storage/src/simcore_service_storage/constants.py b/services/storage/src/simcore_service_storage/constants.py index ea3aa84d967..fecbfb54e87 100644 --- a/services/storage/src/simcore_service_storage/constants.py +++ b/services/storage/src/simcore_service_storage/constants.py @@ -1,7 +1,7 @@ from typing import Final from aws_library.s3 import PRESIGNED_LINK_MAX_SIZE, S3_MAX_FILE_SIZE -from models_library.storage_schemas import LinkType +from models_library.api_schemas_storage.storage_schemas import LinkType from pydantic import ByteSize RETRY_WAIT_SECS = 2 diff --git a/services/storage/src/simcore_service_storage/datcore_dsm.py b/services/storage/src/simcore_service_storage/datcore_dsm.py index 97c4408b5b5..3c9c50e3acc 100644 --- a/services/storage/src/simcore_service_storage/datcore_dsm.py +++ b/services/storage/src/simcore_service_storage/datcore_dsm.py @@ -1,10 +1,14 @@ from dataclasses import dataclass from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + DatCoreDatasetName, + LinkType, + UploadedPart, +) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID -from models_library.storage_schemas import DatCoreDatasetName, LinkType, UploadedPart from models_library.users import UserID from pydantic import AnyUrl, ByteSize diff --git a/services/storage/src/simcore_service_storage/dsm_factory.py b/services/storage/src/simcore_service_storage/dsm_factory.py index b1576b6e020..a5d579a23ee 100644 --- a/services/storage/src/simcore_service_storage/dsm_factory.py +++ b/services/storage/src/simcore_service_storage/dsm_factory.py @@ -3,10 +3,10 @@ from 
dataclasses import dataclass, field from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import LinkType, UploadedPart from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID -from models_library.storage_schemas import LinkType, UploadedPart from models_library.users import UserID from pydantic import AnyUrl, ByteSize diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index c90154bb002..3febefb138f 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -6,6 +6,15 @@ import arrow from aws_library.s3 import UploadID +from models_library.api_schemas_storage.storage_schemas import ( + UNDEFINED_SIZE, + UNDEFINED_SIZE_TYPE, + DatasetMetaDataGet, + ETag, + FileMetaDataGet, + LinkType, + S3BucketName, +) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import ( @@ -19,15 +28,6 @@ DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, ) -from models_library.storage_schemas import ( - UNDEFINED_SIZE, - UNDEFINED_SIZE_TYPE, - DatasetMetaDataGet, - ETag, - FileMetaDataGet, - LinkType, - S3BucketName, -) from models_library.users import UserID from models_library.utils.common_validators import empty_str_to_none_pre_validator from pydantic import ( diff --git a/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py index 1ed8077550e..ebcb56cba0c 100644 --- a/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py +++ b/services/storage/src/simcore_service_storage/modules/datcore_adapter/datcore_adapter.py @@ -5,7 +5,7 @@ import 
httpx from fastapi import FastAPI -from models_library.storage_schemas import DatCoreDatasetName +from models_library.api_schemas_storage.storage_schemas import DatCoreDatasetName from models_library.users import UserID from pydantic import AnyUrl, TypeAdapter from servicelib.fastapi.client_session import get_client_session diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 7911a010be8..753e36f9834 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -18,6 +18,13 @@ UploadID, ) from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + UNDEFINED_SIZE, + UNDEFINED_SIZE_TYPE, + LinkType, + S3BucketName, + UploadedPart, +) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import ( @@ -26,13 +33,6 @@ SimcoreS3FileID, StorageFileID, ) -from models_library.storage_schemas import ( - UNDEFINED_SIZE, - UNDEFINED_SIZE_TYPE, - LinkType, - S3BucketName, - UploadedPart, -) from models_library.users import UserID from pydantic import AnyUrl, ByteSize, NonNegativeInt, TypeAdapter from servicelib.aiohttp.long_running_tasks.server import TaskProgress diff --git a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py index 921facf24d1..db431b02c01 100644 --- a/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py +++ b/services/storage/src/simcore_service_storage/utils/simcore_s3_dsm_utils.py @@ -2,12 +2,12 @@ from pathlib import Path from aws_library.s3 import S3MetaData, SimcoreS3API +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects_nodes_io import ( SimcoreS3DirectoryID, SimcoreS3FileID, StorageFileID, 
) -from models_library.storage_schemas import S3BucketName from pydantic import ByteSize, NonNegativeInt, TypeAdapter from servicelib.utils import ensure_ends_with from sqlalchemy.ext.asyncio import AsyncConnection diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index fefbaa157ee..15a95dd919a 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -23,11 +23,7 @@ from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI -from models_library.basic_types import SHA256Str -from models_library.projects import ProjectID -from models_library.projects_nodes import NodeID -from models_library.projects_nodes_io import LocationID, SimcoreS3FileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -37,6 +33,10 @@ LinkType, UploadedPart, ) +from models_library.basic_types import SHA256Str +from models_library.projects import ProjectID +from models_library.projects_nodes import NodeID +from models_library.projects_nodes_io import LocationID, SimcoreS3FileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, TypeAdapter diff --git a/services/storage/tests/unit/test_data_export.py b/services/storage/tests/unit/test_data_export.py index 1d350753c32..bb25413e948 100644 --- a/services/storage/tests/unit/test_data_export.py +++ b/services/storage/tests/unit/test_data_export.py @@ -6,21 +6,22 @@ import pytest from faker import Faker from fastapi import FastAPI -from models_library.api_schemas_rpc_data_export.tasks import ( - TaskRpcGet, - TaskRpcId, - TaskRpcResult, - TaskRpcStatus, +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobAbort, + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + AsyncJobStatus, ) -from 
models_library.api_schemas_storage.data_export_tasks import ( - DataExportTaskAbortOutput, +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from models_library.api_schemas_storage.data_export_async_jobs import ( DataExportTaskStartInput, ) from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQRPCClient -from servicelib.rabbitmq.rpc_interfaces.storage import data_export +from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from settings_library.rabbit import RabbitSettings from simcore_service_storage.core.settings import ApplicationSettings @@ -74,27 +75,58 @@ async def rpc_client( async def test_start_data_export(rpc_client: RabbitMQRPCClient, faker: Faker): - result = await data_export.start_data_export( - rpc_client, paths=DataExportTaskStartInput(paths=[Path(faker.file_path())]) + result = await async_jobs.submit_job( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_name="start_data_export", + paths=DataExportTaskStartInput( + user_id=1, + product_name="osparc", + location_id=0, + paths=[Path(faker.file_path())], + ), ) - assert isinstance(result, TaskRpcGet) + assert isinstance(result, AsyncJobGet) async def test_abort_data_export(rpc_client: RabbitMQRPCClient, faker: Faker): - _task_id = TaskRpcId(faker.uuid4()) - result = await data_export.abort_data_export(rpc_client, task_id=_task_id) - assert isinstance(result, DataExportTaskAbortOutput) - assert result.task_id == _task_id + _job_id = AsyncJobId(faker.uuid4()) + result = await async_jobs.abort( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + access_data=None, + ) + assert isinstance(result, AsyncJobAbort) + assert result.job_id == _job_id async def test_get_data_export_status(rpc_client: RabbitMQRPCClient, faker: Faker): - _task_id = TaskRpcId(faker.uuid4()) - result = await 
data_export.get_data_export_status(rpc_client, task_id=_task_id) - assert isinstance(result, TaskRpcStatus) - assert result.task_id == _task_id + _job_id = AsyncJobId(faker.uuid4()) + result = await async_jobs.get_status( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + access_data=None, + ) + assert isinstance(result, AsyncJobStatus) + assert result.job_id == _job_id async def test_get_data_export_result(rpc_client: RabbitMQRPCClient, faker: Faker): - _task_id = TaskRpcId(faker.uuid4()) - result = await data_export.get_data_export_result(rpc_client, task_id=_task_id) - assert isinstance(result, TaskRpcResult) + _job_id = AsyncJobId(faker.uuid4()) + result = await async_jobs.get_result( + rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=_job_id, + access_data=None, + ) + assert isinstance(result, AsyncJobResult) + + +async def test_list_jobs(rpc_client: RabbitMQRPCClient, faker: Faker): + result = await async_jobs.list_jobs( + rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="" + ) + assert isinstance(result, list) + assert all(isinstance(elm, AsyncJobGet) for elm in result) diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index 0d98e7d65de..6144571e3a3 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -16,9 +16,9 @@ import pytest from aws_library.s3 import MultiPartUploadLinks, SimcoreS3API from faker import Faker +from models_library.api_schemas_storage.storage_schemas import LinkType from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import SimcoreS3DirectoryID, SimcoreS3FileID -from models_library.storage_schemas import LinkType from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.parametrizations import byte_size_ids diff --git a/services/storage/tests/unit/test_dsm_soft_links.py 
b/services/storage/tests/unit/test_dsm_soft_links.py index 859d5c6be09..da0d363482d 100644 --- a/services/storage/tests/unit/test_dsm_soft_links.py +++ b/services/storage/tests/unit/test_dsm_soft_links.py @@ -8,8 +8,8 @@ import pytest from faker import Faker +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects_nodes_io import SimcoreS3FileID -from models_library.storage_schemas import S3BucketName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, TypeAdapter diff --git a/services/storage/tests/unit/test_handlers_datasets.py b/services/storage/tests/unit/test_handlers_datasets.py index 6f69b94d289..9bdbc743772 100644 --- a/services/storage/tests/unit/test_handlers_datasets.py +++ b/services/storage/tests/unit/test_handlers_datasets.py @@ -12,9 +12,12 @@ from faker import Faker from fastapi import FastAPI from httpx import AsyncClient +from models_library.api_schemas_storage.storage_schemas import ( + DatasetMetaDataGet, + FileMetaDataGet, +) from models_library.projects import ProjectID from models_library.projects_nodes_io import SimcoreS3FileID -from models_library.storage_schemas import DatasetMetaDataGet, FileMetaDataGet from models_library.users import UserID from pydantic import ByteSize from pytest_mock import MockerFixture diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 550d1e6e62b..d6946433614 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -26,10 +26,7 @@ from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE from faker import Faker from fastapi import FastAPI -from models_library.basic_types import SHA256Str -from models_library.projects import ProjectID -from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID -from 
models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileUploadCompleteFutureResponse, FileUploadCompleteResponse, @@ -41,6 +38,9 @@ SoftCopyBody, UploadedPart, ) +from models_library.basic_types import SHA256Str +from models_library.projects import ProjectID +from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, ByteSize, TypeAdapter diff --git a/services/storage/tests/unit/test_handlers_files_metadata.py b/services/storage/tests/unit/test_handlers_files_metadata.py index 564bd500810..4c70256a574 100644 --- a/services/storage/tests/unit/test_handlers_files_metadata.py +++ b/services/storage/tests/unit/test_handlers_files_metadata.py @@ -13,8 +13,11 @@ import pytest from faker import Faker from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + SimcoreS3FileID, +) from models_library.projects import ProjectID -from models_library.storage_schemas import FileMetaDataGet, SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.fastapi import url_from_operation_id diff --git a/services/storage/tests/unit/test_handlers_health.py b/services/storage/tests/unit/test_handlers_health.py index 5556e6681ed..640fbb376b2 100644 --- a/services/storage/tests/unit/test_handlers_health.py +++ b/services/storage/tests/unit/test_handlers_health.py @@ -7,8 +7,8 @@ import httpx import simcore_service_storage._meta from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import HealthCheck, S3BucketName from models_library.app_diagnostics import AppStatusCheck -from models_library.storage_schemas import HealthCheck, S3BucketName from moto.server import ThreadedMotoServer from 
pytest_simcore.helpers.fastapi import url_from_operation_id from pytest_simcore.helpers.httpx_assert_checks import assert_status diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index f5ff87df5d0..0ddc39f206d 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -19,10 +19,13 @@ from aws_library.s3 import SimcoreS3API from faker import Faker from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + FoldersBody, +) from models_library.basic_types import SHA256Str from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr, SimcoreS3FileID -from models_library.storage_schemas import FileMetaDataGet, FoldersBody from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, TypeAdapter diff --git a/services/storage/tests/unit/test_models.py b/services/storage/tests/unit/test_models.py index c64959ec42d..da33d24ad50 100644 --- a/services/storage/tests/unit/test_models.py +++ b/services/storage/tests/unit/test_models.py @@ -1,9 +1,9 @@ import uuid import pytest +from models_library.api_schemas_storage.storage_schemas import S3BucketName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID -from models_library.storage_schemas import S3BucketName from pydantic import TypeAdapter, ValidationError from simcore_service_storage.models import FileMetaData from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py index 75707058dd5..92f3a9751bb 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm.py +++ 
b/services/storage/tests/unit/test_simcore_s3_dsm.py @@ -7,9 +7,9 @@ import pytest from faker import Faker +from models_library.api_schemas_storage.storage_schemas import FileUploadSchema from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import SimcoreS3FileID -from models_library.storage_schemas import FileUploadSchema from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from simcore_service_storage.models import FileMetaData diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py index b0c5e336170..3ee5d73a85a 100644 --- a/services/storage/tests/unit/test_utils.py +++ b/services/storage/tests/unit/test_utils.py @@ -13,9 +13,9 @@ import httpx import pytest from faker import Faker +from models_library.api_schemas_storage.storage_schemas import UNDEFINED_SIZE_TYPE from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from models_library.storage_schemas import UNDEFINED_SIZE_TYPE from pydantic import ByteSize, HttpUrl, TypeAdapter from pytest_simcore.helpers.faker_factories import DEFAULT_FAKER from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID diff --git a/services/web/server/VERSION b/services/web/server/VERSION index 831bbee364f..cb6b534abe1 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.58.1 +0.59.0 diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index bfe7ed522f8..8300bb68d43 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.58.1 +current_version = 0.59.0 commit = True message = services/webserver api version: {current_version} → {new_version} tag = False @@ -12,13 +12,13 @@ commit_args = --no-verify [tool:pytest] addopts = --strict-markers asyncio_mode = auto -markers = +markers = slow: marks tests as slow 
(deselect with '-m "not slow"') acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows." testit: "marks test to run during development" heavy_load: "mark tests that require large amount of data" [mypy] -plugins = +plugins = pydantic.mypy sqlalchemy.ext.mypy.plugin diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 7fc1c307e54..1e6ca222ff4 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.1.0 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.58.1 + version: 0.59.0 servers: - url: '' description: webserver @@ -6351,6 +6351,139 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_FileUploadCompleteFutureResponse_' + /v0/storage/locations/{location_id}/export-data: + post: + tags: + - storage + summary: Export Data + description: Export data + operationId: export_data + parameters: + - name: location_id + in: path + required: true + schema: + type: integer + title: Location Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DataExportPost' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_StorageAsyncJobGet_' + /v0/storage/async-jobs/{job_id}/status: + get: + tags: + - storage + summary: Get Async Job Status + description: Get async job status + operationId: get_async_job_status + parameters: + - name: job_id + in: path + required: true + schema: + type: string + format: uuid + title: Job Id + requestBody: + required: true + content: + application/json: + schema: + $ref: 
'#/components/schemas/StorageAsyncJobGet' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_StorageAsyncJobStatus_' + /v0/storage/async-jobs/{job_id}:abort: + post: + tags: + - storage + summary: Abort Async Job + description: aborts execution of an async job + operationId: abort_async_job + parameters: + - name: job_id + in: path + required: true + schema: + type: string + format: uuid + title: Job Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/StorageAsyncJobGet' + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + /v0/storage/async-jobs/{job_id}/result: + get: + tags: + - storage + summary: Get Async Job Result + description: Get the result of the async job + operationId: get_async_job_result + parameters: + - name: job_id + in: path + required: true + schema: + type: string + format: uuid + title: Job Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/StorageAsyncJobGet' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_StorageAsyncJobResult_' + /v0/storage/async-jobs: + get: + tags: + - storage + summary: Get Async Jobs + description: Retrunsa list of async jobs for the user + operationId: get_async_jobs + parameters: + - name: user_id + in: query + required: true + schema: + type: integer + exclusiveMinimum: true + title: User Id + minimum: 0 + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_list_StorageAsyncJobGet__' /v0/trash:empty: post: tags: @@ -8268,6 +8401,18 @@ components: - dataset title: DatCoreFileLink description: I/O port type to hold a link to a file in DATCORE storage + DataExportPost: + properties: + paths: + items: + type: 
string + format: path + type: array + title: Paths + type: object + required: + - paths + title: DataExportPost DatasetMetaData: properties: dataset_id: @@ -8967,6 +9112,45 @@ components: title: Error type: object title: Envelope[StatusDiagnosticsGet] + Envelope_StorageAsyncJobGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/StorageAsyncJobGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[StorageAsyncJobGet] + Envelope_StorageAsyncJobResult_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/StorageAsyncJobResult' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[StorageAsyncJobResult] + Envelope_StorageAsyncJobStatus_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/StorageAsyncJobStatus' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[StorageAsyncJobStatus] Envelope_TagGet_: properties: data: @@ -9586,6 +9770,22 @@ components: title: Error type: object title: Envelope[list[ServiceOutputGet]] + Envelope_list_StorageAsyncJobGet__: + properties: + data: + anyOf: + - items: + $ref: '#/components/schemas/StorageAsyncJobGet' + type: array + - type: 'null' + title: Data + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[list[StorageAsyncJobGet]] Envelope_list_TagGet__: properties: data: @@ -12722,6 +12922,59 @@ components: - productName - ui title: ProductUIGet + ProgressReport: + properties: + actual_value: + type: number + title: Actual Value + total: + type: number + title: Total + default: 1.0 + attempt: + type: integer + title: Attempt + default: 0 + unit: + anyOf: + - type: string + const: Byte + - type: 'null' + title: Unit + message: + anyOf: + - $ref: '#/components/schemas/ProgressStructuredMessage' + - type: 'null' + type: object + required: + - actual_value + title: ProgressReport + 
ProgressStructuredMessage: + properties: + description: + type: string + title: Description + current: + type: number + title: Current + total: + type: integer + title: Total + unit: + anyOf: + - type: string + - type: 'null' + title: Unit + sub: + anyOf: + - $ref: '#/components/schemas/ProgressStructuredMessage' + - type: 'null' + type: object + required: + - description + - current + - total + title: ProgressStructuredMessage ProjectCopyOverride: properties: name: @@ -14336,6 +14589,62 @@ components: - loop_tasks - top_tracemalloc title: StatusDiagnosticsGet + StorageAsyncJobGet: + properties: + jobId: + type: string + format: uuid + title: Jobid + type: object + required: + - jobId + title: StorageAsyncJobGet + StorageAsyncJobResult: + properties: + result: + anyOf: + - {} + - type: 'null' + title: Result + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + required: + - result + - error + title: StorageAsyncJobResult + StorageAsyncJobStatus: + properties: + jobId: + type: string + format: uuid + title: Jobid + progress: + $ref: '#/components/schemas/ProgressReport' + done: + type: boolean + title: Done + started: + type: string + format: date-time + title: Started + stopped: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Stopped + type: object + required: + - jobId + - progress + - done + - started + - stopped + title: StorageAsyncJobStatus Structure: properties: key: diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py index b371af80037..6b4b8df3f90 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py @@ -7,11 +7,11 @@ from aiohttp import web from aiohttp.client import ClientError +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from models_library.basic_types import 
KeyIDStr from models_library.projects import ProjectID from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, SimCoreFileLink -from models_library.storage_schemas import FileMetaDataGet from models_library.users import UserID from pydantic import ( BaseModel, diff --git a/services/web/server/src/simcore_service_webserver/storage/_exception_handlers.py b/services/web/server/src/simcore_service_webserver/storage/_exception_handlers.py new file mode 100644 index 00000000000..d6cecb75d9e --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/storage/_exception_handlers.py @@ -0,0 +1,32 @@ +from models_library.api_schemas_storage.data_export_async_jobs import ( + AccessRightError, + DataExportError, + InvalidFileIdentifierError, +) +from servicelib.aiohttp import status +from simcore_service_webserver.exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + InvalidFileIdentifierError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Could not find file.", + ), + AccessRightError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Accessright error.", + ), + DataExportError: HttpErrorInfo( + status.HTTP_500_INTERNAL_SERVER_ERROR, + "Could not export data.", + ), +} + + +handle_data_export_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/storage/_handlers.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py similarity index 64% rename from services/web/server/src/simcore_service_webserver/storage/_handlers.py rename to services/web/server/src/simcore_service_webserver/storage/_rest.py index cca952ae562..5dc1fb227fd 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_handlers.py +++ 
b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -3,19 +3,28 @@ Mostly resolves and redirect to storage API """ +import json import logging import urllib.parse from typing import Any, Final, NamedTuple from urllib.parse import quote, unquote from aiohttp import ClientTimeout, web -from models_library.projects_nodes_io import LocationID -from models_library.storage_schemas import ( +from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobAccessData +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompleteResponse, FileUploadCompletionBody, FileUploadSchema, LinkType, ) +from models_library.api_schemas_webserver.storage import ( + DataExportPost, + StorageAsyncJobGet, + StorageAsyncJobResult, + StorageAsyncJobStatus, +) +from models_library.projects_nodes_io import LocationID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyUrl, BaseModel, ByteSize, TypeAdapter from servicelib.aiohttp import status @@ -27,13 +36,23 @@ ) from servicelib.aiohttp.rest_responses import create_data_response from servicelib.common_headers import X_FORWARDED_PROTO +from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( + abort, + get_result, + get_status, + list_jobs, + submit_job, +) from servicelib.request_keys import RQT_USERID_KEY from servicelib.rest_responses import unwrap_envelope +from simcore_service_webserver.rabbitmq import get_rabbitmq_rpc_client from yarl import URL from .._meta import API_VTAG from ..login.decorators import login_required +from ..models import RequestContext from ..security.decorators import permission_required +from ._exception_handlers import handle_data_export_exceptions from .schemas import StorageFileIDStr from .settings import StorageSettings, get_plugin_settings @@ -121,10 +140,11 @@ async def _forward_request_to_storage( # 
--------------------------------------------------------------------- routes = web.RouteTableDef() -_path_prefix = f"/{API_VTAG}/storage/locations" +_storage_prefix = f"/{API_VTAG}/storage" +_storage_locations_prefix = f"{_storage_prefix}/locations" -@routes.get(_path_prefix, name="list_storage_locations") +@routes.get(_storage_locations_prefix, name="list_storage_locations") @login_required @permission_required("storage.files.*") async def list_storage_locations(request: web.Request) -> web.Response: @@ -132,7 +152,9 @@ async def list_storage_locations(request: web.Request) -> web.Response: return create_data_response(payload, status=resp_status) -@routes.get(_path_prefix + "/{location_id}/datasets", name="list_datasets_metadata") +@routes.get( + _storage_locations_prefix + "/{location_id}/datasets", name="list_datasets_metadata" +) @login_required @permission_required("storage.files.*") async def list_datasets_metadata(request: web.Request) -> web.Response: @@ -146,7 +168,7 @@ class _PathParams(BaseModel): @routes.get( - _path_prefix + "/{location_id}/files/metadata", + _storage_locations_prefix + "/{location_id}/files/metadata", name="get_files_metadata", ) @login_required @@ -171,7 +193,7 @@ class _QueryParams(BaseModel): @routes.get( - _path_prefix + "/{location_id}/datasets/{dataset_id}/metadata", + _storage_locations_prefix + "/{location_id}/datasets/{dataset_id}/metadata", name="list_dataset_files_metadata", ) @login_required @@ -199,7 +221,7 @@ class _QueryParams(BaseModel): @routes.get( - _path_prefix + "/{location_id}/files/{file_id}/metadata", + _storage_locations_prefix + "/{location_id}/files/{file_id}/metadata", name="get_file_metadata", ) @login_required @@ -216,7 +238,7 @@ class _PathParams(BaseModel): @routes.get( - _path_prefix + "/{location_id}/files/{file_id}", + _storage_locations_prefix + "/{location_id}/files/{file_id}", name="download_file", ) @login_required @@ -238,7 +260,7 @@ class _QueryParams(BaseModel): @routes.put( - _path_prefix + 
"/{location_id}/files/{file_id}", + _storage_locations_prefix + "/{location_id}/files/{file_id}", name="upload_file", ) @login_required @@ -279,7 +301,7 @@ class _QueryParams(BaseModel): @routes.post( - _path_prefix + "/{location_id}/files/{file_id}:complete", + _storage_locations_prefix + "/{location_id}/files/{file_id}:complete", name="complete_upload_file", ) @login_required @@ -307,7 +329,7 @@ class _PathParams(BaseModel): @routes.post( - _path_prefix + "/{location_id}/files/{file_id}:abort", + _storage_locations_prefix + "/{location_id}/files/{file_id}:abort", name="abort_upload_file", ) @login_required @@ -324,7 +346,8 @@ class _PathParams(BaseModel): @routes.post( - _path_prefix + "/{location_id}/files/{file_id}:complete/futures/{future_id}", + _storage_locations_prefix + + "/{location_id}/files/{file_id}:complete/futures/{future_id}", name="is_completed_upload_file", ) @login_required @@ -342,7 +365,7 @@ class _PathParams(BaseModel): @routes.delete( - _path_prefix + "/{location_id}/files/{file_id}", + _storage_locations_prefix + "/{location_id}/files/{file_id}", name="delete_file", ) @login_required @@ -358,3 +381,142 @@ class _PathParams(BaseModel): request, "DELETE", body=None ) return create_data_response(payload, status=resp_status) + + +@routes.post( + _storage_locations_prefix + "/{location_id}/export-data", name="export_data" +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def export_data(request: web.Request) -> web.Response: + class _PathParams(BaseModel): + location_id: LocationID + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + _req_ctx = RequestContext.model_validate(request) + _path_params = parse_request_path_parameters_as(_PathParams, request) + data_export_post = await parse_request_body_as( + model_schema_cls=DataExportPost, request=request + ) + async_job_rpc_get = await submit_job( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + 
job_name="start_data_export", + paths=data_export_post.to_rpc_schema( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + location_id=_path_params.location_id, + ), + ) + return create_data_response( + StorageAsyncJobGet.from_rpc_schema(async_job_rpc_get), + status=status.HTTP_202_ACCEPTED, + ) + + +@routes.get( + _storage_prefix + "/async-jobs", + name="get_async_jobs", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def get_async_jobs(request: web.Request) -> web.Response: + + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + + user_async_jobs = await list_jobs( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + filter_=json.dumps( + {"user_id": _req_ctx.user_id, "product_name": _req_ctx.product_name} + ), + ) + return create_data_response( + [StorageAsyncJobGet.from_rpc_schema(job) for job in user_async_jobs], + status=status.HTTP_200_OK, + ) + + +@routes.get( + _storage_prefix + "/async-jobs/{job_id}/status", + name="get_async_job_status", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def get_async_job_status(request: web.Request) -> web.Response: + + _req_ctx = RequestContext.model_validate(request) + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + + async_job_get = parse_request_path_parameters_as(StorageAsyncJobGet, request) + async_job_rpc_status = await get_status( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.job_id, + access_data=AsyncJobAccessData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + return create_data_response( + StorageAsyncJobStatus.from_rpc_schema(async_job_rpc_status), + status=status.HTTP_200_OK, + ) + + +@routes.post( + _storage_prefix + "/async-jobs/{job_id}:abort", + name="abort_async_job", +) +@login_required 
+@permission_required("storage.files.*") +@handle_data_export_exceptions +async def abort_async_job(request: web.Request) -> web.Response: + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + async_job_get = parse_request_path_parameters_as(StorageAsyncJobGet, request) + async_job_rpc_abort = await abort( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.job_id, + access_data=AsyncJobAccessData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + return web.Response( + status=status.HTTP_200_OK + if async_job_rpc_abort.result + else status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + +@routes.get( + _storage_prefix + "/async-jobs/{job_id}/result", + name="get_async_job_result", +) +@login_required +@permission_required("storage.files.*") +@handle_data_export_exceptions +async def get_async_job_result(request: web.Request) -> web.Response: + + _req_ctx = RequestContext.model_validate(request) + + rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) + async_job_get = parse_request_path_parameters_as(StorageAsyncJobGet, request) + async_job_rpc_result = await get_result( + rabbitmq_rpc_client=rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=async_job_get.job_id, + access_data=AsyncJobAccessData( + user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + ), + ) + return create_data_response( + StorageAsyncJobResult.from_rpc_schema(async_job_rpc_result), + status=status.HTTP_200_OK, + ) diff --git a/services/web/server/src/simcore_service_webserver/storage/api.py b/services/web/server/src/simcore_service_webserver/storage/api.py index b4e77f46410..9d65ac3faf3 100644 --- a/services/web/server/src/simcore_service_webserver/storage/api.py +++ b/services/web/server/src/simcore_service_webserver/storage/api.py @@ -7,15 +7,15 @@ from typing import Any, Final from aiohttp import ClientError, ClientSession, 
ClientTimeout, web -from models_library.generics import Envelope -from models_library.projects import ProjectID -from models_library.projects_nodes_io import LocationID, NodeID, SimCoreFileLink -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileLocation, FileLocationArray, FileMetaDataGet, PresignedLink, ) +from models_library.generics import Envelope +from models_library.projects import ProjectID +from models_library.projects_nodes_io import LocationID, NodeID, SimCoreFileLink from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import ByteSize, HttpUrl, TypeAdapter diff --git a/services/web/server/src/simcore_service_webserver/storage/plugin.py b/services/web/server/src/simcore_service_webserver/storage/plugin.py index 104a9c37319..d1db7c23c57 100644 --- a/services/web/server/src/simcore_service_webserver/storage/plugin.py +++ b/services/web/server/src/simcore_service_webserver/storage/plugin.py @@ -9,7 +9,7 @@ from .._constants import APP_SETTINGS_KEY from ..rest.plugin import setup_rest -from . import _handlers +from . 
import _rest _logger = logging.getLogger(__name__) @@ -21,4 +21,4 @@ def setup_storage(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_STORAGE # nosec setup_rest(app) - app.router.add_routes(_handlers.routes) + app.router.add_routes(_rest.routes) diff --git a/services/web/server/tests/unit/isolated/test_projects__nodes_api.py b/services/web/server/tests/unit/isolated/test_projects__nodes_api.py index 8c370cb2d3b..27f8df86ace 100644 --- a/services/web/server/tests/unit/isolated/test_projects__nodes_api.py +++ b/services/web/server/tests/unit/isolated/test_projects__nodes_api.py @@ -2,7 +2,7 @@ from uuid import uuid4 import pytest -from models_library.storage_schemas import FileMetaDataGet +from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from simcore_service_webserver.projects._nodes_api import ( _SUPPORTED_PREVIEW_FILE_EXTENSIONS, _FileWithThumbnail, diff --git a/services/web/server/tests/unit/with_dbs/01/test_storage.py b/services/web/server/tests/unit/with_dbs/01/test_storage.py index 5381dbb1962..02b1945f885 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_storage.py +++ b/services/web/server/tests/unit/with_dbs/01/test_storage.py @@ -15,10 +15,7 @@ from aiohttp.test_utils import TestClient from faker import Faker from fastapi import APIRouter, FastAPI, Request -from models_library.generics import Envelope -from models_library.projects import ProjectID -from models_library.projects_nodes_io import LocationID, StorageFileID -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( DatasetMetaDataGet, FileLocation, FileMetaDataGet, @@ -28,6 +25,9 @@ FileUploadSchema, LinkType, ) +from models_library.generics import Envelope +from models_library.projects import ProjectID +from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import AnyUrl, TypeAdapter from 
pytest_simcore.helpers.assert_checks import assert_status diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index 10f8a6ee356..2e7549cd7fa 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -24,6 +24,10 @@ from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, ) +from models_library.api_schemas_storage.storage_schemas import ( + FileMetaDataGet, + PresignedLink, +) from models_library.generics import Envelope from models_library.projects_nodes_io import NodeID from models_library.services_resources import ( @@ -31,7 +35,6 @@ ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from models_library.storage_schemas import FileMetaDataGet, PresignedLink from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import NonNegativeFloat, NonNegativeInt, TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status diff --git a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py b/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py index af1a6d3cbd3..57e16128c53 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/test_storage_handlers.py @@ -9,7 +9,7 @@ import pytest from aiohttp.test_utils import TestClient -from models_library.storage_schemas import ( +from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompleteResponse, FileUploadLinks, FileUploadSchema, @@ -43,7 +43,7 @@ async def _resp(*args, **kwargs) -> tuple[Any, int]: return (wrap_as_envelope(data=expected_response), 200) mocker.patch( - "simcore_service_webserver.storage._handlers._forward_request_to_storage", + 
"simcore_service_webserver.storage._rest._forward_request_to_storage",
         autospec=True,
         side_effect=_resp,
     )
@@ -52,7 +52,7 @@ def _resolve(*args, **kwargs) -> AnyUrl:
         return TypeAdapter(AnyUrl).validate_python("http://private-url")
 
     mocker.patch(
-        "simcore_service_webserver.storage._handlers._from_storage_url",
+        "simcore_service_webserver.storage._rest._from_storage_url",
         autospec=True,
         side_effect=_resolve,
     )
diff --git a/services/web/server/tests/unit/with_dbs/03/test_storage_rpc.py b/services/web/server/tests/unit/with_dbs/03/test_storage_rpc.py
new file mode 100644
index 00000000000..e68f3ad1ef9
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/03/test_storage_rpc.py
@@ -0,0 +1,210 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Callable
+
+import pytest
+from aiohttp.test_utils import TestClient
+from faker import Faker
+from models_library.api_schemas_rpc_async_jobs.async_jobs import (
+    AsyncJobAbort,
+    AsyncJobGet,
+    AsyncJobId,
+    AsyncJobResult,
+    AsyncJobStatus,
+)
+from models_library.api_schemas_rpc_async_jobs.exceptions import (
+    ResultError,
+    StatusError,
+)
+from models_library.api_schemas_storage.data_export_async_jobs import (
+    AccessRightError,
+    DataExportError,
+    InvalidFileIdentifierError,
+)
+from models_library.api_schemas_webserver.storage import (
+    DataExportPost,
+    StorageAsyncJobGet,
+    StorageAsyncJobStatus,
+)
+from models_library.generics import Envelope
+from models_library.progress_bar import ProgressReport
+from pytest_mock import MockerFixture
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from servicelib.aiohttp import status
+from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import (
+    abort,
+    get_result,
+    get_status,
+    list_jobs,
+    submit_job,
+)
+from simcore_postgres_database.models.users import UserRole
+
+_faker = Faker()
+
+
+@pytest.fixture
+def create_storage_rpc_client_mock(mocker: MockerFixture) -> Callable[[str, Any], None]:
+    def _(method: str, result_or_exception: Any):
+        def side_effect(*args, **kwargs):
+            if isinstance(result_or_exception, Exception):
+                raise result_or_exception
+
+            return result_or_exception
+
+        mocker.patch(
+            f"simcore_service_webserver.storage._rest.{method}",
+            side_effect=side_effect,
+        )
+
+    return _
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+@pytest.mark.parametrize(
+    "backend_result_or_exception",
+    [
+        AsyncJobGet(job_id=AsyncJobId(_faker.uuid4()), job_name=_faker.text()),
+        InvalidFileIdentifierError(file_id=Path("/my/file")),
+        AccessRightError(user_id=_faker.pyint(min_value=0), file_id=Path("/my/file")),
+        DataExportError(job_id=_faker.pyint(min_value=0)),
+    ],
+    ids=lambda x: type(x).__name__,
+)
+async def test_data_export(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, Any], None],
+    faker: Faker,
+    backend_result_or_exception: Any,
+):
+    create_storage_rpc_client_mock(
+        submit_job.__name__,
+        backend_result_or_exception,
+    )
+
+    _body = DataExportPost(paths=[Path(".")])
+    response = await client.post(
+        "/v0/storage/locations/0/export-data", data=_body.model_dump_json()
+    )
+    if isinstance(backend_result_or_exception, AsyncJobGet):
+        assert response.status == status.HTTP_202_ACCEPTED
+        Envelope[StorageAsyncJobGet].model_validate(await response.json())
+    elif isinstance(backend_result_or_exception, InvalidFileIdentifierError):
+        assert response.status == status.HTTP_404_NOT_FOUND
+    elif isinstance(backend_result_or_exception, AccessRightError):
+        assert response.status == status.HTTP_403_FORBIDDEN
+    else:
+        assert isinstance(backend_result_or_exception, DataExportError)
+        assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+@pytest.mark.parametrize(
+    "backend_result_or_exception",
+    [
+        AsyncJobStatus(
+            job_id=_faker.uuid4(),
+            progress=ProgressReport(actual_value=0.5, total=1.0),
+            done=False,
+            started=datetime.now(),
+            stopped=None,
+        ),
+        StatusError(job_id=_faker.uuid4()),
+    ],
+    ids=lambda x: type(x).__name__,
+)
+async def test_get_async_jobs_status(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, Any], None],
+    backend_result_or_exception: Any,
+):
+    _job_id = AsyncJobId(_faker.uuid4())
+    create_storage_rpc_client_mock(get_status.__name__, backend_result_or_exception)
+
+    response = await client.get(f"/v0/storage/async-jobs/{_job_id}/status")
+    if isinstance(backend_result_or_exception, AsyncJobStatus):
+        assert response.status == status.HTTP_200_OK
+        response_body_data = (
+            Envelope[StorageAsyncJobStatus].model_validate(await response.json()).data
+        )
+        assert response_body_data is not None
+    elif isinstance(backend_result_or_exception, StatusError):
+        assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR
+    else:
+        pytest.fail("Incorrectly configured test")
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+@pytest.mark.parametrize("abort_success", [True, False])
+async def test_abort_async_jobs(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, Any], None],
+    faker: Faker,
+    abort_success: bool,
+):
+    _job_id = AsyncJobId(faker.uuid4())
+    create_storage_rpc_client_mock(
+        abort.__name__, AsyncJobAbort(result=abort_success, job_id=_job_id)
+    )
+
+    response = await client.post(f"/v0/storage/async-jobs/{_job_id}:abort")
+
+    if abort_success:
+        assert response.status == status.HTTP_200_OK
+    else:
+        assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+@pytest.mark.parametrize(
+    "backend_result_or_exception",
+    [
+        AsyncJobResult(result=None, error=_faker.text()),
+        ResultError(job_id=_faker.uuid4()),
+    ],
+    ids=lambda x: type(x).__name__,
+)
+async def test_get_async_job_result(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, Any], None],
+    faker: Faker,
+    backend_result_or_exception: Any,
+):
+    _job_id = AsyncJobId(faker.uuid4())
+    create_storage_rpc_client_mock(get_result.__name__, backend_result_or_exception)
+
+    response = await client.get(f"/v0/storage/async-jobs/{_job_id}/result")
+
+    if isinstance(backend_result_or_exception, AsyncJobResult):
+        assert response.status == status.HTTP_200_OK
+    elif isinstance(backend_result_or_exception, ResultError):
+        assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR
+    else:
+        pytest.fail("Incorrectly configured test")
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+async def test_get_user_async_jobs(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    create_storage_rpc_client_mock: Callable[[str, Any], None],
+):
+    create_storage_rpc_client_mock(
+        list_jobs.__name__, [StorageAsyncJobGet(job_id=AsyncJobId(_faker.uuid4()))]
+    )
+
+    response = await client.get("/v0/storage/async-jobs")
+
+    assert response.status == status.HTTP_200_OK
+    Envelope[list[StorageAsyncJobGet]].model_validate(await response.json())